backends: use reference explicitly to properly translate GIT references to commits such as numeric branches
milka
r4653:5035738c default
rhodecode/lib/utils2.py
@@ -1,1070 +1,1071 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Some simple helper functions
24 24 """
25 25
26 26 import collections
27 27 import datetime
28 28 import dateutil.relativedelta
29 29 import hashlib
30 30 import logging
31 31 import re
32 32 import sys
33 33 import time
34 34 import urllib
35 35 import urlobject
36 36 import uuid
37 37 import getpass
38 38 from functools import update_wrapper, partial
39 39
40 40 import pygments.lexers
41 41 import sqlalchemy
42 42 import sqlalchemy.engine.url
43 43 import sqlalchemy.exc
44 44 import sqlalchemy.sql
45 45 import webob
46 46 import pyramid.threadlocal
47 47 from pyramid import compat
48 48 from pyramid.settings import asbool
49 49
50 50 import rhodecode
51 51 from rhodecode.translation import _, _pluralize
52 52
53 53
54 54 def md5(s):
55 55 return hashlib.md5(s).hexdigest()
56 56
57 57
58 58 def md5_safe(s):
59 59 return md5(safe_str(s))
60 60
61 61
62 62 def sha1(s):
63 63 return hashlib.sha1(s).hexdigest()
64 64
65 65
66 66 def sha1_safe(s):
67 67 return sha1(safe_str(s))
68 68
69 69
70 70 def __get_lem(extra_mapping=None):
71 71 """
72 72 Get language extension map based on what's inside pygments lexers
73 73 """
74 74 d = collections.defaultdict(lambda: [])
75 75
76 76 def __clean(s):
77 77 s = s.lstrip('*')
78 78 s = s.lstrip('.')
79 79
80 80 if s.find('[') != -1:
81 81 exts = []
82 82 start, stop = s.find('['), s.find(']')
83 83
84 84 for suffix in s[start + 1:stop]:
85 85 exts.append(s[:s.find('[')] + suffix)
86 86 return [e.lower() for e in exts]
87 87 else:
88 88 return [s.lower()]
89 89
90 90 for lx, t in sorted(pygments.lexers.LEXERS.items()):
91 91 m = map(__clean, t[-2])
92 92 if m:
93 93 m = reduce(lambda x, y: x + y, m)
94 94 for ext in m:
95 95 desc = lx.replace('Lexer', '')
96 96 d[ext].append(desc)
97 97
98 98 data = dict(d)
99 99
100 100 extra_mapping = extra_mapping or {}
101 101 if extra_mapping:
102 102 for k, v in extra_mapping.items():
103 103 if k not in data:
104 104 # register new mapping-to-lexer entry
105 105 data[k] = [v]
106 106
107 107 return data
108 108
109 109
110 110 def str2bool(_str):
111 111 """
112 112 returns a True/False value from the given string; it tries to translate
113 113 the string into a boolean
114 114
115 115 :param _str: string value to translate into boolean
116 116 :rtype: boolean
117 117 :returns: boolean from given string
118 118 """
119 119 if _str is None:
120 120 return False
121 121 if _str in (True, False):
122 122 return _str
123 123 _str = str(_str).strip().lower()
124 124 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
125 125
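A minimal usage sketch for str2bool, assuming this module is importable as rhodecode.lib.utils2 (the import path used by the second file in this changeset)::

    from rhodecode.lib.utils2 import str2bool

    # truthy tokens are matched case-insensitively, with whitespace stripped
    assert str2bool(' True ') is True
    assert str2bool('on') is True
    # unrecognized strings and None fall back to False
    assert str2bool('nope') is False
    assert str2bool(None) is False
    # real booleans pass through unchanged
    assert str2bool(False) is False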
126 126
127 127 def aslist(obj, sep=None, strip=True):
128 128 """
129 129 Returns the given string split by ``sep`` as a list
130 130
131 131 :param obj:
132 132 :param sep:
133 133 :param strip:
134 134 """
135 135 if isinstance(obj, (basestring,)):
136 136 lst = obj.split(sep)
137 137 if strip:
138 138 lst = [v.strip() for v in lst]
139 139 return lst
140 140 elif isinstance(obj, (list, tuple)):
141 141 return obj
142 142 elif obj is None:
143 143 return []
144 144 else:
145 145 return [obj]
146 146
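A sketch of aslist on the kinds of values it is fed from config files; note it relies on the Python 2 basestring builtin, which this module targets::

    from rhodecode.lib.utils2 import aslist

    # a comma-separated config value becomes a list of stripped items
    assert aslist('utf8, latin1 ,cp1250', sep=',') == ['utf8', 'latin1', 'cp1250']
    # lists and tuples pass through, None becomes an empty list
    assert aslist(['a', 'b']) == ['a', 'b']
    assert aslist(None) == []
    # any other scalar is wrapped in a single-element list
    assert aslist(42) == [42]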
147 147
148 148 def convert_line_endings(line, mode):
149 149 """
150 150 Converts the line endings of a given line according to the given mode
151 151
152 152 Available modes are::
153 153 0 - Unix
154 154 1 - Mac
155 155 2 - DOS
156 156
157 157 :param line: given line to convert
158 158 :param mode: mode to convert to
159 159 :rtype: str
160 160 :return: converted line according to mode
161 161 """
162 162 if mode == 0:
163 163 line = line.replace('\r\n', '\n')
164 164 line = line.replace('\r', '\n')
165 165 elif mode == 1:
166 166 line = line.replace('\r\n', '\r')
167 167 line = line.replace('\n', '\r')
168 168 elif mode == 2:
169 169 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
170 170 return line
171 171
172 172
173 173 def detect_mode(line, default):
174 174 """
175 175 Detects the line break for a given line; if the line break couldn't be
176 176 found, the given default value is returned
177 177
178 178 :param line: str line
179 179 :param default: default
180 180 :rtype: int
181 181 :return: value of line end, one of 0 - Unix, 1 - Mac, 2 - DOS
182 182 """
183 183 if line.endswith('\r\n'):
184 184 return 2
185 185 elif line.endswith('\n'):
186 186 return 0
187 187 elif line.endswith('\r'):
188 188 return 1
189 189 else:
190 190 return default
191 191
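How the two helpers compose: detect_mode classifies a line's ending and convert_line_endings rewrites it (a sketch, same import assumption as above)::

    from rhodecode.lib.utils2 import convert_line_endings, detect_mode

    dos_line = 'hello world\r\n'
    assert detect_mode(dos_line, default=0) == 2                 # \r\n -> DOS
    assert convert_line_endings(dos_line, 0) == 'hello world\n'  # normalize to Unix
    assert detect_mode('no line break', default=0) == 0          # falls back to default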
192 192
193 193 def safe_int(val, default=None):
194 194 """
195 195 Returns int() of val; if val is not convertible to int, the default is
196 196 returned instead
197 197
198 198 :param val:
199 199 :param default:
200 200 """
201 201
202 202 try:
203 203 val = int(val)
204 204 except (ValueError, TypeError):
205 205 val = default
206 206
207 207 return val
208 208
209 209
210 210 def safe_unicode(str_, from_encoding=None, use_chardet=False):
211 211 """
212 212 safe unicode function. Does a few tricks to turn str_ into unicode
213 213 
214 214 In case of a UnicodeDecodeError, we try to return it with the encoding detected
215 215 by the chardet library; if that fails we fall back to unicode with errors replaced
216 216
217 217 :param str_: string to decode
218 218 :rtype: unicode
219 219 :returns: unicode object
220 220 """
221 221 if isinstance(str_, unicode):
222 222 return str_
223 223
224 224 if not from_encoding:
225 225 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
226 226 'utf8'), sep=',')
227 227 from_encoding = DEFAULT_ENCODINGS
228 228
229 229 if not isinstance(from_encoding, (list, tuple)):
230 230 from_encoding = [from_encoding]
231 231
232 232 try:
233 233 return unicode(str_)
234 234 except UnicodeDecodeError:
235 235 pass
236 236
237 237 for enc in from_encoding:
238 238 try:
239 239 return unicode(str_, enc)
240 240 except UnicodeDecodeError:
241 241 pass
242 242
243 243 if use_chardet:
244 244 try:
245 245 import chardet
246 246 encoding = chardet.detect(str_)['encoding']
247 247 if encoding is None:
248 248 raise Exception()
249 249 return str_.decode(encoding)
250 250 except (ImportError, UnicodeDecodeError, Exception):
251 251 return unicode(str_, from_encoding[0], 'replace')
252 252 else:
253 253 return unicode(str_, from_encoding[0], 'replace')
254 254
255 255 def safe_str(unicode_, to_encoding=None, use_chardet=False):
256 256 """
257 257 safe str function. Does a few tricks to turn unicode_ into a string
258 258 
259 259 In case of a UnicodeEncodeError, we try to return it with the encoding detected
260 260 by the chardet library; if that fails we fall back to a string with errors replaced
261 261
262 262 :param unicode_: unicode to encode
263 263 :rtype: str
264 264 :returns: str object
265 265 """
266 266
267 267 # if it's not a basestring, cast to str
268 268 if not isinstance(unicode_, compat.string_types):
269 269 return str(unicode_)
270 270
271 271 if isinstance(unicode_, str):
272 272 return unicode_
273 273
274 274 if not to_encoding:
275 275 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
276 276 'utf8'), sep=',')
277 277 to_encoding = DEFAULT_ENCODINGS
278 278
279 279 if not isinstance(to_encoding, (list, tuple)):
280 280 to_encoding = [to_encoding]
281 281
282 282 for enc in to_encoding:
283 283 try:
284 284 return unicode_.encode(enc)
285 285 except UnicodeEncodeError:
286 286 pass
287 287
288 288 if use_chardet:
289 289 try:
290 290 import chardet
291 291 encoding = chardet.detect(unicode_)['encoding']
292 292 if encoding is None:
293 293 raise UnicodeError('no encoding detected')  # UnicodeEncodeError needs args, so raise its base class
294 294
295 295 return unicode_.encode(encoding)
296 296 except (ImportError, UnicodeError):
297 297 return unicode_.encode(to_encoding[0], 'replace')
298 298 else:
299 299 return unicode_.encode(to_encoding[0], 'replace')
300 300
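A round-trip sketch of the two codec helpers under Python 2 semantics; the default encoding list comes from rhodecode.CONFIG and falls back to utf8::

    # -*- coding: utf-8 -*-
    from rhodecode.lib.utils2 import safe_str, safe_unicode

    snowman = u'\u2603'
    encoded = safe_str(snowman)            # '\xe2\x98\x83' with the utf8 default
    assert safe_unicode(encoded) == snowman
    assert safe_str(42) == '42'            # non-strings are simply cast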
301 301
302 302 def remove_suffix(s, suffix):
303 303 if s.endswith(suffix):
304 304 s = s[:-1 * len(suffix)]
305 305 return s
306 306
307 307
308 308 def remove_prefix(s, prefix):
309 309 if s.startswith(prefix):
310 310 s = s[len(prefix):]
311 311 return s
312 312
313 313
314 314 def find_calling_context(ignore_modules=None):
315 315 """
316 316 Look through the calling stack and return the frame which called
317 317 this function and is part of the core module (i.e. rhodecode.*)
318 318
319 319 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
320 320 """
321 321
322 322 ignore_modules = ignore_modules or []
323 323
324 324 f = sys._getframe(2)
325 325 while f.f_back is not None:
326 326 name = f.f_globals.get('__name__')
327 327 if name and name.startswith(__name__.split('.')[0]):
328 328 if name not in ignore_modules:
329 329 return f
330 330 f = f.f_back
331 331 return None
332 332
333 333
334 334 def ping_connection(connection, branch):
335 335 if branch:
336 336 # "branch" refers to a sub-connection of a connection,
337 337 # we don't want to bother pinging on these.
338 338 return
339 339
340 340 # turn off "close with result". This flag is only used with
341 341 # "connectionless" execution, otherwise will be False in any case
342 342 save_should_close_with_result = connection.should_close_with_result
343 343 connection.should_close_with_result = False
344 344
345 345 try:
346 346 # run a SELECT 1. use a core select() so that
347 347 # the SELECT of a scalar value without a table is
348 348 # appropriately formatted for the backend
349 349 connection.scalar(sqlalchemy.sql.select([1]))
350 350 except sqlalchemy.exc.DBAPIError as err:
351 351 # catch SQLAlchemy's DBAPIError, which is a wrapper
352 352 # for the DBAPI's exception. It includes a .connection_invalidated
353 353 # attribute which specifies if this connection is a "disconnect"
354 354 # condition, which is based on inspection of the original exception
355 355 # by the dialect in use.
356 356 if err.connection_invalidated:
357 357 # run the same SELECT again - the connection will re-validate
358 358 # itself and establish a new connection. The disconnect detection
359 359 # here also causes the whole connection pool to be invalidated
360 360 # so that all stale connections are discarded.
361 361 connection.scalar(sqlalchemy.sql.select([1]))
362 362 else:
363 363 raise
364 364 finally:
365 365 # restore "close with result"
366 366 connection.should_close_with_result = save_should_close_with_result
367 367
368 368
369 369 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
370 370 """Custom engine_from_config functions."""
371 371 log = logging.getLogger('sqlalchemy.engine')
372 372 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
373 373 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
374 374
375 375 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
376 376
377 377 def color_sql(sql):
378 378 color_seq = '\033[1;33m' # This is yellow: code 33
379 379 normal = '\x1b[0m'
380 380 return ''.join([color_seq, sql, normal])
381 381
382 382 if use_ping_connection:
383 383 log.debug('Adding ping_connection on the engine config.')
384 384 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
385 385
386 386 if debug:
387 387 # attach events only for debug configuration
388 388 def before_cursor_execute(conn, cursor, statement,
389 389 parameters, context, executemany):
390 390 setattr(conn, 'query_start_time', time.time())
391 391 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
392 392 calling_context = find_calling_context(ignore_modules=[
393 393 'rhodecode.lib.caching_query',
394 394 'rhodecode.model.settings',
395 395 ])
396 396 if calling_context:
397 397 log.info(color_sql('call context %s:%s' % (
398 398 calling_context.f_code.co_filename,
399 399 calling_context.f_lineno,
400 400 )))
401 401
402 402 def after_cursor_execute(conn, cursor, statement,
403 403 parameters, context, executemany):
404 404 delattr(conn, 'query_start_time')
405 405
406 406 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
407 407 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
408 408
409 409 return engine
410 410
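A minimal sketch of driving this wrapper from a paste-style config dict; the sqlalchemy.db1.* keys are the RhodeCode-specific toggles popped above, everything else goes straight to sqlalchemy.engine_from_config (SQLAlchemy 1.x API assumed)::

    from rhodecode.lib.utils2 import engine_from_config

    config = {
        'sqlalchemy.url': 'sqlite://',             # in-memory db for illustration
        'sqlalchemy.db1.ping_connection': 'true',  # attach the ping_connection listener
        'sqlalchemy.db1.debug_query': 'false',     # skip the cursor-execute logging
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')
    print(engine.execute('SELECT 1').scalar())     # -> 1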
411 411
412 412 def get_encryption_key(config):
413 413 secret = config.get('rhodecode.encrypted_values.secret')
414 414 default = config['beaker.session.secret']
415 415 return secret or default
416 416
417 417
418 418 def age(prevdate, now=None, show_short_version=False, show_suffix=True,
419 419 short_format=False):
420 420 """
421 421 Turns a datetime into an age string.
422 422 If show_short_version is True, this generates a shorter string with
423 423 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
424 424
425 425 *IMPORTANT*
426 426 The code of this function is written in a special way so it's easier to
427 427 port it to javascript. If you intend to update it, please also update the
428 428 `jquery.timeago-extension.js` file
429 429
430 430 :param prevdate: datetime object
431 431 :param now: get current time, if not defined we use
432 432 `datetime.datetime.now()`
433 433 :param show_short_version: if it should approximate the date and
434 434 return a shorter string
435 435 :param show_suffix:
436 436 :param short_format: show short format, e.g. 2d instead of 2 days
437 437 :rtype: unicode
438 438 :returns: unicode words describing age
439 439 """
440 440
441 441 def _get_relative_delta(now, prevdate):
442 442 base = dateutil.relativedelta.relativedelta(now, prevdate)
443 443 return {
444 444 'year': base.years,
445 445 'month': base.months,
446 446 'day': base.days,
447 447 'hour': base.hours,
448 448 'minute': base.minutes,
449 449 'second': base.seconds,
450 450 }
451 451
452 452 def _is_leap_year(year):
453 453 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
454 454
455 455 def get_month(prevdate):
456 456 return prevdate.month
457 457
458 458 def get_year(prevdate):
459 459 return prevdate.year
460 460
461 461 now = now or datetime.datetime.now()
462 462 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
463 463 deltas = {}
464 464 future = False
465 465
466 466 if prevdate > now:
467 467 now_old = now
468 468 now = prevdate
469 469 prevdate = now_old
470 470 future = True
471 471 if future:
472 472 prevdate = prevdate.replace(microsecond=0)
473 473 # Get date parts deltas
474 474 for part in order:
475 475 rel_delta = _get_relative_delta(now, prevdate)
476 476 deltas[part] = rel_delta[part]
477 477
478 478 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
479 479 # not 1 hour, -59 minutes and -59 seconds)
480 480 offsets = [[5, 60], [4, 60], [3, 24]]
481 481 for element in offsets: # seconds, minutes, hours
482 482 num = element[0]
483 483 length = element[1]
484 484
485 485 part = order[num]
486 486 carry_part = order[num - 1]
487 487
488 488 if deltas[part] < 0:
489 489 deltas[part] += length
490 490 deltas[carry_part] -= 1
491 491
492 492 # Same thing for days except that the increment depends on the (variable)
493 493 # number of days in the month
494 494 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
495 495 if deltas['day'] < 0:
496 496 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
497 497 deltas['day'] += 29
498 498 else:
499 499 deltas['day'] += month_lengths[get_month(prevdate) - 1]
500 500
501 501 deltas['month'] -= 1
502 502
503 503 if deltas['month'] < 0:
504 504 deltas['month'] += 12
505 505 deltas['year'] -= 1
506 506
507 507 # Format the result
508 508 if short_format:
509 509 fmt_funcs = {
510 510 'year': lambda d: u'%dy' % d,
511 511 'month': lambda d: u'%dm' % d,
512 512 'day': lambda d: u'%dd' % d,
513 513 'hour': lambda d: u'%dh' % d,
514 514 'minute': lambda d: u'%dmin' % d,
515 515 'second': lambda d: u'%dsec' % d,
516 516 }
517 517 else:
518 518 fmt_funcs = {
519 519 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
520 520 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
521 521 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
522 522 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
523 523 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
524 524 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
525 525 }
526 526
527 527 i = 0
528 528 for part in order:
529 529 value = deltas[part]
530 530 if value != 0:
531 531
532 532 if i < 5:
533 533 sub_part = order[i + 1]
534 534 sub_value = deltas[sub_part]
535 535 else:
536 536 sub_value = 0
537 537
538 538 if sub_value == 0 or show_short_version:
539 539 _val = fmt_funcs[part](value)
540 540 if future:
541 541 if show_suffix:
542 542 return _(u'in ${ago}', mapping={'ago': _val})
543 543 else:
544 544 return _(_val)
545 545
546 546 else:
547 547 if show_suffix:
548 548 return _(u'${ago} ago', mapping={'ago': _val})
549 549 else:
550 550 return _(_val)
551 551
552 552 val = fmt_funcs[part](value)
553 553 val_detail = fmt_funcs[sub_part](sub_value)
554 554 mapping = {'val': val, 'detail': val_detail}
555 555
556 556 if short_format:
557 557 datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
558 558 if show_suffix:
559 559 datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
560 560 if future:
561 561 datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
562 562 else:
563 563 datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
564 564 if show_suffix:
565 565 datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
566 566 if future:
567 567 datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)
568 568
569 569 return datetime_tmpl
570 570 i += 1
571 571 return _(u'just now')
572 572
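A sketch of the three output shapes of age(); the exact strings pass through the translation machinery, so they are shown here for the default English catalog::

    import datetime
    from rhodecode.lib.utils2 import age

    now = datetime.datetime(2020, 1, 10, 12, 0, 0)
    print(age(now - datetime.timedelta(days=2), now=now))        # '2 days ago'
    print(age(now - datetime.timedelta(days=2), now=now,
              short_format=True))                                # '2d ago'
    print(age(now + datetime.timedelta(hours=3), now=now))       # 'in 3 hours'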
573 573
574 574 def age_from_seconds(seconds):
575 575 seconds = safe_int(seconds) or 0
576 576 prevdate = time_to_datetime(time.time() + seconds)
577 577 return age(prevdate, show_suffix=False, show_short_version=True)
578 578
579 579
580 580 def cleaned_uri(uri):
581 581 """
582 582 Quotes '[' and ']' from the uri if there is only one of them.
583 583 According to RFC 3986 we cannot use such chars in a uri.
584 584 :param uri:
585 585 :return: uri without these chars
586 586 """
587 587 return urllib.quote(uri, safe='@$:/')
588 588
589 589
590 590 def credentials_filter(uri):
591 591 """
592 592 Returns a url with the credentials removed
593 593
594 594 :param uri:
595 595 """
596 596 import urlobject
597 597 url_obj = urlobject.URLObject(cleaned_uri(uri))
598 598 url_obj = url_obj.without_password().without_username()
599 599
600 600 return url_obj
601 601
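A sketch of credentials_filter stripping the userinfo part before a url is displayed or logged::

    from rhodecode.lib.utils2 import credentials_filter

    cleaned = credentials_filter('https://user:secret@code.example.com/repo')
    assert str(cleaned) == 'https://code.example.com/repo'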
602 602
603 603 def get_host_info(request):
604 604 """
605 605 Generate host info; to obtain the full url, e.g. https://server.com,
606 606 use this:
607 607 `{scheme}://{netloc}`
608 608 """
609 609 if not request:
610 610 return {}
611 611
612 612 qualified_home_url = request.route_url('home')
613 613 parsed_url = urlobject.URLObject(qualified_home_url)
614 614 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
615 615
616 616 return {
617 617 'scheme': parsed_url.scheme,
618 618 'netloc': parsed_url.netloc+decoded_path,
619 619 'hostname': parsed_url.hostname,
620 620 }
621 621
622 622
623 623 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
624 624 qualified_home_url = request.route_url('home')
625 625 parsed_url = urlobject.URLObject(qualified_home_url)
626 626 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
627 627
628 628 args = {
629 629 'scheme': parsed_url.scheme,
630 630 'user': '',
631 631 'sys_user': getpass.getuser(),
632 632 # path if we use proxy-prefix
633 633 'netloc': parsed_url.netloc+decoded_path,
634 634 'hostname': parsed_url.hostname,
635 635 'prefix': decoded_path,
636 636 'repo': repo_name,
637 637 'repoid': str(repo_id),
638 638 'repo_type': repo_type
639 639 }
640 640 args.update(override)
641 641 args['user'] = urllib.quote(safe_str(args['user']))
642 642
643 643 for k, v in args.items():
644 644 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
645 645
646 646 # special case for SVN clone url
647 647 if repo_type == 'svn':
648 648 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
649 649
650 650 # remove leading @ sign if it's present. Case of empty user
651 651 url_obj = urlobject.URLObject(uri_tmpl)
652 652 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
653 653
654 654 return safe_unicode(url)
655 655
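A sketch of the template expansion; the stub request standing in for a real Pyramid request is hypothetical, only its route_url('home') result matters here::

    from rhodecode.lib.utils2 import get_clone_url

    class StubRequest(object):
        def route_url(self, name):
            return 'https://code.example.com/'

    url = get_clone_url(
        StubRequest(), uri_tmpl='{scheme}://{user}@{netloc}/{repo}',
        repo_name='project/repo', repo_id=42, repo_type='git')
    # the empty user collapses and the leading '@' is stripped from the netloc
    assert url == u'https://code.example.com/project/repo'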
656 656
657 657 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
658 maybe_unreachable=False):
658 maybe_unreachable=False, reference_obj=None):
659 659 """
660 660 Safe version of get_commit; if the commit doesn't exist for a
661 661 repository it returns a Dummy one instead
662 662
663 663 :param repo: repository instance
664 664 :param commit_id: commit id as str
665 665 :param commit_idx: numeric commit index
666 666 :param pre_load: optional list of commit attributes to load
667 667 :param maybe_unreachable: translate unreachable commits on git repos
668 :param reference_obj: explicitly search via a reference object in git, e.g. "branch:123" means branch "123"
668 669 """
669 670 # TODO(skreft): remove these circular imports
670 671 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
671 672 from rhodecode.lib.vcs.exceptions import RepositoryError
672 673 if not isinstance(repo, BaseRepository):
673 674 raise Exception('You must pass a Repository '
674 675 'object as first argument, got %s' % type(repo))
675 676
676 677 try:
677 678 commit = repo.get_commit(
678 679 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
679 maybe_unreachable=maybe_unreachable)
680 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
680 681 except (RepositoryError, LookupError):
681 682 commit = EmptyCommit()
682 683 return commit
683 684
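This changeset threads the new reference_obj through to the backend so git can resolve an ambiguous name explicitly: a branch literally named "123" would otherwise be indistinguishable from commit index 123. A sketch, where repo is assumed to be an already-initialized backend repository instance::

    from rhodecode.lib.utils2 import get_commit_safe
    from rhodecode.lib.vcs.backends.base import Reference

    ref = Reference('branch', '123', None)   # (type, name, commit_id)
    commit = get_commit_safe(repo, commit_id='123', reference_obj=ref)
    # on lookup failure this still degrades to an EmptyCommit instead of raising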
684 685
685 686 def datetime_to_time(dt):
686 687 if dt:
687 688 return time.mktime(dt.timetuple())
688 689
689 690
690 691 def time_to_datetime(tm):
691 692 if tm:
692 693 if isinstance(tm, compat.string_types):
693 694 try:
694 695 tm = float(tm)
695 696 except ValueError:
696 697 return
697 698 return datetime.datetime.fromtimestamp(tm)
698 699
699 700
700 701 def time_to_utcdatetime(tm):
701 702 if tm:
702 703 if isinstance(tm, compat.string_types):
703 704 try:
704 705 tm = float(tm)
705 706 except ValueError:
706 707 return
707 708 return datetime.datetime.utcfromtimestamp(tm)
708 709
709 710
710 711 MENTIONS_REGEX = re.compile(
711 712 # ^@ or @ without any special chars in front
712 713 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
713 714 # main body starts with letter, then can be . - _
714 715 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
715 716 re.VERBOSE | re.MULTILINE)
716 717
717 718
718 719 def extract_mentioned_users(s):
719 720 """
720 721 Returns unique usernames from the given string s that are @mentioned
721 722
722 723 :param s: string to get mentions
723 724 """
724 725 usrs = set()
725 726 for username in MENTIONS_REGEX.findall(s):
726 727 usrs.add(username)
727 728
728 729 return sorted(list(usrs), key=lambda k: k.lower())
729 730
730 731
731 732 class AttributeDictBase(dict):
732 733 def __getstate__(self):
733 734 odict = self.__dict__ # get attribute dictionary
734 735 return odict
735 736
736 737 def __setstate__(self, dict):
737 738 self.__dict__ = dict
738 739
739 740 __setattr__ = dict.__setitem__
740 741 __delattr__ = dict.__delitem__
741 742
742 743
743 744 class StrictAttributeDict(AttributeDictBase):
744 745 """
745 746 Strict Version of Attribute dict which raises an Attribute error when
746 747 requested attribute is not set
747 748 """
748 749 def __getattr__(self, attr):
749 750 try:
750 751 return self[attr]
751 752 except KeyError:
752 753 raise AttributeError('%s object has no attribute %s' % (
753 754 self.__class__, attr))
754 755
755 756
756 757 class AttributeDict(AttributeDictBase):
757 758 def __getattr__(self, attr):
758 759 return self.get(attr, None)
759 760
760 761
761 762
762 763 class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
763 764 def __init__(self, default_factory=None, *args, **kwargs):
764 765 # in python3 you can omit the args to super
765 766 super(OrderedDefaultDict, self).__init__(*args, **kwargs)
766 767 self.default_factory = default_factory
767 768
768 769
769 770 def fix_PATH(os_=None):
770 771 """
771 772 Get the currently active python path, and prepend it to the PATH variable
772 773 to fix issues with subprocess calls and different python versions
773 774 """
774 775 if os_ is None:
775 776 import os
776 777 else:
777 778 os = os_
778 779
779 780 cur_path = os.path.split(sys.executable)[0]
780 781 if not os.environ['PATH'].startswith(cur_path):
781 782 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
782 783
783 784
784 785 def obfuscate_url_pw(engine):
785 786 _url = engine or ''
786 787 try:
787 788 _url = sqlalchemy.engine.url.make_url(engine)
788 789 if _url.password:
789 790 _url.password = 'XXXXX'
790 791 except Exception:
791 792 pass
792 793 return unicode(_url)
793 794
794 795
795 796 def get_server_url(environ):
796 797 req = webob.Request(environ)
797 798 return req.host_url + req.script_name
798 799
799 800
800 801 def unique_id(hexlen=32):
801 802 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
802 803 return suuid(truncate_to=hexlen, alphabet=alphabet)
803 804
804 805
805 806 def suuid(url=None, truncate_to=22, alphabet=None):
806 807 """
807 808 Generate and return a short URL safe UUID.
808 809
809 810 If the url parameter is provided, set the namespace to the provided
810 811 URL and generate a UUID.
811 812
812 813 :param url: url to get the uuid for
813 814 :param truncate_to: truncate the basic 22-character UUID to a shorter version
814 815
815 816 The IDs won't be universally unique any longer, but the probability of
816 817 a collision will still be very low.
817 818 """
818 819 # Define our alphabet.
819 820 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
820 821
821 822 # If no URL is given, generate a random UUID.
822 823 if url is None:
823 824 unique_id = uuid.uuid4().int
824 825 else:
825 826 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
826 827
827 828 alphabet_length = len(_ALPHABET)
828 829 output = []
829 830 while unique_id > 0:
830 831 digit = unique_id % alphabet_length
831 832 output.append(_ALPHABET[digit])
832 833 unique_id = int(unique_id / alphabet_length)
833 834 return "".join(output)[:truncate_to]
834 835
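A sketch distinguishing the random and the namespaced flavors of suuid::

    from rhodecode.lib.utils2 import suuid, unique_id

    print(unique_id(hexlen=16))            # random, e.g. 'mK84PvTCLqWBH5fN'
    # uuid3 over NAMESPACE_URL makes the url flavor deterministic
    assert suuid(url='https://example.com') == suuid(url='https://example.com')
    assert len(suuid(truncate_to=8)) <= 8  # truncation trades uniqueness for brevity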
835 836
836 837 def get_current_rhodecode_user(request=None):
837 838 """
838 839 Gets rhodecode user from request
839 840 """
840 841 pyramid_request = request or pyramid.threadlocal.get_current_request()
841 842
842 843 # web case
843 844 if pyramid_request and hasattr(pyramid_request, 'user'):
844 845 return pyramid_request.user
845 846
846 847 # api case
847 848 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
848 849 return pyramid_request.rpc_user
849 850
850 851 return None
851 852
852 853
853 854 def action_logger_generic(action, namespace=''):
854 855 """
855 856 A generic logger for actions useful to the system overview; tries to find
856 857 an acting user for the context of the call, otherwise reports an unknown user
857 858
858 859 :param action: logging message, e.g. 'comment 5 deleted'
859 860 :type action: string
860 861
861 862 :param namespace: namespace of the logging message, e.g. 'repo.comments'
862 863 :type namespace: string
863 864
864 865 """
865 866
866 867 logger_name = 'rhodecode.actions'
867 868
868 869 if namespace:
869 870 logger_name += '.' + namespace
870 871
871 872 log = logging.getLogger(logger_name)
872 873
873 874 # get a user if we can
874 875 user = get_current_rhodecode_user()
875 876
876 877 logfunc = log.info
877 878
878 879 if not user:
879 880 user = '<unknown user>'
880 881 logfunc = log.warning
881 882
882 883 logfunc('Logging action by {}: {}'.format(user, action))
883 884
884 885
885 886 def escape_split(text, sep=',', maxsplit=-1):
886 887 r"""
887 888 Allows for escaping of the separator: e.g. arg='foo\, bar'
888 889
889 890 It should be noted that given the way bash et al. do command line parsing, those
890 891 single quotes are required.
891 892 """
892 893 escaped_sep = r'\%s' % sep
893 894
894 895 if escaped_sep not in text:
895 896 return text.split(sep, maxsplit)
896 897
897 898 before, _mid, after = text.partition(escaped_sep)
898 899 startlist = before.split(sep, maxsplit) # a regular split is fine here
899 900 unfinished = startlist[-1]
900 901 startlist = startlist[:-1]
901 902
902 903 # recurse because there may be more escaped separators
903 904 endlist = escape_split(after, sep, maxsplit)
904 905
905 906 # finish building the escaped value. we use endlist[0] because the first
906 907 # part of the string sent in recursion is the rest of the escaped value.
907 908 unfinished += sep + endlist[0]
908 909
909 910 return startlist + [unfinished] + endlist[1:] # put together all the parts
910 911
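A sketch of escape_split keeping escaped separators inside a value::

    from rhodecode.lib.utils2 import escape_split

    assert escape_split('a,b,c') == ['a', 'b', 'c']               # plain split
    assert escape_split(r'foo\, bar,baz') == ['foo, bar', 'baz']  # escaped comma kept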
911 912
912 913 class OptionalAttr(object):
913 914 """
914 915 Special Optional option that refers to another attribute. Example::
915 916
916 917 def test(apiuser, userid=Optional(OAttr('apiuser'))):
917 918 user = Optional.extract(userid)
918 919 # calls
919 920
920 921 """
921 922
922 923 def __init__(self, attr_name):
923 924 self.attr_name = attr_name
924 925
925 926 def __repr__(self):
926 927 return '<OptionalAttr:%s>' % self.attr_name
927 928
928 929 def __call__(self):
929 930 return self
930 931
931 932
932 933 # alias
933 934 OAttr = OptionalAttr
934 935
935 936
936 937 class Optional(object):
937 938 """
938 939 Defines an optional parameter::
939 940
940 941 param = param.getval() if isinstance(param, Optional) else param
941 942 param = param() if isinstance(param, Optional) else param
942 943
944 945 is equivalent to::
944 945
945 946 param = Optional.extract(param)
946 947
947 948 """
948 949
949 950 def __init__(self, type_):
950 951 self.type_ = type_
951 952
952 953 def __repr__(self):
953 954 return '<Optional:%s>' % self.type_.__repr__()
954 955
955 956 def __call__(self):
956 957 return self.getval()
957 958
958 959 def getval(self):
959 960 """
960 961 returns value from this Optional instance
961 962 """
962 963 if isinstance(self.type_, OAttr):
963 964 # use params name
964 965 return self.type_.attr_name
965 966 return self.type_
966 967
967 968 @classmethod
968 969 def extract(cls, val):
969 970 """
970 971 Extracts value from Optional() instance
971 972
972 973 :param val:
973 974 :return: original value if it's not Optional instance else
974 975 value of instance
975 976 """
976 977 if isinstance(val, cls):
977 978 return val.getval()
978 979 return val
979 980
980 981
981 982 def glob2re(pat):
982 983 """
983 984 Translate a shell PATTERN to a regular expression.
984 985
985 986 There is no way to quote meta-characters.
986 987 """
987 988
988 989 i, n = 0, len(pat)
989 990 res = ''
990 991 while i < n:
991 992 c = pat[i]
992 993 i = i+1
993 994 if c == '*':
994 995 #res = res + '.*'
995 996 res = res + '[^/]*'
996 997 elif c == '?':
997 998 #res = res + '.'
998 999 res = res + '[^/]'
999 1000 elif c == '[':
1000 1001 j = i
1001 1002 if j < n and pat[j] == '!':
1002 1003 j = j+1
1003 1004 if j < n and pat[j] == ']':
1004 1005 j = j+1
1005 1006 while j < n and pat[j] != ']':
1006 1007 j = j+1
1007 1008 if j >= n:
1008 1009 res = res + '\\['
1009 1010 else:
1010 1011 stuff = pat[i:j].replace('\\','\\\\')
1011 1012 i = j+1
1012 1013 if stuff[0] == '!':
1013 1014 stuff = '^' + stuff[1:]
1014 1015 elif stuff[0] == '^':
1015 1016 stuff = '\\' + stuff
1016 1017 res = '%s[%s]' % (res, stuff)
1017 1018 else:
1018 1019 res = res + re.escape(c)
1019 1020 return res + '\Z(?ms)'
1020 1021
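A sketch of the translation; note that '*' and '?' deliberately do not cross '/', so a pattern matches within a single path segment::

    import re
    from rhodecode.lib.utils2 import glob2re

    assert re.match(glob2re('*.py'), 'setup.py')
    assert not re.match(glob2re('*.py'), 'src/setup.py')   # '*' stops at '/'
    assert re.match(glob2re('file[!0]'), 'file1')          # '!' negates a class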
1021 1022
1022 1023 def parse_byte_string(size_str):
1023 1024 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1024 1025 if not match:
1025 1026 raise ValueError('Given size: %s is invalid, please make sure '
1026 1027 'to use the format <num>(MB|KB)' % size_str)
1027 1028
1028 1029 _parts = match.groups()
1029 1030 num, type_ = _parts
1030 1031 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1031 1032
1032 1033
1033 1034 class CachedProperty(object):
1034 1035 """
1035 1036 Lazy Attributes. With option to invalidate the cache by running a method
1036 1037
1037 1038 class Foo():
1038 1039
1039 1040 @CachedProperty
1040 1041 def heavy_func(self):
1041 1042 return 'super-calculation'
1042 1043 
1043 1044 foo = Foo()
1044 1045 foo.heavy_func # first computation
1045 1046 foo.heavy_func # fetched from cache
1046 1047 foo._invalidate_prop_cache('heavy_func')
1047 1048 # at this point accessing foo.heavy_func will be re-computed
1048 1049 """
1049 1050
1050 1051 def __init__(self, func, func_name=None):
1051 1052
1052 1053 if func_name is None:
1053 1054 func_name = func.__name__
1054 1055 self.data = (func, func_name)
1055 1056 update_wrapper(self, func)
1056 1057
1057 1058 def __get__(self, inst, class_):
1058 1059 if inst is None:
1059 1060 return self
1060 1061
1061 1062 func, func_name = self.data
1062 1063 value = func(inst)
1063 1064 inst.__dict__[func_name] = value
1064 1065 if '_invalidate_prop_cache' not in inst.__dict__:
1065 1066 inst.__dict__['_invalidate_prop_cache'] = partial(
1066 1067 self._invalidate_prop_cache, inst)
1067 1068 return value
1068 1069
1069 1070 def _invalidate_prop_cache(self, inst, name):
1070 1071 inst.__dict__.pop(name, None)
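A sketch of the descriptor in action: the wrapped method runs once per instance, its result then shadows the descriptor in the instance __dict__, and _invalidate_prop_cache drops it again::

    from rhodecode.lib.utils2 import CachedProperty

    class Repo(object):
        calls = 0

        @CachedProperty
        def commit_ids(self):
            self.calls += 1
            return ['abc', 'def']

    repo = Repo()
    assert repo.commit_ids == ['abc', 'def']   # computed
    assert repo.commit_ids == ['abc', 'def']   # served from __dict__
    assert repo.calls == 1
    repo._invalidate_prop_cache('commit_ids')
    repo.commit_ids
    assert repo.calls == 2                     # recomputed after invalidation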
rhodecode/lib/vcs/backends/base.py
@@ -1,1933 +1,1937 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 60 _Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
63 63 class Reference(_Reference):
64 64
65 65 @property
66 66 def branch(self):
67 67 if self.type == 'branch':
68 68 return self.name
69 69
70 70 @property
71 71 def bookmark(self):
72 72 if self.type == 'book':
73 73 return self.name
74 74
75 @property
76 def to_unicode(self):
77 return reference_to_unicode(self)
78
75 79
76 80 def unicode_to_reference(raw):
77 81 """
78 82 Convert a unicode (or string) to a reference object.
79 83 If unicode evaluates to False it returns None.
80 84 """
81 85 if raw:
82 86 refs = raw.split(':')
83 87 return Reference(*refs)
84 88 else:
85 89 return None
86 90
87 91
88 92 def reference_to_unicode(ref):
89 93 """
90 94 Convert a reference object to unicode.
91 95 If reference is None it returns None.
92 96 """
93 97 if ref:
94 98 return u':'.join(ref)
95 99 else:
96 100 return None
97 101
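The new to_unicode property added above closes the round trip between the serialized "type:name:commit_id" form and the Reference tuple; a sketch, after importing Reference, unicode_to_reference and reference_to_unicode from rhodecode.lib.vcs.backends.base::

    ref = unicode_to_reference(u'branch:123:deadbeef')
    assert ref.type == 'branch' and ref.name == '123'
    assert ref.branch == '123'                     # set only for type 'branch'
    assert ref.to_unicode == u'branch:123:deadbeef'
    assert unicode_to_reference(None) is None      # falsy maps to None both ways
    assert reference_to_unicode(None) is None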
98 102
99 103 class MergeFailureReason(object):
100 104 """
101 105 Enumeration with all the reasons why the server side merge could fail.
102 106
103 107 DO NOT change the number of the reasons, as they may be stored in the
104 108 database.
105 109
106 110 Changing the name of a reason is acceptable and encouraged to deprecate old
107 111 reasons.
108 112 """
109 113
110 114 # Everything went well.
111 115 NONE = 0
112 116
113 117 # An unexpected exception was raised. Check the logs for more details.
114 118 UNKNOWN = 1
115 119
116 120 # The merge was not successful, there are conflicts.
117 121 MERGE_FAILED = 2
118 122
119 123 # The merge succeeded but we could not push it to the target repository.
120 124 PUSH_FAILED = 3
121 125
122 126 # The specified target is not a head in the target repository.
123 127 TARGET_IS_NOT_HEAD = 4
124 128
125 129 # The source repository contains more branches than the target. Pushing
126 130 # the merge will create additional branches in the target.
127 131 HG_SOURCE_HAS_MORE_BRANCHES = 5
128 132
129 133 # The target reference has multiple heads. That does not allow us to correctly
130 134 # identify the target location. This could only happen for mercurial
131 135 # branches.
132 136 HG_TARGET_HAS_MULTIPLE_HEADS = 6
133 137
134 138 # The target repository is locked
135 139 TARGET_IS_LOCKED = 7
136 140
137 141 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
138 142 # An involved commit could not be found.
139 143 _DEPRECATED_MISSING_COMMIT = 8
140 144
141 145 # The target repo reference is missing.
142 146 MISSING_TARGET_REF = 9
143 147
144 148 # The source repo reference is missing.
145 149 MISSING_SOURCE_REF = 10
146 150
147 151 # The merge was not successful, there are conflicts related to sub
148 152 # repositories.
149 153 SUBREPO_MERGE_FAILED = 11
150 154
151 155
152 156 class UpdateFailureReason(object):
153 157 """
154 158 Enumeration with all the reasons why the pull request update could fail.
155 159
156 160 DO NOT change the number of the reasons, as they may be stored in the
157 161 database.
158 162
159 163 Changing the name of a reason is acceptable and encouraged to deprecate old
160 164 reasons.
161 165 """
162 166
163 167 # Everything went well.
164 168 NONE = 0
165 169
166 170 # An unexpected exception was raised. Check the logs for more details.
167 171 UNKNOWN = 1
168 172
169 173 # The pull request is up to date.
170 174 NO_CHANGE = 2
171 175
172 176 # The pull request has a reference type that is not supported for update.
173 177 WRONG_REF_TYPE = 3
174 178
175 179 # Update failed because the target reference is missing.
176 180 MISSING_TARGET_REF = 4
177 181
178 182 # Update failed because the source reference is missing.
179 183 MISSING_SOURCE_REF = 5
180 184
181 185
182 186 class MergeResponse(object):
183 187
184 188 # uses .format(**metadata) for variables
185 189 MERGE_STATUS_MESSAGES = {
186 190 MergeFailureReason.NONE: lazy_ugettext(
187 191 u'This pull request can be automatically merged.'),
188 192 MergeFailureReason.UNKNOWN: lazy_ugettext(
189 193 u'This pull request cannot be merged because of an unhandled exception. '
190 194 u'{exception}'),
191 195 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
192 196 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
193 197 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
194 198 u'This pull request could not be merged because push to '
195 199 u'target:`{target}@{merge_commit}` failed.'),
196 200 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
197 201 u'This pull request cannot be merged because the target '
198 202 u'`{target_ref.name}` is not a head.'),
199 203 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
200 204 u'This pull request cannot be merged because the source contains '
201 205 u'more branches than the target.'),
202 206 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
203 207 u'This pull request cannot be merged because the target `{target_ref.name}` '
204 208 u'has multiple heads: `{heads}`.'),
205 209 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
206 210 u'This pull request cannot be merged because the target repository is '
207 211 u'locked by {locked_by}.'),
208 212
209 213 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
210 214 u'This pull request cannot be merged because the target '
211 215 u'reference `{target_ref.name}` is missing.'),
212 216 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
213 217 u'This pull request cannot be merged because the source '
214 218 u'reference `{source_ref.name}` is missing.'),
215 219 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
216 220 u'This pull request cannot be merged because of conflicts related '
217 221 u'to sub repositories.'),
218 222
219 223 # Deprecations
220 224 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
221 225 u'This pull request cannot be merged because the target or the '
222 226 u'source reference is missing.'),
223 227
224 228 }
225 229
226 230 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
227 231 self.possible = possible
228 232 self.executed = executed
229 233 self.merge_ref = merge_ref
230 234 self.failure_reason = failure_reason
231 235 self.metadata = metadata or {}
232 236
233 237 def __repr__(self):
234 238 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
235 239
236 240 def __eq__(self, other):
237 241 same_instance = isinstance(other, self.__class__)
238 242 return same_instance \
239 243 and self.possible == other.possible \
240 244 and self.executed == other.executed \
241 245 and self.failure_reason == other.failure_reason
242 246
243 247 @property
244 248 def label(self):
245 249 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
246 250 not k.startswith('_'))
247 251 return label_dict.get(self.failure_reason)
248 252
249 253 @property
250 254 def merge_status_message(self):
251 255 """
252 256 Return a human friendly error message for the given merge status code.
253 257 """
254 258 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
255 259
256 260 try:
257 261 return msg.format(**self.metadata)
258 262 except Exception:
259 263 log.exception('Failed to format %s message', self)
260 264 return msg
261 265
262 266 def asdict(self):
263 267 data = {}
264 268 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
265 269 'merge_status_message']:
266 270 data[k] = getattr(self, k)
267 271 return data
268 272
269 273
270 274 class TargetRefMissing(ValueError):
271 275 pass
272 276
273 277
274 278 class SourceRefMissing(ValueError):
275 279 pass
276 280
277 281
278 282 class BaseRepository(object):
279 283 """
280 284 Base Repository for final backends
281 285
282 286 .. attribute:: DEFAULT_BRANCH_NAME
283 287
284 288 name of the default branch (i.e. "trunk" for svn, "master" for git, etc.)
285 289
286 290 .. attribute:: commit_ids
287 291
288 292 list of all available commit ids, in ascending order
289 293
290 294 .. attribute:: path
291 295
292 296 absolute path to the repository
293 297
294 298 .. attribute:: bookmarks
295 299
296 300 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
297 301 there are no bookmarks or the backend implementation does not support
298 302 bookmarks.
299 303
300 304 .. attribute:: tags
301 305
302 306 Mapping from name to :term:`Commit ID` of the tag.
303 307
304 308 """
305 309
306 310 DEFAULT_BRANCH_NAME = None
307 311 DEFAULT_CONTACT = u"Unknown"
308 312 DEFAULT_DESCRIPTION = u"unknown"
309 313 EMPTY_COMMIT_ID = '0' * 40
310 314
311 315 path = None
312 316
313 317 _is_empty = None
314 318 _commit_ids = {}
315 319
316 320 def __init__(self, repo_path, config=None, create=False, **kwargs):
317 321 """
318 322 Initializes the repository. Raises RepositoryError if the repository could
319 323 not be found at the given ``repo_path``, or if the directory at ``repo_path``
320 324 exists and ``create`` is set to True.
321 325
322 326 :param repo_path: local path of the repository
323 327 :param config: repository configuration
324 328 :param create=False: if set to True, would try to create repository.
325 329 :param src_url=None: if set, should be proper url from which repository
326 330 would be cloned; requires ``create`` parameter to be set to True -
327 331 raises RepositoryError if src_url is set and create evaluates to
328 332 False
329 333 """
330 334 raise NotImplementedError
331 335
332 336 def __repr__(self):
333 337 return '<%s at %s>' % (self.__class__.__name__, self.path)
334 338
335 339 def __len__(self):
336 340 return self.count()
337 341
338 342 def __eq__(self, other):
339 343 same_instance = isinstance(other, self.__class__)
340 344 return same_instance and other.path == self.path
341 345
342 346 def __ne__(self, other):
343 347 return not self.__eq__(other)
344 348
345 349 def get_create_shadow_cache_pr_path(self, db_repo):
346 350 path = db_repo.cached_diffs_dir
347 351 if not os.path.exists(path):
348 352 os.makedirs(path, 0o755)
349 353 return path
350 354
351 355 @classmethod
352 356 def get_default_config(cls, default=None):
353 357 config = Config()
354 358 if default and isinstance(default, list):
355 359 for section, key, val in default:
356 360 config.set(section, key, val)
357 361 return config
358 362
359 363 @LazyProperty
360 364 def _remote(self):
361 365 raise NotImplementedError
362 366
363 367 def _heads(self, branch=None):
364 368 return []
365 369
366 370 @LazyProperty
367 371 def EMPTY_COMMIT(self):
368 372 return EmptyCommit(self.EMPTY_COMMIT_ID)
369 373
370 374 @LazyProperty
371 375 def alias(self):
372 376 for k, v in settings.BACKENDS.items():
373 377 if v.split('.')[-1] == str(self.__class__.__name__):
374 378 return k
375 379
376 380 @LazyProperty
377 381 def name(self):
378 382 return safe_unicode(os.path.basename(self.path))
379 383
380 384 @LazyProperty
381 385 def description(self):
382 386 raise NotImplementedError
383 387
384 388 def refs(self):
385 389 """
386 390 returns a `dict` with branches, bookmarks, tags, and closed_branches
387 391 for this repository
388 392 """
389 393 return dict(
390 394 branches=self.branches,
391 395 branches_closed=self.branches_closed,
392 396 tags=self.tags,
393 397 bookmarks=self.bookmarks
394 398 )
395 399
396 400 @LazyProperty
397 401 def branches(self):
398 402 """
399 403 A `dict` which maps branch names to commit ids.
400 404 """
401 405 raise NotImplementedError
402 406
403 407 @LazyProperty
404 408 def branches_closed(self):
405 409 """
406 410 A `dict` which maps closed branch names to commit ids.
407 411 """
408 412 raise NotImplementedError
409 413
410 414 @LazyProperty
411 415 def bookmarks(self):
412 416 """
413 417 A `dict` which maps bookmark names to commit ids.
414 418 """
415 419 raise NotImplementedError
416 420
417 421 @LazyProperty
418 422 def tags(self):
419 423 """
420 424 A `dict` which maps tag names to commit ids.
421 425 """
422 426 raise NotImplementedError
423 427
424 428 @LazyProperty
425 429 def size(self):
426 430 """
427 431 Returns combined size in bytes for all repository files
428 432 """
429 433 tip = self.get_commit()
430 434 return tip.size
431 435
432 436 def size_at_commit(self, commit_id):
433 437 commit = self.get_commit(commit_id)
434 438 return commit.size
435 439
436 440 def _check_for_empty(self):
437 441 no_commits = len(self._commit_ids) == 0
438 442 if no_commits:
439 443 # check on remote to be sure
440 444 return self._remote.is_empty()
441 445 else:
442 446 return False
443 447
444 448 def is_empty(self):
445 449 if rhodecode.is_test:
446 450 return self._check_for_empty()
447 451
448 452 if self._is_empty is None:
449 453 # cache empty for production, but not tests
450 454 self._is_empty = self._check_for_empty()
451 455
452 456 return self._is_empty
453 457
454 458 @staticmethod
455 459 def check_url(url, config):
456 460 """
457 461 Function will check the given url and try to verify that it's a valid
458 462 link.
459 463 """
460 464 raise NotImplementedError
461 465
462 466 @staticmethod
463 467 def is_valid_repository(path):
464 468 """
465 469 Check if given `path` contains a valid repository of this backend
466 470 """
467 471 raise NotImplementedError
468 472
469 473 # ==========================================================================
470 474 # COMMITS
471 475 # ==========================================================================
472 476
473 477 @CachedProperty
474 478 def commit_ids(self):
475 479 raise NotImplementedError
476 480
477 481 def append_commit_id(self, commit_id):
478 482 if commit_id not in self.commit_ids:
479 483 self._rebuild_cache(self.commit_ids + [commit_id])
480 484
481 485 # clear cache
482 486 self._invalidate_prop_cache('commit_ids')
483 487 self._is_empty = False
484 488
485 489 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
486 translate_tag=None, maybe_unreachable=False):
490 translate_tag=None, maybe_unreachable=False, reference_obj=None):
487 491 """
488 492 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
489 493 are both None, most recent commit is returned.
490 494
491 495 :param pre_load: Optional. List of commit attributes to load.
492 496
493 497 :raises ``EmptyRepositoryError``: if there are no commits
494 498 """
495 499 raise NotImplementedError
496 500
497 501 def __iter__(self):
498 502 for commit_id in self.commit_ids:
499 503 yield self.get_commit(commit_id=commit_id)
500 504
501 505 def get_commits(
502 506 self, start_id=None, end_id=None, start_date=None, end_date=None,
503 507 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
504 508 """
505 509 Returns iterator of `BaseCommit` objects from start to end
506 510 not inclusive. This should behave just like a list, ie. end is not
507 511 inclusive.
508 512
509 513 :param start_id: None or str, must be a valid commit id
510 514 :param end_id: None or str, must be a valid commit id
511 515 :param start_date:
512 516 :param end_date:
513 517 :param branch_name:
514 518 :param show_hidden:
515 519 :param pre_load:
516 520 :param translate_tags:
517 521 """
518 522 raise NotImplementedError
519 523
520 524 def __getitem__(self, key):
521 525 """
522 526 Allows index based access to the commit objects of this repository.
523 527 """
524 528 pre_load = ["author", "branch", "date", "message", "parents"]
525 529 if isinstance(key, slice):
526 530 return self._get_range(key, pre_load)
527 531 return self.get_commit(commit_idx=key, pre_load=pre_load)
528 532
529 533 def _get_range(self, slice_obj, pre_load):
530 534 for commit_id in self.commit_ids.__getitem__(slice_obj):
531 535 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
532 536
533 537 def count(self):
534 538 return len(self.commit_ids)
535 539
536 540 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
537 541 """
538 542 Creates and returns a tag for the given ``commit_id``.
539 543
540 544 :param name: name for new tag
541 545 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
542 546 :param commit_id: commit id for which new tag would be created
543 547 :param message: message of the tag's commit
544 548 :param date: date of tag's commit
545 549
546 550 :raises TagAlreadyExistError: if tag with same name already exists
547 551 """
548 552 raise NotImplementedError
549 553
550 554 def remove_tag(self, name, user, message=None, date=None):
551 555 """
552 556 Removes tag with the given ``name``.
553 557
554 558 :param name: name of the tag to be removed
555 559 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
556 560 :param message: message of the tag's removal commit
557 561 :param date: date of tag's removal commit
558 562
559 563 :raises TagDoesNotExistError: if a tag with the given name does not exist
560 564 """
561 565 raise NotImplementedError
562 566
563 567 def get_diff(
564 568 self, commit1, commit2, path=None, ignore_whitespace=False,
565 569 context=3, path1=None):
566 570 """
567 571 Returns (git like) *diff*, as plain text. Shows changes introduced by
568 572 `commit2` since `commit1`.
569 573
570 574 :param commit1: Entry point from which diff is shown. Can be
571 575 ``self.EMPTY_COMMIT`` - in this case, patch showing all
572 576 the changes since empty state of the repository until `commit2`
573 577 :param commit2: Until which commit changes should be shown.
574 578 :param path: Can be set to a path of a file to create a diff of that
575 579 file. If `path1` is also set, this value is only associated to
576 580 `commit2`.
577 581 :param ignore_whitespace: If set to ``True``, would not show whitespace
578 582 changes. Defaults to ``False``.
579 583 :param context: How many lines before/after changed lines should be
580 584 shown. Defaults to ``3``.
581 585 :param path1: Can be set to a path to associate with `commit1`. This
582 586 parameter works only for backends which support diff generation for
583 587 different paths. Other backends will raise a `ValueError` if `path1`
584 588 is set and has a different value than `path`.
585 589 :param file_path: filter this diff by given path pattern
586 590 """
587 591 raise NotImplementedError
588 592
589 593 def strip(self, commit_id, branch=None):
590 594 """
591 595 Strip given commit_id from the repository
592 596 """
593 597 raise NotImplementedError
594 598
595 599 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
596 600 """
597 601 Return the latest common ancestor commit if one exists for this repo
598 602 `commit_id1` vs `commit_id2` from `repo2`.
599 603
600 604 :param commit_id1: Commit id from this repository to use as a
601 605 target for the comparison.
602 606 :param commit_id2: Source commit id to use for comparison.
603 607 :param repo2: Source repository to use for comparison.
604 608 """
605 609 raise NotImplementedError
606 610
607 611 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
608 612 """
609 613 Compare this repository's revision `commit_id1` with `commit_id2`.
610 614
611 615 Returns a tuple(commits, ancestor) that would be merged from
612 616 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
613 617 will be returned as ancestor.
614 618
615 619 :param commit_id1: Commit id from this repository to use as a
616 620 target for the comparison.
617 621 :param commit_id2: Source commit id to use for comparison.
618 622 :param repo2: Source repository to use for comparison.
619 623 :param merge: If set to ``True`` will do a merge compare which also
620 624 returns the common ancestor.
621 625 :param pre_load: Optional. List of commit attributes to load.
622 626 """
623 627 raise NotImplementedError
624 628
625 629 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
626 630 user_name='', user_email='', message='', dry_run=False,
627 631 use_rebase=False, close_branch=False):
628 632 """
629 633 Merge the revisions specified in `source_ref` from `source_repo`
630 634 onto the `target_ref` of this repository.
631 635
632 636 `source_ref` and `target_ref` are named tuples with the following
633 637 fields `type`, `name` and `commit_id`.
634 638
635 639 Returns a MergeResponse named tuple with the following fields
636 640 'possible', 'executed', 'source_commit', 'target_commit',
637 641 'merge_commit'.
638 642
639 643 :param repo_id: `repo_id` target repo id.
640 644 :param workspace_id: `workspace_id` unique identifier.
641 645 :param target_ref: `target_ref` points to the commit on top of which
642 646 the `source_ref` should be merged.
643 647 :param source_repo: The repository that contains the commits to be
644 648 merged.
645 649 :param source_ref: `source_ref` points to the topmost commit from
646 650 the `source_repo` which should be merged.
647 651 :param user_name: Merge commit `user_name`.
648 652 :param user_email: Merge commit `user_email`.
649 653 :param message: Merge commit `message`.
650 654 :param dry_run: If `True` the merge will not take place.
651 655 :param use_rebase: If `True` commits from the source will be rebased
652 656 on top of the target instead of being merged.
653 657 :param close_branch: If `True` the branch will be closed before merging it
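Example (a hypothetical dry-run sketch; ``repo_id``, the commit ids and
the workspace name are placeholders; ``Reference`` fields are ``type``,
``name`` and ``commit_id``)::

    target = Reference('branch', 'master', target_commit_id)
    source = Reference('branch', 'feature', source_commit_id)
    resp = repo.merge(repo_id, 'pr-1-workspace', target, source_repo,
                      source, user_name='Joe Doe',
                      user_email='joe.doe@example.com',
                      message='merge feature', dry_run=True)
    if resp.possible:
        pass  # safe to repeat with dry_run=False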
654 658 """
655 659 if dry_run:
656 660 message = message or settings.MERGE_DRY_RUN_MESSAGE
657 661 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
658 662 user_name = user_name or settings.MERGE_DRY_RUN_USER
659 663 else:
660 664 if not user_name:
661 665 raise ValueError('user_name cannot be empty')
662 666 if not user_email:
663 667 raise ValueError('user_email cannot be empty')
664 668 if not message:
665 669 raise ValueError('message cannot be empty')
666 670
667 671 try:
668 672 return self._merge_repo(
669 673 repo_id, workspace_id, target_ref, source_repo,
670 674 source_ref, message, user_name, user_email, dry_run=dry_run,
671 675 use_rebase=use_rebase, close_branch=close_branch)
672 676 except RepositoryError as exc:
673 677 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
674 678 return MergeResponse(
675 679 False, False, None, MergeFailureReason.UNKNOWN,
676 680 metadata={'exception': str(exc)})
677 681
678 682 def _merge_repo(self, repo_id, workspace_id, target_ref,
679 683 source_repo, source_ref, merge_message,
680 684 merger_name, merger_email, dry_run=False,
681 685 use_rebase=False, close_branch=False):
682 686 """Internal implementation of merge."""
683 687 raise NotImplementedError
684 688
685 689 def _maybe_prepare_merge_workspace(
686 690 self, repo_id, workspace_id, target_ref, source_ref):
687 691 """
688 692 Create the merge workspace.
689 693
690 694 :param workspace_id: `workspace_id` unique identifier.
691 695 """
692 696 raise NotImplementedError
693 697
694 698 @classmethod
695 699 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
696 700 """
697 701 Legacy version of the shadow repository path. We still need it for
698 702 backward compatibility
699 703 """
700 704 return os.path.join(
701 705 os.path.dirname(repo_path),
702 706 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
703 707
704 708 @classmethod
705 709 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
706 710 # The name of the shadow repository must start with '.', so it is
707 711 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
708 712 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
709 713 if os.path.exists(legacy_repository_path):
710 714 return legacy_repository_path
711 715 else:
712 716 return os.path.join(
713 717 os.path.dirname(repo_path),
714 718 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
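# For illustration (hypothetical values): repo_path='/repos/proj',
# repo_id=42, workspace_id='pr-7' yields
# '/repos/.__shadow_repo_42_pr-7', a hidden sibling of the repository.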
715 719
716 720 def cleanup_merge_workspace(self, repo_id, workspace_id):
717 721 """
718 722 Remove merge workspace.
719 723
720 724 This function MUST not fail in case there is no workspace associated with
721 725 the given `workspace_id`.
722 726
723 727 :param workspace_id: `workspace_id` unique identifier.
724 728 """
725 729 shadow_repository_path = self._get_shadow_repository_path(
726 730 self.path, repo_id, workspace_id)
727 731 shadow_repository_path_del = '{}.{}.delete'.format(
728 732 shadow_repository_path, time.time())
729 733
730 734 # move the shadow repo, so it never conflicts with the one used.
731 735 # we use this method because shutil.rmtree had some edge case problems
732 736 # removing symlinked repositories
733 737 if not os.path.isdir(shadow_repository_path):
734 738 return
735 739
736 740 shutil.move(shadow_repository_path, shadow_repository_path_del)
737 741 try:
738 742 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
739 743 except Exception:
740 744 log.exception('Failed to gracefully remove shadow repo under %s',
741 745 shadow_repository_path_del)
742 746 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
743 747
744 748 # ========== #
745 749 # COMMIT API #
746 750 # ========== #
747 751
748 752 @LazyProperty
749 753 def in_memory_commit(self):
750 754 """
751 755 Returns :class:`InMemoryCommit` object for this repository.
752 756 """
753 757 raise NotImplementedError
754 758
755 759 # ======================== #
756 760 # UTILITIES FOR SUBCLASSES #
757 761 # ======================== #
758 762
759 763 def _validate_diff_commits(self, commit1, commit2):
760 764 """
761 765 Validates that the given commits are related to this repository.
762 766
763 767 Intended as a utility for subclasses to have a consistent validation
764 768 of input parameters in methods like :meth:`get_diff`.
765 769 """
766 770 self._validate_commit(commit1)
767 771 self._validate_commit(commit2)
768 772 if (isinstance(commit1, EmptyCommit) and
769 773 isinstance(commit2, EmptyCommit)):
770 774 raise ValueError("Cannot compare two empty commits")
771 775
772 776 def _validate_commit(self, commit):
773 777 if not isinstance(commit, BaseCommit):
774 778 raise TypeError(
775 779 "%s is not of type BaseCommit" % repr(commit))
776 780 if commit.repository != self and not isinstance(commit, EmptyCommit):
777 781 raise ValueError(
778 782 "Commit %s must be a valid commit from this repository %s, "
779 783 "related to this repository instead %s." %
780 784 (commit, self, commit.repository))
781 785
782 786 def _validate_commit_id(self, commit_id):
783 787 if not isinstance(commit_id, compat.string_types):
784 788 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
785 789
786 790 def _validate_commit_idx(self, commit_idx):
787 791 if not isinstance(commit_idx, (int, long)):
788 792 raise TypeError("commit_idx must be a numeric value")
789 793
790 794 def _validate_branch_name(self, branch_name):
791 795 if branch_name and branch_name not in self.branches_all:
792 796 msg = ("Branch %s not found in %s" % (branch_name, self))
793 797 raise BranchDoesNotExistError(msg)
794 798
795 799 #
796 800 # Supporting deprecated API parts
797 801 # TODO: johbo: consider to move this into a mixin
798 802 #
799 803
800 804 @property
801 805 def EMPTY_CHANGESET(self):
802 806 warnings.warn(
803 807 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
804 808 return self.EMPTY_COMMIT_ID
805 809
806 810 @property
807 811 def revisions(self):
808 812 warnings.warn("Use commits attribute instead", DeprecationWarning)
809 813 return self.commit_ids
810 814
811 815 @revisions.setter
812 816 def revisions(self, value):
813 817 warnings.warn("Use commits attribute instead", DeprecationWarning)
814 818 self.commit_ids = value
815 819
816 820 def get_changeset(self, revision=None, pre_load=None):
817 821 warnings.warn("Use get_commit instead", DeprecationWarning)
818 822 commit_id = None
819 823 commit_idx = None
820 824 if isinstance(revision, compat.string_types):
821 825 commit_id = revision
822 826 else:
823 827 commit_idx = revision
824 828 return self.get_commit(
825 829 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
826 830
827 831 def get_changesets(
828 832 self, start=None, end=None, start_date=None, end_date=None,
829 833 branch_name=None, pre_load=None):
830 834 warnings.warn("Use get_commits instead", DeprecationWarning)
831 835 start_id = self._revision_to_commit(start)
832 836 end_id = self._revision_to_commit(end)
833 837 return self.get_commits(
834 838 start_id=start_id, end_id=end_id, start_date=start_date,
835 839 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
836 840
837 841 def _revision_to_commit(self, revision):
838 842 """
839 843 Translates a revision to a commit_id
840 844
841 845 Helps to support the old changeset based API which allows using
842 846 commit ids and commit indices interchangeably.
843 847 """
844 848 if revision is None:
845 849 return revision
846 850
847 851 if isinstance(revision, compat.string_types):
848 852 commit_id = revision
849 853 else:
850 854 commit_id = self.commit_ids[revision]
851 855 return commit_id
852 856
853 857 @property
854 858 def in_memory_changeset(self):
855 859 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
856 860 return self.in_memory_commit
857 861
858 862 def get_path_permissions(self, username):
859 863 """
860 864 Returns a path permission checker or None if not supported
861 865
862 866 :param username: session user name
863 867 :return: an instance of BasePathPermissionChecker or None
864 868 """
865 869 return None
866 870
867 871 def install_hooks(self, force=False):
868 872 return self._remote.install_hooks(force)
869 873
870 874 def get_hooks_info(self):
871 875 return self._remote.get_hooks_info()
872 876
873 877
874 878 class BaseCommit(object):
875 879 """
876 880 Each backend should implement its commit representation.
877 881
878 882 **Attributes**
879 883
880 884 ``repository``
881 885 repository object within which commit exists
882 886
883 887 ``id``
884 888 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
885 889 just ``tip``.
886 890
887 891 ``raw_id``
888 892 raw commit representation (e.g. the full 40-character sha for the git
889 893 backend)
890 894
891 895 ``short_id``
892 896 shortened (if applicable) version of ``raw_id``; a simple
893 897 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
894 898 as ``raw_id`` for subversion
895 899
896 900 ``idx``
897 901 commit index
898 902
899 903 ``files``
900 904 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
901 905
902 906 ``dirs``
903 907 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
904 908
905 909 ``nodes``
906 910 combined list of ``Node`` objects
907 911
908 912 ``author``
909 913 author of the commit, as unicode
910 914
911 915 ``message``
912 916 message of the commit, as unicode
913 917
914 918 ``parents``
915 919 list of parent commits
916 920
917 921 """
918 922 repository = None
919 923 branch = None
920 924
921 925 """
922 926 Depending on the backend this should be set to the branch name of the
923 927 commit. Backends not supporting branches on commits should leave this
924 928 value as ``None``.
925 929 """
926 930
927 931 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
928 932 """
929 933 This template is used to generate a default prefix for repository archives
930 934 if no prefix has been specified.
931 935 """
932 936
933 937 def __str__(self):
934 938 return '<%s at %s:%s>' % (
935 939 self.__class__.__name__, self.idx, self.short_id)
936 940
937 941 def __repr__(self):
938 942 return self.__str__()
939 943
940 944 def __unicode__(self):
941 945 return u'%s:%s' % (self.idx, self.short_id)
942 946
943 947 def __eq__(self, other):
944 948 same_instance = isinstance(other, self.__class__)
945 949 return same_instance and self.raw_id == other.raw_id
946 950
947 951 def __json__(self):
948 952 parents = []
949 953 try:
950 954 for parent in self.parents:
951 955 parents.append({'raw_id': parent.raw_id})
952 956 except NotImplementedError:
953 957 # empty commit doesn't have parents implemented
954 958 pass
955 959
956 960 return {
957 961 'short_id': self.short_id,
958 962 'raw_id': self.raw_id,
959 963 'revision': self.idx,
960 964 'message': self.message,
961 965 'date': self.date,
962 966 'author': self.author,
963 967 'parents': parents,
964 968 'branch': self.branch
965 969 }
966 970
967 971 def __getstate__(self):
968 972 d = self.__dict__.copy()
969 973 d.pop('_remote', None)
970 974 d.pop('repository', None)
971 975 return d
972 976
973 977 def serialize(self):
974 978 return self.__json__()
975 979
976 980 def _get_refs(self):
977 981 return {
978 982 'branches': [self.branch] if self.branch else [],
979 983 'bookmarks': getattr(self, 'bookmarks', []),
980 984 'tags': self.tags
981 985 }
982 986
983 987 @LazyProperty
984 988 def last(self):
985 989 """
986 990 ``True`` if this is the last commit in the repository, ``False``
987 991 otherwise; trying to access this attribute while there are no
988 992 commits would raise `EmptyRepositoryError`
989 993 """
990 994 if self.repository is None:
991 995 raise CommitError("Cannot check if it's the most recent commit")
992 996 return self.raw_id == self.repository.commit_ids[-1]
993 997
994 998 @LazyProperty
995 999 def parents(self):
996 1000 """
997 1001 Returns list of parent commits.
998 1002 """
999 1003 raise NotImplementedError
1000 1004
1001 1005 @LazyProperty
1002 1006 def first_parent(self):
1003 1007 """
1004 1008 Returns the first parent commit, or an ``EmptyCommit`` if there are no parents.
1005 1009 """
1006 1010 return self.parents[0] if self.parents else EmptyCommit()
1007 1011
1008 1012 @property
1009 1013 def merge(self):
1010 1014 """
1011 1015 Returns ``True`` if this commit is a merge (has more than one parent).
1012 1016 """
1013 1017 return len(self.parents) > 1
1014 1018
1015 1019 @LazyProperty
1016 1020 def children(self):
1017 1021 """
1018 1022 Returns list of child commits.
1019 1023 """
1020 1024 raise NotImplementedError
1021 1025
1022 1026 @LazyProperty
1023 1027 def id(self):
1024 1028 """
1025 1029 Returns string identifying this commit.
1026 1030 """
1027 1031 raise NotImplementedError
1028 1032
1029 1033 @LazyProperty
1030 1034 def raw_id(self):
1031 1035 """
1032 1036 Returns raw string identifying this commit.
1033 1037 """
1034 1038 raise NotImplementedError
1035 1039
1036 1040 @LazyProperty
1037 1041 def short_id(self):
1038 1042 """
1039 1043 Returns shortened version of ``raw_id`` attribute, as string,
1040 1044 identifying this commit, useful for presentation to users.
1041 1045 """
1042 1046 raise NotImplementedError
1043 1047
1044 1048 @LazyProperty
1045 1049 def idx(self):
1046 1050 """
1047 1051 Returns integer identifying this commit.
1048 1052 """
1049 1053 raise NotImplementedError
1050 1054
1051 1055 @LazyProperty
1052 1056 def committer(self):
1053 1057 """
1054 1058 Returns committer for this commit
1055 1059 """
1056 1060 raise NotImplementedError
1057 1061
1058 1062 @LazyProperty
1059 1063 def committer_name(self):
1060 1064 """
1061 1065 Returns committer name for this commit
1062 1066 """
1063 1067
1064 1068 return author_name(self.committer)
1065 1069
1066 1070 @LazyProperty
1067 1071 def committer_email(self):
1068 1072 """
1069 1073 Returns committer email address for this commit
1070 1074 """
1071 1075
1072 1076 return author_email(self.committer)
1073 1077
1074 1078 @LazyProperty
1075 1079 def author(self):
1076 1080 """
1077 1081 Returns author for this commit
1078 1082 """
1079 1083
1080 1084 raise NotImplementedError
1081 1085
1082 1086 @LazyProperty
1083 1087 def author_name(self):
1084 1088 """
1085 1089 Returns author name for this commit
1086 1090 """
1087 1091
1088 1092 return author_name(self.author)
1089 1093
1090 1094 @LazyProperty
1091 1095 def author_email(self):
1092 1096 """
1093 1097 Returns author email address for this commit
1094 1098 """
1095 1099
1096 1100 return author_email(self.author)
1097 1101
1098 1102 def get_file_mode(self, path):
1099 1103 """
1100 1104 Returns stat mode of the file at `path`.
1101 1105 """
1102 1106 raise NotImplementedError
1103 1107
1104 1108 def is_link(self, path):
1105 1109 """
1106 1110 Returns ``True`` if given `path` is a symlink
1107 1111 """
1108 1112 raise NotImplementedError
1109 1113
1110 1114 def is_node_binary(self, path):
1111 1115 """
1112 1116 Returns ``True`` if the given path is a binary file
1113 1117 """
1114 1118 raise NotImplementedError
1115 1119
1116 1120 def get_file_content(self, path):
1117 1121 """
1118 1122 Returns content of the file at the given `path`.
1119 1123 """
1120 1124 raise NotImplementedError
1121 1125
1122 1126 def get_file_content_streamed(self, path):
1123 1127 """
1124 1128 returns a streaming response from vcsserver with file content
1125 1129 """
1126 1130 raise NotImplementedError
1127 1131
1128 1132 def get_file_size(self, path):
1129 1133 """
1130 1134 Returns size of the file at the given `path`.
1131 1135 """
1132 1136 raise NotImplementedError
1133 1137
1134 1138 def get_path_commit(self, path, pre_load=None):
1135 1139 """
1136 1140 Returns last commit of the file at the given `path`.
1137 1141
1138 1142 :param pre_load: Optional. List of commit attributes to load.
1139 1143 """
1140 1144 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1141 1145 if not commits:
1142 1146 raise RepositoryError(
1143 1147 'Failed to fetch history for path {}. '
1144 1148 'Please check if such path exists in your repository'.format(
1145 1149 path))
1146 1150 return commits[0]
1147 1151
1148 1152 def get_path_history(self, path, limit=None, pre_load=None):
1149 1153 """
1150 1154 Returns history of file as reversed list of :class:`BaseCommit`
1151 1155 objects for which file at given `path` has been modified.
1152 1156
1153 1157 :param limit: Optional. Allows to limit the size of the returned
1154 1158 history. This is intended as a hint to the underlying backend, so
1155 1159 that it can apply optimizations depending on the limit.
1156 1160 :param pre_load: Optional. List of commit attributes to load.
1157 1161 """
1158 1162 raise NotImplementedError
1159 1163
1160 1164 def get_file_annotate(self, path, pre_load=None):
1161 1165 """
1162 1166 Returns a generator of four-element tuples with
1163 1167 lineno, sha, commit lazy loader and line
1164 1168
1165 1169 :param pre_load: Optional. List of commit attributes to load.
1166 1170 """
1167 1171 raise NotImplementedError
1168 1172
1169 1173 def get_nodes(self, path):
1170 1174 """
1171 1175 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1172 1176 state of commit at the given ``path``.
1173 1177
1174 1178 :raises ``CommitError``: if node at the given ``path`` is not
1175 1179 instance of ``DirNode``
1176 1180 """
1177 1181 raise NotImplementedError
1178 1182
1179 1183 def get_node(self, path):
1180 1184 """
1181 1185 Returns ``Node`` object from the given ``path``.
1182 1186
1183 1187 :raises ``NodeDoesNotExistError``: if there is no node at the given
1184 1188 ``path``
1185 1189 """
1186 1190 raise NotImplementedError
1187 1191
1188 1192 def get_largefile_node(self, path):
1189 1193 """
1190 1194 Returns the path to a largefile from Mercurial/Git-LFS storage,
1191 1195 or None if it's not a largefile node
1192 1196 """
1193 1197 return None
1194 1198
1195 1199 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1196 1200 archive_dir_name=None, write_metadata=False, mtime=None,
1197 1201 archive_at_path='/'):
1198 1202 """
1199 1203 Creates an archive containing the contents of the repository.
1200 1204
1201 1205 :param archive_dest_path: path of the file into which the archive is written.
1202 1206 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1203 1207 :param archive_dir_name: name of root directory in archive.
1204 1208 Default is repository name and commit's short_id joined with dash:
1205 1209 ``"{repo_name}-{short_id}"``.
1206 1210 :param write_metadata: write a metadata file into archive.
1207 1211 :param mtime: custom modification time for archive creation, defaults
1208 1212 to time.time() if not given.
1209 1213 :param archive_at_path: pack files at this path (default '/')
1210 1214
1211 1215 :raise VCSError: If prefix has a problem.
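Example (a hypothetical sketch; the destination path and directory
name are placeholders)::

    commit = repo.get_commit()
    commit.archive_repo('/tmp/repo.tgz', kind='tgz',
                        archive_dir_name='repo-snapshot')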
1212 1216 """
1213 1217 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1214 1218 if kind not in allowed_kinds:
1215 1219 raise ImproperArchiveTypeError(
1216 1220 'Archive kind (%s) not supported, use one of %s' %
1217 1221 (kind, allowed_kinds))
1218 1222
1219 1223 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1220 1224 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1221 1225 commit_id = self.raw_id
1222 1226
1223 1227 return self.repository._remote.archive_repo(
1224 1228 archive_dest_path, kind, mtime, archive_at_path,
1225 1229 archive_dir_name, commit_id)
1226 1230
1227 1231 def _validate_archive_prefix(self, archive_dir_name):
1228 1232 if archive_dir_name is None:
1229 1233 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1230 1234 repo_name=safe_str(self.repository.name),
1231 1235 short_id=self.short_id)
1232 1236 elif not isinstance(archive_dir_name, str):
1233 1237 raise ValueError("prefix not a bytes object: %s" % repr(archive_dir_name))
1234 1238 elif archive_dir_name.startswith('/'):
1235 1239 raise VCSError("Prefix cannot start with leading slash")
1236 1240 elif archive_dir_name.strip() == '':
1237 1241 raise VCSError("Prefix cannot be empty")
1238 1242 return archive_dir_name
1239 1243
1240 1244 @LazyProperty
1241 1245 def root(self):
1242 1246 """
1243 1247 Returns ``RootNode`` object for this commit.
1244 1248 """
1245 1249 return self.get_node('')
1246 1250
1247 1251 def next(self, branch=None):
1248 1252 """
1249 1253 Returns the next commit from the current one; if branch is given it
1250 1254 will return the next commit belonging to this branch
1251 1255
1252 1256 :param branch: show commits within the given named branch
1253 1257 """
1254 1258 indexes = xrange(self.idx + 1, self.repository.count())
1255 1259 return self._find_next(indexes, branch)
1256 1260
1257 1261 def prev(self, branch=None):
1258 1262 """
1259 1263 Returns the previous commit from the current one; if branch is given
1260 1264 it will return the previous commit belonging to this branch
1261 1265
1262 1266 :param branch: show commit within the given named branch
1263 1267 """
1264 1268 indexes = xrange(self.idx - 1, -1, -1)
1265 1269 return self._find_next(indexes, branch)
1266 1270
1267 1271 def _find_next(self, indexes, branch=None):
1268 1272 if branch and self.branch != branch:
1269 1273 raise VCSError('Branch option used on commit not belonging '
1270 1274 'to that branch')
1271 1275
1272 1276 for next_idx in indexes:
1273 1277 commit = self.repository.get_commit(commit_idx=next_idx)
1274 1278 if branch and branch != commit.branch:
1275 1279 continue
1276 1280 return commit
1277 1281 raise CommitDoesNotExistError
1278 1282
1279 1283 def diff(self, ignore_whitespace=True, context=3):
1280 1284 """
1281 1285 Returns a `Diff` object representing the change made by this commit.
1282 1286 """
1283 1287 parent = self.first_parent
1284 1288 diff = self.repository.get_diff(
1285 1289 parent, self,
1286 1290 ignore_whitespace=ignore_whitespace,
1287 1291 context=context)
1288 1292 return diff
1289 1293
1290 1294 @LazyProperty
1291 1295 def added(self):
1292 1296 """
1293 1297 Returns list of added ``FileNode`` objects.
1294 1298 """
1295 1299 raise NotImplementedError
1296 1300
1297 1301 @LazyProperty
1298 1302 def changed(self):
1299 1303 """
1300 1304 Returns list of modified ``FileNode`` objects.
1301 1305 """
1302 1306 raise NotImplementedError
1303 1307
1304 1308 @LazyProperty
1305 1309 def removed(self):
1306 1310 """
1307 1311 Returns list of removed ``FileNode`` objects.
1308 1312 """
1309 1313 raise NotImplementedError
1310 1314
1311 1315 @LazyProperty
1312 1316 def size(self):
1313 1317 """
1314 1318 Returns total number of bytes from contents of all filenodes.
1315 1319 """
1316 1320 return sum((node.size for node in self.get_filenodes_generator()))
1317 1321
1318 1322 def walk(self, topurl=''):
1319 1323 """
1320 1324 Similar to the os.walk method. Instead of a filesystem it walks through
1321 1325 commit starting at given ``topurl``. Returns generator of tuples
1322 1326 (topnode, dirnodes, filenodes).
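Example (sketch; 'docs' is a placeholder path)::

    for topnode, dirs, files in commit.walk('docs'):
        for f in files:
            print(f.path)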
1323 1327 """
1324 1328 topnode = self.get_node(topurl)
1325 1329 if not topnode.is_dir():
1326 1330 return
1327 1331 yield (topnode, topnode.dirs, topnode.files)
1328 1332 for dirnode in topnode.dirs:
1329 1333 for tup in self.walk(dirnode.path):
1330 1334 yield tup
1331 1335
1332 1336 def get_filenodes_generator(self):
1333 1337 """
1334 1338 Returns generator that yields *all* file nodes.
1335 1339 """
1336 1340 for topnode, dirs, files in self.walk():
1337 1341 for node in files:
1338 1342 yield node
1339 1343
1340 1344 #
1341 1345 # Utilities for sub classes to support consistent behavior
1342 1346 #
1343 1347
1344 1348 def no_node_at_path(self, path):
1345 1349 return NodeDoesNotExistError(
1346 1350 u"There is no file nor directory at the given path: "
1347 1351 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1348 1352
1349 1353 def _fix_path(self, path):
1350 1354 """
1351 1355 Paths are stored without trailing slash so we need to get rid of it if
1352 1356 needed.
1353 1357 """
1354 1358 return path.rstrip('/')
1355 1359
1356 1360 #
1357 1361 # Deprecated API based on changesets
1358 1362 #
1359 1363
1360 1364 @property
1361 1365 def revision(self):
1362 1366 warnings.warn("Use idx instead", DeprecationWarning)
1363 1367 return self.idx
1364 1368
1365 1369 @revision.setter
1366 1370 def revision(self, value):
1367 1371 warnings.warn("Use idx instead", DeprecationWarning)
1368 1372 self.idx = value
1369 1373
1370 1374 def get_file_changeset(self, path):
1371 1375 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1372 1376 return self.get_path_commit(path)
1373 1377
1374 1378
1375 1379 class BaseChangesetClass(type):
1376 1380
1377 1381 def __instancecheck__(self, instance):
1378 1382 return isinstance(instance, BaseCommit)
1379 1383
1380 1384
1381 1385 class BaseChangeset(BaseCommit):
1382 1386
1383 1387 __metaclass__ = BaseChangesetClass
1384 1388
1385 1389 def __new__(cls, *args, **kwargs):
1386 1390 warnings.warn(
1387 1391 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1388 1392 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1389 1393
1390 1394
1391 1395 class BaseInMemoryCommit(object):
1392 1396 """
1393 1397 Represents differences between repository's state (most recent head) and
1394 1398 changes made *in place*.
1395 1399
1396 1400 **Attributes**
1397 1401
1398 1402 ``repository``
1399 1403 repository object for this in-memory-commit
1400 1404
1401 1405 ``added``
1402 1406 list of ``FileNode`` objects marked as *added*
1403 1407
1404 1408 ``changed``
1405 1409 list of ``FileNode`` objects marked as *changed*
1406 1410
1407 1411 ``removed``
1408 1412 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1409 1413 *removed*
1410 1414
1411 1415 ``parents``
1412 1416 list of :class:`BaseCommit` instances representing parents of
1413 1417 in-memory commit. Should always be a 2-element sequence.
1414 1418
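Typical workflow (a hypothetical sketch; the ``FileNode`` path and
content are placeholders)::

    imc = repo.in_memory_commit
    imc.add(FileNode('docs/readme.rst', content='hello'))
    new_commit = imc.commit(
        message=u'Add readme', author=u'Joe Doe <joe.doe@example.com>')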
1415 1419 """
1416 1420
1417 1421 def __init__(self, repository):
1418 1422 self.repository = repository
1419 1423 self.added = []
1420 1424 self.changed = []
1421 1425 self.removed = []
1422 1426 self.parents = []
1423 1427
1424 1428 def add(self, *filenodes):
1425 1429 """
1426 1430 Marks given ``FileNode`` objects as *to be committed*.
1427 1431
1428 1432 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1429 1433 latest commit
1430 1434 :raises ``NodeAlreadyAddedError``: if node with same path is already
1431 1435 marked as *added*
1432 1436 """
1433 1437 # Check if not already marked as *added* first
1434 1438 for node in filenodes:
1435 1439 if node.path in (n.path for n in self.added):
1436 1440 raise NodeAlreadyAddedError(
1437 1441 "Such FileNode %s is already marked for addition"
1438 1442 % node.path)
1439 1443 for node in filenodes:
1440 1444 self.added.append(node)
1441 1445
1442 1446 def change(self, *filenodes):
1443 1447 """
1444 1448 Marks given ``FileNode`` objects to be *changed* in next commit.
1445 1449
1446 1450 :raises ``EmptyRepositoryError``: if there are no commits yet
1447 1451 :raises ``NodeAlreadyChangedError``: if node with same path is already
1448 1452 marked to be *changed*
1449 1453 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1450 1454 marked to be *removed*
1451 1455 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1452 1456 commit
1453 1457 :raises ``NodeNotChangedError``: if node hasn't really been changed
1454 1458 """
1455 1459 for node in filenodes:
1456 1460 if node.path in (n.path for n in self.removed):
1457 1461 raise NodeAlreadyRemovedError(
1458 1462 "Node at %s is already marked as removed" % node.path)
1459 1463 try:
1460 1464 self.repository.get_commit()
1461 1465 except EmptyRepositoryError:
1462 1466 raise EmptyRepositoryError(
1463 1467 "Nothing to change - try to *add* new nodes rather than "
1464 1468 "changing them")
1465 1469 for node in filenodes:
1466 1470 if node.path in (n.path for n in self.changed):
1467 1471 raise NodeAlreadyChangedError(
1468 1472 "Node at '%s' is already marked as changed" % node.path)
1469 1473 self.changed.append(node)
1470 1474
1471 1475 def remove(self, *filenodes):
1472 1476 """
1473 1477 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1474 1478 *removed* in next commit.
1475 1479
1476 1480 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1477 1481 be *removed*
1478 1482 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1479 1483 be *changed*
1480 1484 """
1481 1485 for node in filenodes:
1482 1486 if node.path in (n.path for n in self.removed):
1483 1487 raise NodeAlreadyRemovedError(
1484 1488 "Node is already marked to for removal at %s" % node.path)
1485 1489 if node.path in (n.path for n in self.changed):
1486 1490 raise NodeAlreadyChangedError(
1487 1491 "Node is already marked to be changed at %s" % node.path)
1488 1492 # We only mark node as *removed* - real removal is done by
1489 1493 # commit method
1490 1494 self.removed.append(node)
1491 1495
1492 1496 def reset(self):
1493 1497 """
1494 1498 Resets this instance to initial state (cleans ``added``, ``changed``
1495 1499 and ``removed`` lists).
1496 1500 """
1497 1501 self.added = []
1498 1502 self.changed = []
1499 1503 self.removed = []
1500 1504 self.parents = []
1501 1505
1502 1506 def get_ipaths(self):
1503 1507 """
1504 1508 Returns generator of paths from nodes marked as added, changed or
1505 1509 removed.
1506 1510 """
1507 1511 for node in itertools.chain(self.added, self.changed, self.removed):
1508 1512 yield node.path
1509 1513
1510 1514 def get_paths(self):
1511 1515 """
1512 1516 Returns list of paths from nodes marked as added, changed or removed.
1513 1517 """
1514 1518 return list(self.get_ipaths())
1515 1519
1516 1520 def check_integrity(self, parents=None):
1517 1521 """
1518 1522 Checks in-memory commit's integrity. Also, sets parents if not
1519 1523 already set.
1520 1524
1521 1525 :raises CommitError: if any error occurs (i.e.
1522 1526 ``NodeDoesNotExistError``).
1523 1527 """
1524 1528 if not self.parents:
1525 1529 parents = parents or []
1526 1530 if len(parents) == 0:
1527 1531 try:
1528 1532 parents = [self.repository.get_commit(), None]
1529 1533 except EmptyRepositoryError:
1530 1534 parents = [None, None]
1531 1535 elif len(parents) == 1:
1532 1536 parents += [None]
1533 1537 self.parents = parents
1534 1538
1535 1539 # Local parents, only if not None
1536 1540 parents = [p for p in self.parents if p]
1537 1541
1538 1542 # Check nodes marked as added
1539 1543 for p in parents:
1540 1544 for node in self.added:
1541 1545 try:
1542 1546 p.get_node(node.path)
1543 1547 except NodeDoesNotExistError:
1544 1548 pass
1545 1549 else:
1546 1550 raise NodeAlreadyExistsError(
1547 1551 "Node `%s` already exists at %s" % (node.path, p))
1548 1552
1549 1553 # Check nodes marked as changed
1550 1554 missing = set(self.changed)
1551 1555 not_changed = set(self.changed)
1552 1556 if self.changed and not parents:
1553 1557 raise NodeDoesNotExistError(str(self.changed[0].path))
1554 1558 for p in parents:
1555 1559 for node in self.changed:
1556 1560 try:
1557 1561 old = p.get_node(node.path)
1558 1562 missing.remove(node)
1559 1563 # if content actually changed, remove node from not_changed
1560 1564 if old.content != node.content:
1561 1565 not_changed.remove(node)
1562 1566 except NodeDoesNotExistError:
1563 1567 pass
1564 1568 if self.changed and missing:
1565 1569 raise NodeDoesNotExistError(
1566 1570 "Node `%s` marked as modified but missing in parents: %s"
1567 1571 % (node.path, parents))
1568 1572
1569 1573 if self.changed and not_changed:
1570 1574 raise NodeNotChangedError(
1571 1575 "Node `%s` wasn't actually changed (parents: %s)"
1572 1576 % (not_changed.pop().path, parents))
1573 1577
1574 1578 # Check nodes marked as removed
1575 1579 if self.removed and not parents:
1576 1580 raise NodeDoesNotExistError(
1577 1581 "Cannot remove node at %s as there "
1578 1582 "were no parents specified" % self.removed[0].path)
1579 1583 really_removed = set()
1580 1584 for p in parents:
1581 1585 for node in self.removed:
1582 1586 try:
1583 1587 p.get_node(node.path)
1584 1588 really_removed.add(node)
1585 1589 except CommitError:
1586 1590 pass
1587 1591 not_removed = set(self.removed) - really_removed
1588 1592 if not_removed:
1589 1593 # TODO: johbo: This code branch does not seem to be covered
1590 1594 raise NodeDoesNotExistError(
1591 1595 "Cannot remove node at %s from "
1592 1596 "following parents: %s" % (not_removed, parents))
1593 1597
1594 1598 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1595 1599 """
1596 1600 Performs in-memory commit (doesn't check workdir in any way) and
1597 1601 returns newly created :class:`BaseCommit`. Updates repository's
1598 1602 attribute `commits`.
1599 1603
1600 1604 .. note::
1601 1605
1602 1606 While overriding this method each backend should call
1603 1607 ``self.check_integrity(parents)`` in the first place.
1604 1608
1605 1609 :param message: message of the commit
1606 1610 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1607 1611 :param parents: single parent or sequence of parents from which commit
1608 1612 would be derived
1609 1613 :param date: ``datetime.datetime`` instance. Defaults to
1610 1614 ``datetime.datetime.now()``.
1611 1615 :param branch: branch name, as string. If none given, the backend's
1612 1616 default branch is used.
1613 1617
1614 1618 :raises ``CommitError``: if any error occurs while committing
1615 1619 """
1616 1620 raise NotImplementedError
1617 1621
1618 1622
1619 1623 class BaseInMemoryChangesetClass(type):
1620 1624
1621 1625 def __instancecheck__(self, instance):
1622 1626 return isinstance(instance, BaseInMemoryCommit)
1623 1627
1624 1628
1625 1629 class BaseInMemoryChangeset(BaseInMemoryCommit):
1626 1630
1627 1631 __metaclass__ = BaseInMemoryChangesetClass
1628 1632
1629 1633 def __new__(cls, *args, **kwargs):
1630 1634 warnings.warn(
1631 1635 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1632 1636 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1633 1637
1634 1638
1635 1639 class EmptyCommit(BaseCommit):
1636 1640 """
1637 1641 A dummy empty commit. It's possible to pass a hash when creating
1638 1642 an EmptyCommit
1639 1643 """
1640 1644
1641 1645 def __init__(
1642 1646 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1643 1647 message='', author='', date=None):
1644 1648 self._empty_commit_id = commit_id
1645 1649 # TODO: johbo: Solve idx parameter, default value does not make
1646 1650 # too much sense
1647 1651 self.idx = idx
1648 1652 self.message = message
1649 1653 self.author = author
1650 1654 self.date = date or datetime.datetime.fromtimestamp(0)
1651 1655 self.repository = repo
1652 1656 self.alias = alias
1653 1657
1654 1658 @LazyProperty
1655 1659 def raw_id(self):
1656 1660 """
1657 1661 Returns raw string identifying this commit, useful for web
1658 1662 representation.
1659 1663 """
1660 1664
1661 1665 return self._empty_commit_id
1662 1666
1663 1667 @LazyProperty
1664 1668 def branch(self):
1665 1669 if self.alias:
1666 1670 from rhodecode.lib.vcs.backends import get_backend
1667 1671 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1668 1672
1669 1673 @LazyProperty
1670 1674 def short_id(self):
1671 1675 return self.raw_id[:12]
1672 1676
1673 1677 @LazyProperty
1674 1678 def id(self):
1675 1679 return self.raw_id
1676 1680
1677 1681 def get_path_commit(self, path):
1678 1682 return self
1679 1683
1680 1684 def get_file_content(self, path):
1681 1685 return u''
1682 1686
1683 1687 def get_file_content_streamed(self, path):
1684 1688 yield self.get_file_content()
1685 1689
1686 1690 def get_file_size(self, path):
1687 1691 return 0
1688 1692
1689 1693
1690 1694 class EmptyChangesetClass(type):
1691 1695
1692 1696 def __instancecheck__(self, instance):
1693 1697 return isinstance(instance, EmptyCommit)
1694 1698
1695 1699
1696 1700 class EmptyChangeset(EmptyCommit):
1697 1701
1698 1702 __metaclass__ = EmptyChangesetClass
1699 1703
1700 1704 def __new__(cls, *args, **kwargs):
1701 1705 warnings.warn(
1702 1706 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1703 1707 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1704 1708
1705 1709 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1706 1710 alias=None, revision=-1, message='', author='', date=None):
1707 1711 if requested_revision is not None:
1708 1712 warnings.warn(
1709 1713 "Parameter requested_revision not supported anymore",
1710 1714 DeprecationWarning)
1711 1715 super(EmptyChangeset, self).__init__(
1712 1716 commit_id=cs, repo=repo, alias=alias, idx=revision,
1713 1717 message=message, author=author, date=date)
1714 1718
1715 1719 @property
1716 1720 def revision(self):
1717 1721 warnings.warn("Use idx instead", DeprecationWarning)
1718 1722 return self.idx
1719 1723
1720 1724 @revision.setter
1721 1725 def revision(self, value):
1722 1726 warnings.warn("Use idx instead", DeprecationWarning)
1723 1727 self.idx = value
1724 1728
1725 1729
1726 1730 class EmptyRepository(BaseRepository):
1727 1731 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1728 1732 pass
1729 1733
1730 1734 def get_diff(self, *args, **kwargs):
1731 1735 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1732 1736 return GitDiff('')
1733 1737
1734 1738
1735 1739 class CollectionGenerator(object):
1736 1740
1737 1741 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1738 1742 self.repo = repo
1739 1743 self.commit_ids = commit_ids
1740 1744 # TODO: (oliver) this isn't currently hooked up
1741 1745 self.collection_size = None
1742 1746 self.pre_load = pre_load
1743 1747 self.translate_tag = translate_tag
1744 1748
1745 1749 def __len__(self):
1746 1750 if self.collection_size is not None:
1747 1751 return self.collection_size
1748 1752 return self.commit_ids.__len__()
1749 1753
1750 1754 def __iter__(self):
1751 1755 for commit_id in self.commit_ids:
1752 1756 # TODO: johbo: Mercurial passes in commit indices or commit ids
1753 1757 yield self._commit_factory(commit_id)
1754 1758
1755 1759 def _commit_factory(self, commit_id):
1756 1760 """
1757 1761 Allows backends to override the way commits are generated.
1758 1762 """
1759 1763 return self.repo.get_commit(
1760 1764 commit_id=commit_id, pre_load=self.pre_load,
1761 1765 translate_tag=self.translate_tag)
1762 1766
1763 1767 def __getslice__(self, i, j):
1764 1768 """
1765 1769 Returns a new CollectionGenerator over the sliced commit ids
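Example (sketch)::

    recent = repo.get_commits()[:10]  # lazy; commits load on iteration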
1766 1770 """
1767 1771 commit_ids = self.commit_ids[i:j]
1768 1772 return self.__class__(
1769 1773 self.repo, commit_ids, pre_load=self.pre_load,
1770 1774 translate_tag=self.translate_tag)
1771 1775
1772 1776 def __repr__(self):
1773 1777 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1774 1778
1775 1779
1776 1780 class Config(object):
1777 1781 """
1778 1782 Represents the configuration for a repository.
1779 1783
1780 1784 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1781 1785 standard library. It implements only the needed subset.
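Example (sketch; the section/option values are placeholders)::

    config = Config()
    config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
    config.get('ui', 'username')  # -> 'Joe Doe <joe.doe@example.com>'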
1782 1786 """
1783 1787
1784 1788 def __init__(self):
1785 1789 self._values = {}
1786 1790
1787 1791 def copy(self):
1788 1792 clone = Config()
1789 1793 for section, values in self._values.items():
1790 1794 clone._values[section] = values.copy()
1791 1795 return clone
1792 1796
1793 1797 def __repr__(self):
1794 1798 return '<Config(%s sections) at %s>' % (
1795 1799 len(self._values), hex(id(self)))
1796 1800
1797 1801 def items(self, section):
1798 1802 return self._values.get(section, {}).iteritems()
1799 1803
1800 1804 def get(self, section, option):
1801 1805 return self._values.get(section, {}).get(option)
1802 1806
1803 1807 def set(self, section, option, value):
1804 1808 section_values = self._values.setdefault(section, {})
1805 1809 section_values[option] = value
1806 1810
1807 1811 def clear_section(self, section):
1808 1812 self._values[section] = {}
1809 1813
1810 1814 def serialize(self):
1811 1815 """
1812 1816 Creates a list of three-element tuples (section, key, value) representing
1813 1817 this config object.
1814 1818 """
1815 1819 items = []
1816 1820 for section in self._values:
1817 1821 for option, value in self._values[section].items():
1818 1822 items.append(
1819 1823 (safe_str(section), safe_str(option), safe_str(value)))
1820 1824 return items
1821 1825
1822 1826
1823 1827 class Diff(object):
1824 1828 """
1825 1829 Represents a diff result from a repository backend.
1826 1830
1827 1831 Subclasses have to provide a backend specific value for
1828 1832 :attr:`_header_re` and :attr:`_meta_re`.
1829 1833 """
1830 1834 _meta_re = None
1831 1835 _header_re = None
1832 1836
1833 1837 def __init__(self, raw_diff):
1834 1838 self.raw = raw_diff
1835 1839
1836 1840 def chunks(self):
1837 1841 """
1838 1842 Split the diff into chunks, one per ``diff --git a/file b/file`` section.
1839 1843 To keep the chunks consistent we must prepend a \n, and make sure
1840 1844 we can detect the last chunk, as it also has a special rule
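Example (sketch; ``raw_diff`` is a placeholder and ``handle`` is a
hypothetical callback; header group names depend on the backend's
``_header_re``)::

    for chunk in GitDiff(raw_diff).chunks():
        handle(chunk.header, chunk.diff)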
1841 1845 """
1842 1846
1843 1847 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1844 1848 header = diff_parts[0]
1845 1849
1846 1850 if self._meta_re:
1847 1851 match = self._meta_re.match(header)
1848 1852
1849 1853 chunks = diff_parts[1:]
1850 1854 total_chunks = len(chunks)
1851 1855
1852 1856 return (
1853 1857 DiffChunk(chunk, self, cur_chunk == total_chunks)
1854 1858 for cur_chunk, chunk in enumerate(chunks, start=1))
1855 1859
1856 1860
1857 1861 class DiffChunk(object):
1858 1862
1859 1863 def __init__(self, chunk, diff, last_chunk):
1860 1864 self._diff = diff
1861 1865
1862 1866 # since we split by \ndiff --git, that part is lost from the original diff;
1863 1867 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1864 1868 if not last_chunk:
1865 1869 chunk += '\n'
1866 1870
1867 1871 match = self._diff._header_re.match(chunk)
1868 1872 self.header = match.groupdict()
1869 1873 self.diff = chunk[match.end():]
1870 1874 self.raw = chunk
1871 1875
1872 1876
1873 1877 class BasePathPermissionChecker(object):
1874 1878
1875 1879 @staticmethod
1876 1880 def create_from_patterns(includes, excludes):
1877 1881 if includes and '*' in includes and not excludes:
1878 1882 return AllPathPermissionChecker()
1879 1883 elif excludes and '*' in excludes:
1880 1884 return NonePathPermissionChecker()
1881 1885 else:
1882 1886 return PatternPathPermissionChecker(includes, excludes)
1883 1887
1884 1888 @property
1885 1889 def has_full_access(self):
1886 1890 raise NotImplementedError()
1887 1891
1888 1892 def has_access(self, path):
1889 1893 raise NotImplementedError()
1890 1894
1891 1895
1892 1896 class AllPathPermissionChecker(BasePathPermissionChecker):
1893 1897
1894 1898 @property
1895 1899 def has_full_access(self):
1896 1900 return True
1897 1901
1898 1902 def has_access(self, path):
1899 1903 return True
1900 1904
1901 1905
1902 1906 class NonePathPermissionChecker(BasePathPermissionChecker):
1903 1907
1904 1908 @property
1905 1909 def has_full_access(self):
1906 1910 return False
1907 1911
1908 1912 def has_access(self, path):
1909 1913 return False
1910 1914
1911 1915
1912 1916 class PatternPathPermissionChecker(BasePathPermissionChecker):
1913 1917
1914 1918 def __init__(self, includes, excludes):
1915 1919 self.includes = includes
1916 1920 self.excludes = excludes
1917 1921 self.includes_re = [] if not includes else [
1918 1922 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1919 1923 self.excludes_re = [] if not excludes else [
1920 1924 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1921 1925
1922 1926 @property
1923 1927 def has_full_access(self):
1924 1928 return '*' in self.includes and not self.excludes
1925 1929
1926 1930 def has_access(self, path):
1927 1931 for regex in self.excludes_re:
1928 1932 if regex.match(path):
1929 1933 return False
1930 1934 for regex in self.includes_re:
1931 1935 if regex.match(path):
1932 1936 return True
1933 1937 return False
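# Example (a hypothetical sketch of the pattern based checker; the
# patterns and paths are placeholders)::
#
#     checker = BasePathPermissionChecker.create_from_patterns(
#         includes=['docs/*'], excludes=['docs/secret/*'])
#     checker.has_access('docs/index.rst')       # True
#     checker.has_access('docs/secret/key.pem')  # False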
@@ -1,1034 +1,1043 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 do_workspace_checkout=False, with_wire=None, bare=False):
63 63
64 64 self.path = safe_str(os.path.abspath(repo_path))
65 65 self.config = config if config else self.get_default_config()
66 66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 67
68 68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 69
70 70 # caches
71 71 self._commit_ids = {}
72 72
73 73 @LazyProperty
74 74 def _remote(self):
75 75 repo_id = self.path
76 76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
78 78 @LazyProperty
79 79 def bare(self):
80 80 return self._remote.bare()
81 81
82 82 @LazyProperty
83 83 def head(self):
84 84 return self._remote.head()
85 85
86 86 @CachedProperty
87 87 def commit_ids(self):
88 88 """
89 89 Returns list of commit ids, in ascending order. Being a lazy
90 90 attribute allows external tools to inject commit ids from cache.
91 91 """
92 92 commit_ids = self._get_all_commit_ids()
93 93 self._rebuild_cache(commit_ids)
94 94 return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
100 100 def run_git_command(self, cmd, **opts):
101 101 """
102 102 Runs given ``cmd`` as git command and returns tuple
103 103 (stdout, stderr).
104 104
105 105 :param cmd: git command to be executed
106 106 :param opts: env options to pass into Subprocess command
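Example (sketch)::

    stdout, stderr = repo.run_git_command(['rev-parse', 'HEAD'])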
107 107 """
108 108 if not isinstance(cmd, list):
109 109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 110
111 111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 112 out, err = self._remote.run_git_command(cmd, **opts)
113 113 if err and not skip_stderr_log:
114 114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 115 return out, err
116 116
117 117 @staticmethod
118 118 def check_url(url, config):
119 119 """
120 120 Function will check the given url and try to verify that it's a valid
121 121 link. Sometimes it may happen that git will issue a basic
122 122 auth request that can cause the whole API to hang when used from python
123 123 or other external calls.
124 124
125 125 On failure it'll raise urllib2.HTTPError; the exception is also thrown
126 126 when the return code is not 200
127 127 """
128 128 # first check if it's not a url
129 129 if os.path.isdir(url) or url.startswith('file:'):
130 130 return True
131 131
132 132 if '+' in url.split('://', 1)[0]:
133 133 url = url.split('+', 1)[1]
134 134
135 135 # Request the _remote to verify the url
136 136 return connection.Git.check_url(url, config.serialize())
137 137
138 138 @staticmethod
139 139 def is_valid_repository(path):
140 140 if os.path.isdir(os.path.join(path, '.git')):
141 141 return True
142 142 # check case of bare repository
143 143 try:
144 144 GitRepository(path)
145 145 return True
146 146 except VCSError:
147 147 pass
148 148 return False
149 149
150 150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 151 bare=False):
152 152 if create and os.path.exists(self.path):
153 153 raise RepositoryError(
154 154 "Cannot create repository at %s, location already exist"
155 155 % self.path)
156 156
157 157 if bare and do_workspace_checkout:
158 158 raise RepositoryError("Cannot update a bare repository")
159 159 try:
160 160
161 161 if src_url:
162 162 # check URL before any actions
163 163 GitRepository.check_url(src_url, self.config)
164 164
165 165 if create:
166 166 os.makedirs(self.path, mode=0o755)
167 167
168 168 if bare:
169 169 self._remote.init_bare()
170 170 else:
171 171 self._remote.init()
172 172
173 173 if src_url and bare:
174 174 # a bare repository only allows a fetch; a checkout is not allowed
175 175 self.fetch(src_url, commit_ids=None)
176 176 elif src_url:
177 177 self.pull(src_url, commit_ids=None,
178 178 update_after=do_workspace_checkout)
179 179
180 180 else:
181 181 if not self._remote.assert_correct_path():
182 182 raise RepositoryError(
183 183 'Path "%s" does not contain a Git repository' %
184 184 (self.path,))
185 185
186 186 # TODO: johbo: check if we have to translate the OSError here
187 187 except OSError as err:
188 188 raise RepositoryError(err)
189 189
190 190 def _get_all_commit_ids(self):
191 191 return self._remote.get_all_commit_ids()
192 192
193 193 def _get_commit_ids(self, filters=None):
194 194 # we must check if this repo is not empty, since the later command
195 195 # fails if it is. And it's cheaper to ask than to handle the subprocess
196 196 # errors
197 197
198 198 head = self._remote.head(show_exc=False)
199 199
200 200 if not head:
201 201 return []
202 202
203 203 rev_filter = ['--branches', '--tags']
204 204 extra_filter = []
205 205
206 206 if filters:
207 207 if filters.get('since'):
208 208 extra_filter.append('--since=%s' % (filters['since']))
209 209 if filters.get('until'):
210 210 extra_filter.append('--until=%s' % (filters['until']))
211 211 if filters.get('branch_name'):
212 212 rev_filter = []
213 213 extra_filter.append(filters['branch_name'])
214 214 rev_filter.extend(extra_filter)
215 215
216 216 # if filters.get('start') or filters.get('end'):
217 217 # # skip is offset, max-count is limit
218 218 # if filters.get('start'):
219 219 # extra_filter += ' --skip=%s' % filters['start']
220 220 # if filters.get('end'):
221 221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 222
223 223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 224 try:
225 225 output, __ = self.run_git_command(cmd)
226 226 except RepositoryError:
227 227 # Can be raised for empty repositories
228 228 return []
229 229 return output.splitlines()
230 230
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
232
232 233 def is_null(value):
233 234 return len(value) == commit_id_or_idx.count('0')
234 235
235 236 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 237 return self.commit_ids[-1]
237 238
238 239 commit_missing_err = "Commit {} does not exist for `{}`".format(
239 240 *map(safe_str, [commit_id_or_idx, self.name]))
240 241
241 242 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
242 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
243 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 is_branch = reference_obj and reference_obj.branch
244 is_numeric_idx = \
245 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
246 or isinstance(commit_id_or_idx, int)
247
248 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
244 249 try:
245 250 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
246 251 except Exception:
247 252 raise CommitDoesNotExistError(commit_missing_err)
248 253
249 254 elif is_bstr:
250 # Need to call remote to translate id for tagging scenario
255 # Need to call remote to translate id for tagging scenarios,
256 # or branch that are numeric
251 257 try:
252 258 remote_data = self._remote.get_object(commit_id_or_idx,
253 259 maybe_unreachable=maybe_unreachable)
254 260 commit_id_or_idx = remote_data["commit_id"]
255 261 except (CommitDoesNotExistError,):
256 262 raise CommitDoesNotExistError(commit_missing_err)
257 263
258 264 # Ensure we return full id
259 265 if not SHA_PATTERN.match(str(commit_id_or_idx)):
260 266 raise CommitDoesNotExistError(
261 267 "Given commit id %s not recognized" % commit_id_or_idx)
262 268 return commit_id_or_idx
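# Sketch of why ``reference_obj`` matters (hypothetical values): a branch
# literally named '123' must not be resolved as commit index 123, so
# callers pass the Reference along, e.g.::
#
#     ref = Reference('branch', '123', None)
#     repo._lookup_commit('123', reference_obj=ref)  # resolves branch '123'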
263 269
264 270 def get_hook_location(self):
265 271 """
266 272 returns absolute path to location where hooks are stored
267 273 """
268 274 loc = os.path.join(self.path, 'hooks')
269 275 if not self.bare:
270 276 loc = os.path.join(self.path, '.git', 'hooks')
271 277 return loc
272 278
273 279 @LazyProperty
274 280 def last_change(self):
275 281 """
276 282 Returns last change made on this repository as
277 283 `datetime.datetime` object.
278 284 """
279 285 try:
280 286 return self.get_commit().date
281 287 except RepositoryError:
282 288 tzoffset = makedate()[1]
283 289 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
284 290
285 291 def _get_fs_mtime(self):
286 292 idx_loc = '' if self.bare else '.git'
287 293 # fallback to filesystem
288 294 in_path = os.path.join(self.path, idx_loc, "index")
289 295 he_path = os.path.join(self.path, idx_loc, "HEAD")
290 296 if os.path.exists(in_path):
291 297 return os.stat(in_path).st_mtime
292 298 else:
293 299 return os.stat(he_path).st_mtime
294 300
295 301 @LazyProperty
296 302 def description(self):
297 303 description = self._remote.get_description()
298 304 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
299 305
300 306 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
301 307 if self.is_empty():
302 308 return OrderedDict()
303 309
304 310 result = []
305 311 for ref, sha in self._refs.iteritems():
306 312 if ref.startswith(prefix):
307 313 ref_name = ref
308 314 if strip_prefix:
309 315 ref_name = ref[len(prefix):]
310 316 result.append((safe_unicode(ref_name), sha))
311 317
312 318 def get_name(entry):
313 319 return entry[0]
314 320
315 321 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
316 322
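# An illustrative sketch of the prefix handling above (shas shortened,
# values hypothetical):
#
#   >>> repo._refs
#   {'refs/heads/master': 'abc123...', 'refs/tags/v1.0': 'def456...'}
#   >>> repo._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
#   OrderedDict([(u'master', 'abc123...')])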
317 323 def _get_branches(self):
318 324 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
319 325
320 326 @CachedProperty
321 327 def branches(self):
322 328 return self._get_branches()
323 329
324 330 @CachedProperty
325 331 def branches_closed(self):
326 332 return {}
327 333
328 334 @CachedProperty
329 335 def bookmarks(self):
330 336 return {}
331 337
332 338 @CachedProperty
333 339 def branches_all(self):
334 340 all_branches = {}
335 341 all_branches.update(self.branches)
336 342 all_branches.update(self.branches_closed)
337 343 return all_branches
338 344
339 345 @CachedProperty
340 346 def tags(self):
341 347 return self._get_tags()
342 348
343 349 def _get_tags(self):
344 350 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
345 351
346 352 def tag(self, name, user, commit_id=None, message=None, date=None,
347 353 **kwargs):
348 354 # TODO: fix this method to apply annotated tags correctly with message
349 355 """
350 356 Creates and returns a tag for the given ``commit_id``.
351 357
352 358 :param name: name for new tag
353 359 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
354 360 :param commit_id: commit id for which new tag would be created
355 361 :param message: message of the tag's commit
356 362 :param date: date of tag's commit
357 363
358 364 :raises TagAlreadyExistError: if tag with same name already exists
359 365 """
360 366 if name in self.tags:
361 367 raise TagAlreadyExistError("Tag %s already exists" % name)
362 368 commit = self.get_commit(commit_id=commit_id)
363 369 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
364 370
365 371 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
366 372
367 373 self._invalidate_prop_cache('tags')
368 374 self._invalidate_prop_cache('_refs')
369 375
370 376 return commit
371 377
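# A usage sketch for ``tag()`` (name and sha hypothetical). The call
# writes refs/tags/<name> via the remote and invalidates the cached
# 'tags' and '_refs' properties so the new ref is visible right away:
#
#   >>> repo.tag('v1.0.0', 'Joe Doe <joe.doe@example.com>',
#   ...          commit_id='deadbeef' * 5)
#   >>> 'v1.0.0' in repo.tags
#   True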
372 378 def remove_tag(self, name, user, message=None, date=None):
373 379 """
374 380 Removes tag with the given ``name``.
375 381
376 382 :param name: name of the tag to be removed
377 383 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
378 384 :param message: message of the tag's removal commit
379 385 :param date: date of tag's removal commit
380 386
381 387 :raises TagDoesNotExistError: if tag with given name does not exist
382 388 """
383 389 if name not in self.tags:
384 390 raise TagDoesNotExistError("Tag %s does not exist" % name)
385 391
386 392 self._remote.tag_remove(name)
387 393 self._invalidate_prop_cache('tags')
388 394 self._invalidate_prop_cache('_refs')
389 395
390 396 def _get_refs(self):
391 397 return self._remote.get_refs()
392 398
393 399 @CachedProperty
394 400 def _refs(self):
395 401 return self._get_refs()
396 402
397 403 @property
398 404 def _ref_tree(self):
399 405 node = tree = {}
400 406 for ref, sha in self._refs.iteritems():
401 407 path = ref.split('/')
402 408 for bit in path[:-1]:
403 409 node = node.setdefault(bit, {})
404 410 node[path[-1]] = sha
405 411 node = tree
406 412 return tree
407 413
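# A small sketch of the tree shape produced above (sha hypothetical):
#
#   >>> repo._refs
#   {'refs/heads/master': 'abc123...'}
#   >>> repo._ref_tree
#   {'refs': {'heads': {'master': 'abc123...'}}}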
408 414 def get_remote_ref(self, ref_name):
409 415 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
410 416 try:
411 417 return self._refs[ref_key]
412 418 except Exception:
413 419 return
414 420
415 421 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
416 translate_tag=True, maybe_unreachable=False):
422 translate_tag=True, maybe_unreachable=False, reference_obj=None):
417 423 """
418 424 Returns `GitCommit` object representing commit from git repository
419 425 at the given `commit_id` or head (most recent commit) if None given.
420 426 """
427
421 428 if self.is_empty():
422 429 raise EmptyRepositoryError("There are no commits yet")
423 430
424 431 if commit_id is not None:
425 432 self._validate_commit_id(commit_id)
426 433 try:
427 434 # we have cached idx, use it without contacting the remote
428 435 idx = self._commit_ids[commit_id]
429 436 return GitCommit(self, commit_id, idx, pre_load=pre_load)
430 437 except KeyError:
431 438 pass
432 439
433 440 elif commit_idx is not None:
434 441 self._validate_commit_idx(commit_idx)
435 442 try:
436 443 _commit_id = self.commit_ids[commit_idx]
437 444 if commit_idx < 0:
438 445 commit_idx = self.commit_ids.index(_commit_id)
439 446 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
440 447 except IndexError:
441 448 commit_id = commit_idx
442 449 else:
443 450 commit_id = "tip"
444 451
445 452 if translate_tag:
446 commit_id = self._lookup_commit(commit_id, maybe_unreachable=maybe_unreachable)
453 commit_id = self._lookup_commit(
454 commit_id, maybe_unreachable=maybe_unreachable,
455 reference_obj=reference_obj)
447 456
448 457 try:
449 458 idx = self._commit_ids[commit_id]
450 459 except KeyError:
451 460 idx = -1
452 461
453 462 return GitCommit(self, commit_id, idx, pre_load=pre_load)
454 463
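# A usage sketch (sha and branch name hypothetical): a branch whose name
# is purely numeric must now be resolved by passing the reference
# explicitly, otherwise the value is read as a commit index:
#
#   >>> repo.get_commit(commit_idx=0)                 # first commit
#   >>> repo.get_commit(commit_id='deadbeef' * 5)     # lookup by sha
#   >>> ref = Reference('branch', '2020', None)
#   >>> repo.get_commit(commit_id='2020', reference_obj=ref)
#   # resolved as branch '2020', not as commit index 2020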
455 464 def get_commits(
456 465 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 466 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
458 467 """
459 468 Returns generator of `GitCommit` objects from start to end (both
460 469 are inclusive), in ascending date order.
461 470
462 471 :param start_id: None, str(commit_id)
463 472 :param end_id: None, str(commit_id)
464 473 :param start_date: if specified, commits with commit date less than
465 474 ``start_date`` would be filtered out from returned set
466 475 :param end_date: if specified, commits with commit date greater than
467 476 ``end_date`` would be filtered out from returned set
468 477 :param branch_name: if specified, commits not reachable from given
469 478 branch would be filtered out from returned set
470 479 :param show_hidden: Show hidden commits such as obsolete or hidden from
471 480 Mercurial evolve
472 481 :raise BranchDoesNotExistError: If given `branch_name` does not
473 482 exist.
474 483 :raise CommitDoesNotExistError: If commits for given `start` or
475 484 `end` could not be found.
476 485
477 486 """
478 487 if self.is_empty():
479 488 raise EmptyRepositoryError("There are no commits yet")
480 489
481 490 self._validate_branch_name(branch_name)
482 491
483 492 if start_id is not None:
484 493 self._validate_commit_id(start_id)
485 494 if end_id is not None:
486 495 self._validate_commit_id(end_id)
487 496
488 497 start_raw_id = self._lookup_commit(start_id)
489 498 start_pos = self._commit_ids[start_raw_id] if start_id else None
490 499 end_raw_id = self._lookup_commit(end_id)
491 500 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
492 501
493 502 if None not in [start_id, end_id] and start_pos > end_pos:
494 503 raise RepositoryError(
495 504 "Start commit '%s' cannot be after end commit '%s'" %
496 505 (start_id, end_id))
497 506
498 507 if end_pos is not None:
499 508 end_pos += 1
500 509
501 510 filter_ = []
502 511 if branch_name:
503 512 filter_.append({'branch_name': branch_name})
504 513 if start_date and not end_date:
505 514 filter_.append({'since': start_date})
506 515 if end_date and not start_date:
507 516 filter_.append({'until': end_date})
508 517 if start_date and end_date:
509 518 filter_.append({'since': start_date})
510 519 filter_.append({'until': end_date})
511 520
512 521 # if start_pos or end_pos:
513 522 # filter_.append({'start': start_pos})
514 523 # filter_.append({'end': end_pos})
515 524
516 525 if filter_:
517 526 revfilters = {
518 527 'branch_name': branch_name,
519 528 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
520 529 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
521 530 'start': start_pos,
522 531 'end': end_pos,
523 532 }
524 533 commit_ids = self._get_commit_ids(filters=revfilters)
525 534
526 535 else:
527 536 commit_ids = self.commit_ids
528 537
529 538 if start_pos or end_pos:
530 539 commit_ids = commit_ids[start_pos: end_pos]
531 540
532 541 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
533 542 translate_tag=translate_tags)
534 543
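# A sketch of the filters assembled above for a date-bounded query
# (dates hypothetical):
#
#   >>> import datetime
#   >>> repo.get_commits(branch_name='master',
#   ...                  start_date=datetime.datetime(2020, 1, 1),
#   ...                  end_date=datetime.datetime(2020, 6, 1))
#   # builds revfilters roughly like:
#   # {'branch_name': 'master', 'since': '01/01/20 00:00:00',
#   #  'until': '06/01/20 00:00:00', 'start': None, 'end': None}
#   # and passes them to self._get_commit_ids()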
535 544 def get_diff(
536 545 self, commit1, commit2, path='', ignore_whitespace=False,
537 546 context=3, path1=None):
538 547 """
539 548 Returns (git like) *diff*, as plain text. Shows changes introduced by
540 549 ``commit2`` since ``commit1``.
541 550
542 551 :param commit1: Entry point from which diff is shown. Can be
543 552 ``self.EMPTY_COMMIT`` - in this case, patch showing all
544 553 the changes since empty state of the repository until ``commit2``
545 554 :param commit2: Until which commit changes should be shown.
546 555 :param ignore_whitespace: If set to ``True``, would not show whitespace
547 556 changes. Defaults to ``False``.
548 557 :param context: How many lines before/after changed lines should be
549 558 shown. Defaults to ``3``.
550 559 """
551 560 self._validate_diff_commits(commit1, commit2)
552 561 if path1 is not None and path1 != path:
553 562 raise ValueError("Diff of two different paths not supported.")
554 563
555 564 if path:
556 565 file_filter = path
557 566 else:
558 567 file_filter = None
559 568
560 569 diff = self._remote.diff(
561 570 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
562 571 opt_ignorews=ignore_whitespace,
563 572 context=context)
564 573 return GitDiff(diff)
565 574
566 575 def strip(self, commit_id, branch_name):
567 576 commit = self.get_commit(commit_id=commit_id)
568 577 if commit.merge:
569 578 raise Exception('Cannot reset to merge commit')
570 579
571 580 # parent is going to be the new head now
572 581 commit = commit.parents[0]
573 582 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574 583
575 584 # clear cached properties
576 585 self._invalidate_prop_cache('commit_ids')
577 586 self._invalidate_prop_cache('_refs')
578 587 self._invalidate_prop_cache('branches')
579 588
580 589 return len(self.commit_ids)
581 590
582 591 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
583 592 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
584 593 self, commit_id1, repo2, commit_id2)
585 594
586 595 if commit_id1 == commit_id2:
587 596 return commit_id1
588 597
589 598 if self != repo2:
590 599 commits = self._remote.get_missing_revs(
591 600 commit_id1, commit_id2, repo2.path)
592 601 if commits:
593 602 commit = repo2.get_commit(commits[-1])
594 603 if commit.parents:
595 604 ancestor_id = commit.parents[0].raw_id
596 605 else:
597 606 ancestor_id = None
598 607 else:
599 608 # no commits from other repo, ancestor_id is the commit_id2
600 609 ancestor_id = commit_id2
601 610 else:
602 611 output, __ = self.run_git_command(
603 612 ['merge-base', commit_id1, commit_id2])
604 613 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
605 614
606 615 log.debug('Found common ancestor with sha: %s', ancestor_id)
607 616
608 617 return ancestor_id
609 618
610 619 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
611 620 repo1 = self
612 621 ancestor_id = None
613 622
614 623 if commit_id1 == commit_id2:
615 624 commits = []
616 625 elif repo1 != repo2:
617 626 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
618 627 repo2.path)
619 628 commits = [
620 629 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
621 630 for commit_id in reversed(missing_ids)]
622 631 else:
623 632 output, __ = repo1.run_git_command(
624 633 ['log', '--reverse', '--pretty=format: %H', '-s',
625 634 '%s..%s' % (commit_id1, commit_id2)])
626 635 commits = [
627 636 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
628 637 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
629 638
630 639 return commits
631 640
632 641 @LazyProperty
633 642 def in_memory_commit(self):
634 643 """
635 644 Returns ``GitInMemoryCommit`` object for this repository.
636 645 """
637 646 return GitInMemoryCommit(self)
638 647
639 648 def pull(self, url, commit_ids=None, update_after=False):
640 649 """
641 650 Pull changes from external location. In GIT, pull is different
642 651 from fetch since it also does a checkout
643 652
644 653 :param commit_ids: Optional. Can be set to a list of commit ids
645 654 which shall be pulled from the other repository.
646 655 """
647 656 refs = None
648 657 if commit_ids is not None:
649 658 remote_refs = self._remote.get_remote_refs(url)
650 659 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
651 660 self._remote.pull(url, refs=refs, update_after=update_after)
652 661 self._remote.invalidate_vcs_cache()
653 662
654 663 def fetch(self, url, commit_ids=None):
655 664 """
656 665 Fetch all git objects from external location.
657 666 """
658 667 self._remote.sync_fetch(url, refs=commit_ids)
659 668 self._remote.invalidate_vcs_cache()
660 669
661 670 def push(self, url):
662 671 refs = None
663 672 self._remote.sync_push(url, refs=refs)
664 673
665 674 def set_refs(self, ref_name, commit_id):
666 675 self._remote.set_refs(ref_name, commit_id)
667 676 self._invalidate_prop_cache('_refs')
668 677
669 678 def remove_ref(self, ref_name):
670 679 self._remote.remove_ref(ref_name)
671 680 self._invalidate_prop_cache('_refs')
672 681
673 682 def run_gc(self, prune=True):
674 683 cmd = ['gc', '--aggressive']
675 684 if prune:
676 685 cmd += ['--prune=now']
677 686 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
678 687 return stderr
679 688
680 689 def _update_server_info(self):
681 690 """
682 691 runs gits update-server-info command in this repo instance
683 692 """
684 693 self._remote.update_server_info()
685 694
686 695 def _current_branch(self):
687 696 """
688 697 Return the name of the current branch.
689 698
690 699 It only works for non-bare repositories (i.e. repositories with a
691 700 working copy)
692 701 """
693 702 if self.bare:
694 703 raise RepositoryError('Bare git repos do not have active branches')
695 704
696 705 if self.is_empty():
697 706 return None
698 707
699 708 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
700 709 return stdout.strip()
701 710
702 711 def _checkout(self, branch_name, create=False, force=False):
703 712 """
704 713 Checkout a branch in the working directory.
705 714
706 715 It tries to create the branch if create is True, failing if the branch
707 716 already exists.
708 717
709 718 It only works for non-bare repositories (i.e. repositories with a
710 719 working copy)
711 720 """
712 721 if self.bare:
713 722 raise RepositoryError('Cannot checkout branches in a bare git repo')
714 723
715 724 cmd = ['checkout']
716 725 if force:
717 726 cmd.append('-f')
718 727 if create:
719 728 cmd.append('-b')
720 729 cmd.append(branch_name)
721 730 self.run_git_command(cmd, fail_on_stderr=False)
722 731
723 732 def _create_branch(self, branch_name, commit_id):
724 733 """
725 734 creates a branch in a GIT repo
726 735 """
727 736 self._remote.create_branch(branch_name, commit_id)
728 737
729 738 def _identify(self):
730 739 """
731 740 Return the current state of the working directory.
732 741 """
733 742 if self.bare:
734 743 raise RepositoryError('Bare git repos do not have active branches')
735 744
736 745 if self.is_empty():
737 746 return None
738 747
739 748 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
740 749 return stdout.strip()
741 750
742 751 def _local_clone(self, clone_path, branch_name, source_branch=None):
743 752 """
744 753 Create a local clone of the current repo.
745 754 """
746 755 # N.B.(skreft): the --branch option is required as otherwise the shallow
747 756 # clone will only fetch the active branch.
748 757 cmd = ['clone', '--branch', branch_name,
749 758 self.path, os.path.abspath(clone_path)]
750 759
751 760 self.run_git_command(cmd, fail_on_stderr=False)
752 761
753 762 # if we get a different source branch, make sure we also fetch it for
754 763 # merge conditions
755 764 if source_branch and source_branch != branch_name:
756 765 # check if the ref exists.
757 766 shadow_repo = GitRepository(os.path.abspath(clone_path))
758 767 if shadow_repo.get_remote_ref(source_branch):
759 768 cmd = ['fetch', self.path, source_branch]
760 769 self.run_git_command(cmd, fail_on_stderr=False)
761 770
762 771 def _local_fetch(self, repository_path, branch_name, use_origin=False):
763 772 """
764 773 Fetch a branch from a local repository.
765 774 """
766 775 repository_path = os.path.abspath(repository_path)
767 776 if repository_path == self.path:
768 777 raise ValueError('Cannot fetch from the same repository')
769 778
770 779 if use_origin:
771 780 branch_name = '+{branch}:refs/heads/{branch}'.format(
772 781 branch=branch_name)
773 782
774 783 cmd = ['fetch', '--no-tags', '--update-head-ok',
775 784 repository_path, branch_name]
776 785 self.run_git_command(cmd, fail_on_stderr=False)
777 786
778 787 def _local_reset(self, branch_name):
779 788 branch_name = '{}'.format(branch_name)
780 789 cmd = ['reset', '--hard', branch_name, '--']
781 790 self.run_git_command(cmd, fail_on_stderr=False)
782 791
783 792 def _last_fetch_heads(self):
784 793 """
785 794 Return the last fetched heads that need merging.
786 795
787 796 The algorithm is defined at
788 797 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
789 798 """
790 799 if not self.bare:
791 800 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
792 801 else:
793 802 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
794 803
795 804 heads = []
796 805 with open(fetch_heads_path) as f:
797 806 for line in f:
798 807 if ' not-for-merge ' in line:
799 808 continue
800 809 line = re.sub('\t.*', '', line, flags=re.DOTALL)
801 810 heads.append(line)
802 811
803 812 return heads
804 813
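# A sketch of the FETCH_HEAD parsing above (shas and paths hypothetical).
# Lines marked not-for-merge are skipped; for the rest, everything from
# the first tab onward is stripped, leaving just the sha:
#
#   deadbeef...<TAB><TAB>branch 'feature' of /srv/repos/upstream
#   cafebabe...<TAB>not-for-merge<TAB>branch 'other' of /srv/repos/upstream
#
# would yield ['deadbeef...'].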
805 814 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
806 815 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
807 816
808 817 def _local_pull(self, repository_path, branch_name, ff_only=True):
809 818 """
810 819 Pull a branch from a local repository.
811 820 """
812 821 if self.bare:
813 822 raise RepositoryError('Cannot pull into a bare git repository')
814 823 # N.B.(skreft): The --ff-only option is to make sure this is a
815 824 # fast-forward (i.e., we are only pulling new changes and there are no
816 825 # conflicts with our current branch)
817 826 # Additionally, that option needs to go before --no-tags, otherwise git
818 827 # pull complains about it being an unknown flag.
819 828 cmd = ['pull']
820 829 if ff_only:
821 830 cmd.append('--ff-only')
822 831 cmd.extend(['--no-tags', repository_path, branch_name])
823 832 self.run_git_command(cmd, fail_on_stderr=False)
824 833
825 834 def _local_merge(self, merge_message, user_name, user_email, heads):
826 835 """
827 836 Merge the given head into the checked out branch.
828 837
829 838 It will force a merge commit.
830 839
831 840 Currently it raises an error if the repo is empty, as it is not possible
832 841 to create a merge commit in an empty repo.
833 842
834 843 :param merge_message: The message to use for the merge commit.
835 844 :param heads: the heads to merge.
836 845 """
837 846 if self.bare:
838 847 raise RepositoryError('Cannot merge into a bare git repository')
839 848
840 849 if not heads:
841 850 return
842 851
843 852 if self.is_empty():
844 853 # TODO(skreft): do something more robust in this case.
845 854 raise RepositoryError('Do not know how to merge into empty repositories yet')
846 855 unresolved = None
847 856
848 857 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
849 858 # commit message. We also specify the user who is doing the merge.
850 859 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
851 860 '-c', 'user.email=%s' % safe_str(user_email),
852 861 'merge', '--no-ff', '-m', safe_str(merge_message)]
853 862
854 863 merge_cmd = cmd + heads
855 864
856 865 try:
857 866 self.run_git_command(merge_cmd, fail_on_stderr=False)
858 867 except RepositoryError:
859 868 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
860 869 fail_on_stderr=False)[0].splitlines()
861 870 # NOTE(marcink): we add U notation for consistency with HG backend output
862 871 unresolved = ['U {}'.format(f) for f in files]
863 872
864 873 # Cleanup any merge leftovers
865 874 self._remote.invalidate_vcs_cache()
866 875 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
867 876
868 877 if unresolved:
869 878 raise UnresolvedFilesInRepo(unresolved)
870 879 else:
871 880 raise
872 881
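# The merge command assembled above, with hypothetical user data and a
# single head, is equivalent to running:
#
#   git -c user.name="Joe Doe" -c user.email=joe.doe@example.com \
#       merge --no-ff -m "merge message" <head-sha>
#
# On failure, `git diff --name-only --diff-filter U` lists the unmerged
# paths, the merge is aborted, and UnresolvedFilesInRepo is raised with
# those paths.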
873 882 def _local_push(
874 883 self, source_branch, repository_path, target_branch,
875 884 enable_hooks=False, rc_scm_data=None):
876 885 """
877 886 Push the source_branch to the given repository and target_branch.
878 887
879 888 Currently, if the target_branch is not master and the target repo is
880 889 empty, the push will work, but then GitRepository won't be able to find
881 890 the pushed branch or the commits, as HEAD will be corrupted (i.e.,
882 891 pointing to master, which does not exist).
883 892
884 893 It does not run the hooks in the target repo.
885 894 """
886 895 # TODO(skreft): deal with the case in which the target repo is empty,
887 896 # and the target_branch is not master.
888 897 target_repo = GitRepository(repository_path)
889 898 if (not target_repo.bare and
890 899 target_repo._current_branch() == target_branch):
891 900 # Git prevents pushing to the checked out branch, so simulate it by
892 901 # pulling into the target repository.
893 902 target_repo._local_pull(self.path, source_branch)
894 903 else:
895 904 cmd = ['push', os.path.abspath(repository_path),
896 905 '%s:%s' % (source_branch, target_branch)]
897 906 gitenv = {}
898 907 if rc_scm_data:
899 908 gitenv.update({'RC_SCM_DATA': rc_scm_data})
900 909
901 910 if not enable_hooks:
902 911 gitenv['RC_SKIP_HOOKS'] = '1'
903 912 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
904 913
905 914 def _get_new_pr_branch(self, source_branch, target_branch):
906 915 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
907 916 pr_branches = []
908 917 for branch in self.branches:
909 918 if branch.startswith(prefix):
910 919 pr_branches.append(int(branch[len(prefix):]))
911 920
912 921 if not pr_branches:
913 922 branch_id = 0
914 923 else:
915 924 branch_id = max(pr_branches) + 1
916 925
917 926 return '%s%d' % (prefix, branch_id)
918 927
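# A sketch of the naming scheme above (branches hypothetical): with
# existing branches pr_feature-master_0 and pr_feature-master_3,
#
#   >>> repo._get_new_pr_branch('feature', 'master')
#   'pr_feature-master_4'
#
# i.e. the highest existing suffix plus one, starting at 0 when no such
# branch exists yet.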
919 928 def _maybe_prepare_merge_workspace(
920 929 self, repo_id, workspace_id, target_ref, source_ref):
921 930 shadow_repository_path = self._get_shadow_repository_path(
922 931 self.path, repo_id, workspace_id)
923 932 if not os.path.exists(shadow_repository_path):
924 933 self._local_clone(
925 934 shadow_repository_path, target_ref.name, source_ref.name)
926 935 log.debug('Prepared %s shadow repository in %s',
927 936 self.alias, shadow_repository_path)
928 937
929 938 return shadow_repository_path
930 939
931 940 def _merge_repo(self, repo_id, workspace_id, target_ref,
932 941 source_repo, source_ref, merge_message,
933 942 merger_name, merger_email, dry_run=False,
934 943 use_rebase=False, close_branch=False):
935 944
936 945 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
937 946 'rebase' if use_rebase else 'merge', dry_run)
938 947 if target_ref.commit_id != self.branches[target_ref.name]:
939 948 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
940 949 target_ref.commit_id, self.branches[target_ref.name])
941 950 return MergeResponse(
942 951 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
943 952 metadata={'target_ref': target_ref})
944 953
945 954 shadow_repository_path = self._maybe_prepare_merge_workspace(
946 955 repo_id, workspace_id, target_ref, source_ref)
947 956 shadow_repo = self.get_shadow_instance(shadow_repository_path)
948 957
949 958 # checkout source, if it's different. Otherwise we could not
950 959 # fetch proper commits for merge testing
951 960 if source_ref.name != target_ref.name:
952 961 if shadow_repo.get_remote_ref(source_ref.name):
953 962 shadow_repo._checkout(source_ref.name, force=True)
954 963
955 964 # checkout target, and fetch changes
956 965 shadow_repo._checkout(target_ref.name, force=True)
957 966
958 967 # fetch/reset the target, in case it has changed;
959 968 # this handles even forced updates
960 969 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
961 970 shadow_repo._local_reset(target_ref.name)
962 971
963 972 # Need to reload repo to invalidate the cache, or otherwise we cannot
964 973 # retrieve the last target commit.
965 974 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966 975 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
967 976 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
968 977 target_ref, target_ref.commit_id,
969 978 shadow_repo.branches[target_ref.name])
970 979 return MergeResponse(
971 980 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
972 981 metadata={'target_ref': target_ref})
973 982
974 983 # calculate new branch
975 984 pr_branch = shadow_repo._get_new_pr_branch(
976 985 source_ref.name, target_ref.name)
977 986 log.debug('using pull-request merge branch: `%s`', pr_branch)
978 987 # checkout to temp branch, and fetch changes
979 988 shadow_repo._checkout(pr_branch, create=True)
980 989 try:
981 990 shadow_repo._local_fetch(source_repo.path, source_ref.name)
982 991 except RepositoryError:
983 992 log.exception('Failure when doing local fetch on '
984 993 'shadow repo: %s', shadow_repo)
985 994 return MergeResponse(
986 995 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
987 996 metadata={'source_ref': source_ref})
988 997
989 998 merge_ref = None
990 999 merge_failure_reason = MergeFailureReason.NONE
991 1000 metadata = {}
992 1001 try:
993 1002 shadow_repo._local_merge(merge_message, merger_name, merger_email,
994 1003 [source_ref.commit_id])
995 1004 merge_possible = True
996 1005
997 1006 # Need to invalidate the cache, or otherwise we
998 1007 # cannot retrieve the merge commit.
999 1008 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1000 1009 merge_commit_id = shadow_repo.branches[pr_branch]
1001 1010
1002 1011 # Set a reference pointing to the merge commit. This reference may
1003 1012 # be used to easily identify the last successful merge commit in
1004 1013 # the shadow repository.
1005 1014 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1006 1015 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1007 1016 except RepositoryError as e:
1008 1017 log.exception('Failure when doing local merge on git shadow repo')
1009 1018 if isinstance(e, UnresolvedFilesInRepo):
1010 1019 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1011 1020
1012 1021 merge_possible = False
1013 1022 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1014 1023
1015 1024 if merge_possible and not dry_run:
1016 1025 try:
1017 1026 shadow_repo._local_push(
1018 1027 pr_branch, self.path, target_ref.name, enable_hooks=True,
1019 1028 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1020 1029 merge_succeeded = True
1021 1030 except RepositoryError:
1022 1031 log.exception(
1023 1032 'Failure when doing local push from the shadow '
1024 1033 'repository to the target repository at %s.', self.path)
1025 1034 merge_succeeded = False
1026 1035 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1027 1036 metadata['target'] = 'git shadow repo'
1028 1037 metadata['merge_commit'] = pr_branch
1029 1038 else:
1030 1039 merge_succeeded = False
1031 1040
1032 1041 return MergeResponse(
1033 1042 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1034 1043 metadata=metadata)
@@ -1,1012 +1,1012 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, would try to create repository if
69 69 it does not exist rather than raising exception
70 70 :param src_url=None: would try to clone repository from given location
71 71 :param do_workspace_checkout=False: sets update of working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present
78 78 # because sometimes we init the repos with config we need to meet
79 79 # special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '1')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being a lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only active (i.e. not closed) branches by default
127 127
128 128 :param active: also return active branches
129 129 :param closed: also return closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exist
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 301 self, commit_id1, repo2, commit_id2)
302 302
303 303 if commit_id1 == commit_id2:
304 304 return commit_id1
305 305
306 306 ancestors = self._remote.revs_from_revspec(
307 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 308 other_path=repo2.path)
309 309
310 310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311 311
312 312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 313 return ancestor_id
314 314
315 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 316 if commit_id1 == commit_id2:
317 317 commits = []
318 318 else:
319 319 if merge:
320 320 indexes = self._remote.revs_from_revspec(
321 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 323 else:
324 324 indexes = self._remote.revs_from_revspec(
325 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 326 commit_id1, other_path=repo2.path)
327 327
328 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 329 for idx in indexes]
330 330
331 331 return commits
332 332
333 333 @staticmethod
334 334 def check_url(url, config):
335 335 """
336 336 Function will check the given url and try to verify that it's a valid
337 337 link. Sometimes it may happen that mercurial issues a basic
338 338 auth request, which can cause the whole API to hang when used from python
339 339 or other external calls.
340 340
341 341 On failure it'll raise urllib2.HTTPError; the exception is also thrown
342 342 when the return code is not 200
343 343 """
344 344 # check first if it's not a local url
345 345 if os.path.isdir(url) or url.startswith('file:'):
346 346 return True
347 347
348 348 # Request the _remote to verify the url
349 349 return connection.Hg.check_url(url, config.serialize())
350 350
351 351 @staticmethod
352 352 def is_valid_repository(path):
353 353 return os.path.isdir(os.path.join(path, '.hg'))
354 354
355 355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 356 """
357 357 Function will check for mercurial repository in given path. If there
358 358 is no repository in that path it will raise an exception unless
359 359 `create` parameter is set to True - in that case repository would
360 360 be created.
361 361
362 362 If `src_url` is given, it would try to clone the repository from the
363 363 given location. Additionally it'll update the
364 364 working copy according to the `do_workspace_checkout` flag.
365 365 """
366 366 if create and os.path.exists(self.path):
367 367 raise RepositoryError(
368 368 "Cannot create repository at %s, location already exist"
369 369 % self.path)
370 370
371 371 if src_url:
372 372 url = str(self._get_url(src_url))
373 373 MercurialRepository.check_url(url, self.config)
374 374
375 375 self._remote.clone(url, self.path, do_workspace_checkout)
376 376
377 377 # Don't try to create if we've already cloned repo
378 378 create = False
379 379
380 380 if create:
381 381 os.makedirs(self.path, mode=0o755)
382 382 self._remote.localrepository(create)
383 383
384 384 @LazyProperty
385 385 def in_memory_commit(self):
386 386 return MercurialInMemoryCommit(self)
387 387
388 388 @LazyProperty
389 389 def description(self):
390 390 description = self._remote.get_config_value(
391 391 'web', 'description', untrusted=True)
392 392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393 393
394 394 @LazyProperty
395 395 def contact(self):
396 396 contact = (
397 397 self._remote.get_config_value("web", "contact") or
398 398 self._remote.get_config_value("ui", "username"))
399 399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400 400
401 401 @LazyProperty
402 402 def last_change(self):
403 403 """
404 404 Returns last change made on this repository as
405 405 `datetime.datetime` object.
406 406 """
407 407 try:
408 408 return self.get_commit().date
409 409 except RepositoryError:
410 410 tzoffset = makedate()[1]
411 411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 412
413 413 def _get_fs_mtime(self):
414 414 # fallback to filesystem
415 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 416 st_path = os.path.join(self.path, '.hg', "store")
417 417 if os.path.exists(cl_path):
418 418 return os.stat(cl_path).st_mtime
419 419 else:
420 420 return os.stat(st_path).st_mtime
421 421
422 422 def _get_url(self, url):
423 423 """
424 424 Returns normalized url. If scheme is not given, falls back
425 425 to the filesystem
426 426 (``file:///``) scheme.
427 427 """
428 428 url = url.encode('utf8')
429 429 if url != 'default' and '://' not in url:
430 430 url = "file:" + urllib.pathname2url(url)
431 431 return url
432 432
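# A sketch of the normalization above (paths hypothetical); on POSIX,
# urllib.pathname2url leaves plain paths unchanged apart from quoting:
#
#   >>> repo._get_url('/srv/repos/project')
#   'file:/srv/repos/project'
#   >>> repo._get_url('https://code.example.com/project')
#   'https://code.example.com/project'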
433 433 def get_hook_location(self):
434 434 """
435 435 returns absolute path to location where hooks are stored
436 436 """
437 437 return os.path.join(self.path, '.hg', '.hgrc')
438 438
439 439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 translate_tag=None, maybe_unreachable=False):
440 translate_tag=None, maybe_unreachable=False, reference_obj=None):
441 441 """
442 442 Returns ``MercurialCommit`` object representing repository's
443 443 commit at the given `commit_id` or `commit_idx`.
444 444 """
445 445 if self.is_empty():
446 446 raise EmptyRepositoryError("There are no commits yet")
447 447
448 448 if commit_id is not None:
449 449 self._validate_commit_id(commit_id)
450 450 try:
451 451 # we have cached idx, use it without contacting the remote
452 452 idx = self._commit_ids[commit_id]
453 453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 454 except KeyError:
455 455 pass
456 456
457 457 elif commit_idx is not None:
458 458 self._validate_commit_idx(commit_idx)
459 459 try:
460 460 _commit_id = self.commit_ids[commit_idx]
461 461 if commit_idx < 0:
462 462 commit_idx = self.commit_ids.index(_commit_id)
463 463
464 464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 465 except IndexError:
466 466 commit_id = commit_idx
467 467 else:
468 468 commit_id = "tip"
469 469
470 470 if isinstance(commit_id, unicode):
471 471 commit_id = safe_str(commit_id)
472 472
473 473 try:
474 474 raw_id, idx = self._remote.lookup(commit_id, both=True)
475 475 except CommitDoesNotExistError:
476 476 msg = "Commit {} does not exist for `{}`".format(
477 477 *map(safe_str, [commit_id, self.name]))
478 478 raise CommitDoesNotExistError(msg)
479 479
480 480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481 481
482 482 def get_commits(
483 483 self, start_id=None, end_id=None, start_date=None, end_date=None,
484 484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
485 485 """
486 486 Returns generator of ``MercurialCommit`` objects from start to end
487 487 (both are inclusive)
488 488
489 489 :param start_id: None, str(commit_id)
490 490 :param end_id: None, str(commit_id)
491 491 :param start_date: if specified, commits with commit date less than
492 492 ``start_date`` would be filtered out from returned set
493 493 :param end_date: if specified, commits with commit date greater than
494 494 ``end_date`` would be filtered out from returned set
495 495 :param branch_name: if specified, commits not reachable from given
496 496 branch would be filtered out from returned set
497 497 :param show_hidden: Show hidden commits such as obsolete or hidden from
498 498 Mercurial evolve
499 499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
500 500 exist.
501 501 :raise CommitDoesNotExistError: If commit for given ``start`` or
502 502 ``end`` could not be found.
503 503 """
504 504 # actually we should check now if it's not an empty repo
505 505 if self.is_empty():
506 506 raise EmptyRepositoryError("There are no commits yet")
507 507 self._validate_branch_name(branch_name)
508 508
509 509 branch_ancestors = False
510 510 if start_id is not None:
511 511 self._validate_commit_id(start_id)
512 512 c_start = self.get_commit(commit_id=start_id)
513 513 start_pos = self._commit_ids[c_start.raw_id]
514 514 else:
515 515 start_pos = None
516 516
517 517 if end_id is not None:
518 518 self._validate_commit_id(end_id)
519 519 c_end = self.get_commit(commit_id=end_id)
520 520 end_pos = max(0, self._commit_ids[c_end.raw_id])
521 521 else:
522 522 end_pos = None
523 523
524 524 if None not in [start_id, end_id] and start_pos > end_pos:
525 525 raise RepositoryError(
526 526 "Start commit '%s' cannot be after end commit '%s'" %
527 527 (start_id, end_id))
528 528
529 529 if end_pos is not None:
530 530 end_pos += 1
531 531
532 532 commit_filter = []
533 533
534 534 if branch_name and not branch_ancestors:
535 535 commit_filter.append('branch("%s")' % (branch_name,))
536 536 elif branch_name and branch_ancestors:
537 537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
538 538
539 539 if start_date and not end_date:
540 540 commit_filter.append('date(">%s")' % (start_date,))
541 541 if end_date and not start_date:
542 542 commit_filter.append('date("<%s")' % (end_date,))
543 543 if start_date and end_date:
544 544 commit_filter.append(
545 545 'date(">%s") and date("<%s")' % (start_date, end_date))
546 546
547 547 if not show_hidden:
548 548 commit_filter.append('not obsolete()')
549 549 commit_filter.append('not hidden()')
550 550
551 551 # TODO: johbo: Figure out a simpler way for this solution
552 552 collection_generator = CollectionGenerator
553 553 if commit_filter:
554 554 commit_filter = ' and '.join(map(safe_str, commit_filter))
555 555 revisions = self._remote.rev_range([commit_filter])
556 556 collection_generator = MercurialIndexBasedCollectionGenerator
557 557 else:
558 558 revisions = self.commit_ids
559 559
560 560 if start_pos or end_pos:
561 561 revisions = revisions[start_pos:end_pos]
562 562
563 563 return collection_generator(self, revisions, pre_load=pre_load)
564 564
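# A sketch of the revset assembled above for a branch plus date-range
# query (values hypothetical):
#
#   branch("default") and date(">2020-01-01 00:00:00") and
#   date("<2020-06-01 00:00:00") and not obsolete() and not hidden()
#
# which is handed to self._remote.rev_range() and served through the
# index-based collection generator.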
565 565 def pull(self, url, commit_ids=None):
566 566 """
567 567 Pull changes from external location.
568 568
569 569 :param commit_ids: Optional. Can be set to a list of commit ids
570 570 which shall be pulled from the other repository.
571 571 """
572 572 url = self._get_url(url)
573 573 self._remote.pull(url, commit_ids=commit_ids)
574 574 self._remote.invalidate_vcs_cache()
575 575
576 576 def fetch(self, url, commit_ids=None):
577 577 """
578 578 Backward compatibility with GIT fetch==pull
579 579 """
580 580 return self.pull(url, commit_ids=commit_ids)
581 581
582 582 def push(self, url):
583 583 url = self._get_url(url)
584 584 self._remote.sync_push(url)
585 585
586 586 def _local_clone(self, clone_path):
587 587 """
588 588 Create a local clone of the current repo.
589 589 """
590 590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 591 hooks=False)
592 592
593 593 def _update(self, revision, clean=False):
594 594 """
595 595 Update the working copy to the specified revision.
596 596 """
597 597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
598 598 self._remote.update(revision, clean=clean)
599 599
600 600 def _identify(self):
601 601 """
602 602 Return the current state of the working directory.
603 603 """
604 604 return self._remote.identify().strip().rstrip('+')
605 605
606 606 def _heads(self, branch=None):
607 607 """
608 608 Return the commit ids of the repository heads.
609 609 """
610 610 return self._remote.heads(branch=branch).strip().split(' ')
611 611
612 612 def _ancestor(self, revision1, revision2):
613 613 """
614 614 Return the common ancestor of the two revisions.
615 615 """
616 616 return self._remote.ancestor(revision1, revision2)
617 617
618 618 def _local_push(
619 619 self, revision, repository_path, push_branches=False,
620 620 enable_hooks=False):
621 621 """
622 622 Push the given revision to the specified repository.
623 623
624 624 :param push_branches: allow creating branches in the target repo.
625 625 """
626 626 self._remote.push(
627 627 [revision], repository_path, hooks=enable_hooks,
628 628 push_branches=push_branches)
629 629
630 630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
631 631 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
632 632 """
633 633 Merge the given source_revision into the checked out revision.
634 634
635 635 Returns the commit id of the merge and a boolean indicating if the
636 636 commit needs to be pushed.
637 637 """
638 638 source_ref_commit_id = source_ref.commit_id
639 639 target_ref_commit_id = target_ref.commit_id
640 640
641 641 # update our workdir to target ref, for proper merge
642 642 self._update(target_ref_commit_id, clean=True)
643 643
644 644 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
645 645 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
646 646
647 647 if close_commit_id:
648 648 # NOTE(marcink): if we get the close commit, this is our new source
649 649 # which will include the close commit itself.
650 650 source_ref_commit_id = close_commit_id
651 651
652 652 if ancestor == source_ref_commit_id:
653 653 # Nothing to do, the changes were already integrated
654 654 return target_ref_commit_id, False
655 655
656 656 elif ancestor == target_ref_commit_id and is_the_same_branch:
657 657 # In this case we should force a commit message
658 658 return source_ref_commit_id, True
659 659
660 660 unresolved = None
661 661 if use_rebase:
662 662 try:
663 663 bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
664 664 self.bookmark(bookmark_name, revision=source_ref.commit_id)
665 665 self._remote.rebase(
666 666 source=source_ref_commit_id, dest=target_ref_commit_id)
667 667 self._remote.invalidate_vcs_cache()
668 668 self._update(bookmark_name, clean=True)
669 669 return self._identify(), True
670 670 except RepositoryError as e:
671 671 # The rebase-abort may raise another exception which 'hides'
672 672 # the original one, therefore we log it here.
673 673 log.exception('Error while rebasing shadow repo during merge.')
674 674 if 'unresolved conflicts' in safe_str(e):
675 675 unresolved = self._remote.get_unresolved_files()
676 676 log.debug('unresolved files: %s', unresolved)
677 677
678 678 # Cleanup any rebase leftovers
679 679 self._remote.invalidate_vcs_cache()
680 680 self._remote.rebase(abort=True)
681 681 self._remote.invalidate_vcs_cache()
682 682 self._remote.update(clean=True)
683 683 if unresolved:
684 684 raise UnresolvedFilesInRepo(unresolved)
685 685 else:
686 686 raise
687 687 else:
688 688 try:
689 689 self._remote.merge(source_ref_commit_id)
690 690 self._remote.invalidate_vcs_cache()
691 691 self._remote.commit(
692 692 message=safe_str(merge_message),
693 693 username=safe_str('%s <%s>' % (user_name, user_email)))
694 694 self._remote.invalidate_vcs_cache()
695 695 return self._identify(), True
696 696 except RepositoryError as e:
697 697 # The merge-abort may raise another exception which 'hides'
698 698 # the original one, therefore we log it here.
699 699 log.exception('Error while merging shadow repo during merge.')
700 700 if 'unresolved merge conflicts' in safe_str(e):
701 701 unresolved = self._remote.get_unresolved_files()
702 702 log.debug('unresolved files: %s', unresolved)
703 703
704 704 # Cleanup any merge leftovers
705 705 self._remote.update(clean=True)
706 706 if unresolved:
707 707 raise UnresolvedFilesInRepo(unresolved)
708 708 else:
709 709 raise
710 710
711 711 def _local_close(self, target_ref, user_name, user_email,
712 712 source_ref, close_message=''):
713 713 """
714 714 Close the branch of the given source_revision
715 715
716 716 Returns the commit id of the close and a boolean indicating if the
717 717 commit needs to be pushed.
718 718 """
719 719 self._update(source_ref.commit_id)
720 720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
721 721 try:
722 722 self._remote.commit(
723 723 message=safe_str(message),
724 724 username=safe_str('%s <%s>' % (user_name, user_email)),
725 725 close_branch=True)
726 726 self._remote.invalidate_vcs_cache()
727 727 return self._identify(), True
728 728 except RepositoryError:
729 729 # Cleanup any commit leftovers
730 730 self._remote.update(clean=True)
731 731 raise
732 732
733 733 def _is_the_same_branch(self, target_ref, source_ref):
734 734 return (
735 735 self._get_branch_name(target_ref) ==
736 736 self._get_branch_name(source_ref))
737 737
738 738 def _get_branch_name(self, ref):
739 739 if ref.type == 'branch':
740 740 return ref.name
741 741 return self._remote.ctx_branch(ref.commit_id)
742 742
743 743 def _maybe_prepare_merge_workspace(
744 744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
745 745 shadow_repository_path = self._get_shadow_repository_path(
746 746 self.path, repo_id, workspace_id)
747 747 if not os.path.exists(shadow_repository_path):
748 748 self._local_clone(shadow_repository_path)
749 749 log.debug(
750 750 'Prepared shadow repository in %s', shadow_repository_path)
751 751
752 752 return shadow_repository_path
753 753
754 754 def _merge_repo(self, repo_id, workspace_id, target_ref,
755 755 source_repo, source_ref, merge_message,
756 756 merger_name, merger_email, dry_run=False,
757 757 use_rebase=False, close_branch=False):
758 758
759 759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
760 760 'rebase' if use_rebase else 'merge', dry_run)
761 761 if target_ref.commit_id not in self._heads():
762 762 return MergeResponse(
763 763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
764 764 metadata={'target_ref': target_ref})
765 765
766 766 try:
767 767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
768 768 heads_all = self._heads(target_ref.name)
769 769 max_heads = 10
770 770 if len(heads_all) > max_heads:
771 771 heads = '\n,'.join(
772 772 heads_all[:max_heads] +
773 773 ['and {} more.'.format(len(heads_all)-max_heads)])
774 774 else:
775 775 heads = '\n,'.join(heads_all)
776 776 metadata = {
777 777 'target_ref': target_ref,
778 778 'source_ref': source_ref,
779 779 'heads': heads
780 780 }
781 781 return MergeResponse(
782 782 False, False, None,
783 783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
784 784 metadata=metadata)
785 785 except CommitDoesNotExistError:
786 786 log.exception('Failure when looking up branch heads on hg target')
787 787 return MergeResponse(
788 788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
789 789 metadata={'target_ref': target_ref})
790 790
791 791 shadow_repository_path = self._maybe_prepare_merge_workspace(
792 792 repo_id, workspace_id, target_ref, source_ref)
793 793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
794 794
795 795 log.debug('Pulling in target reference %s', target_ref)
796 796 self._validate_pull_reference(target_ref)
797 797 shadow_repo._local_pull(self.path, target_ref)
798 798
799 799 try:
800 800 log.debug('Pulling in source reference %s', source_ref)
801 801 source_repo._validate_pull_reference(source_ref)
802 802 shadow_repo._local_pull(source_repo.path, source_ref)
803 803 except CommitDoesNotExistError:
804 804 log.exception('Failure when doing local pull on hg shadow repo')
805 805 return MergeResponse(
806 806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
807 807 metadata={'source_ref': source_ref})
808 808
809 809 merge_ref = None
810 810 merge_commit_id = None
811 811 close_commit_id = None
812 812 merge_failure_reason = MergeFailureReason.NONE
813 813 metadata = {}
814 814
815 815 # enforce that close_branch is only used when the source is
816 816 # an actual branch
817 817 close_branch = close_branch and source_ref.type == 'branch'
818 818
819 819 # don't allow closing the branch if source and target are the same
820 820 close_branch = close_branch and source_ref.name != target_ref.name
821 821
822 822 needs_push_on_close = False
823 823 if close_branch and not use_rebase and not dry_run:
824 824 try:
825 825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
826 826 target_ref, merger_name, merger_email, source_ref)
827 827 merge_possible = True
828 828 except RepositoryError:
829 829 log.exception('Failure when doing close branch on '
830 830 'shadow repo: %s', shadow_repo)
831 831 merge_possible = False
832 832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
833 833 else:
834 834 merge_possible = True
835 835
836 836 needs_push = False
837 837 if merge_possible:
838 838
839 839 try:
840 840 merge_commit_id, needs_push = shadow_repo._local_merge(
841 841 target_ref, merge_message, merger_name, merger_email,
842 842 source_ref, use_rebase=use_rebase,
843 843 close_commit_id=close_commit_id, dry_run=dry_run)
844 844 merge_possible = True
845 845
846 846 # check the state of the close action; it may have
847 847 # required a push as well
848 848 needs_push = needs_push or needs_push_on_close
849 849
850 850 # Set a bookmark pointing to the merge commit. This bookmark
851 851 # may be used to easily identify the last successful merge
852 852 # commit in the shadow repository.
853 853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
854 854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
855 855 except SubrepoMergeError:
856 856 log.exception(
857 857 'Subrepo merge error during local merge on hg shadow repo.')
858 858 merge_possible = False
859 859 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
860 860 needs_push = False
861 861 except RepositoryError as e:
862 862 log.exception('Failure when doing local merge on hg shadow repo')
863 863 if isinstance(e, UnresolvedFilesInRepo):
864 864 all_conflicts = list(e.args[0])
865 865 max_conflicts = 20
866 866 if len(all_conflicts) > max_conflicts:
867 867 conflicts = all_conflicts[:max_conflicts] \
868 868 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
869 869 else:
870 870 conflicts = all_conflicts
871 871 metadata['unresolved_files'] = \
872 872 '\n* conflict: ' + \
873 873 ('\n * conflict: '.join(conflicts))
874 874
875 875 merge_possible = False
876 876 merge_failure_reason = MergeFailureReason.MERGE_FAILED
877 877 needs_push = False
878 878
879 879 if merge_possible and not dry_run:
880 880 if needs_push:
881 881 # In case the target is a bookmark, update it, so after pushing
882 882 # the bookmark is also updated in the target.
883 883 if target_ref.type == 'book':
884 884 shadow_repo.bookmark(
885 885 target_ref.name, revision=merge_commit_id)
886 886 try:
887 887 shadow_repo_with_hooks = self.get_shadow_instance(
888 888 shadow_repository_path,
889 889 enable_hooks=True)
890 890 # This is the actual merge action, we push from shadow
891 891 # into origin.
892 892 # Note: the push_branches option will push any new branch
893 893 # defined in the source repository to the target. This may
894 894 # be dangerous as branches are permanent in Mercurial.
895 895 # This feature was requested in issue #441.
896 896 shadow_repo_with_hooks._local_push(
897 897 merge_commit_id, self.path, push_branches=True,
898 898 enable_hooks=True)
899 899
900 900 # maybe we also need to push the close_commit_id
901 901 if close_commit_id:
902 902 shadow_repo_with_hooks._local_push(
903 903 close_commit_id, self.path, push_branches=True,
904 904 enable_hooks=True)
905 905 merge_succeeded = True
906 906 except RepositoryError:
907 907 log.exception(
908 908 'Failure when doing local push from the shadow '
909 909 'repository to the target repository at %s.', self.path)
910 910 merge_succeeded = False
911 911 merge_failure_reason = MergeFailureReason.PUSH_FAILED
912 912 metadata['target'] = 'hg shadow repo'
913 913 metadata['merge_commit'] = merge_commit_id
914 914 else:
915 915 merge_succeeded = True
916 916 else:
917 917 merge_succeeded = False
918 918
919 919 return MergeResponse(
920 920 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
921 921 metadata=metadata)
922 922
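    # High-level flow of _merge_repo above (summary comment, no new
    # behaviour): 1) verify the target is a head, 2) prepare the shadow
    # repo and pull both refs into it, 3) optionally close the source
    # branch, 4) merge or rebase inside the shadow, 5) on success and
    # when not a dry_run, push the merge (and close) commits back to the
    # target with hooks enabled.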
923 923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 924 config = self.config.copy()
925 925 if not enable_hooks:
926 926 config.clear_section('hooks')
927 927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928 928
929 929 def _validate_pull_reference(self, reference):
930 930 if not (reference.name in self.bookmarks or
931 931 reference.name in self.branches or
932 932 self.get_commit(reference.commit_id)):
933 933 raise CommitDoesNotExistError(
934 934 'Unknown branch, bookmark or commit id')
935 935
936 936 def _local_pull(self, repository_path, reference):
937 937 """
938 938 Fetch a branch, bookmark or commit from a local repository.
939 939 """
940 940 repository_path = os.path.abspath(repository_path)
941 941 if repository_path == self.path:
942 942 raise ValueError('Cannot pull from the same repository')
943 943
944 944 reference_type_to_option_name = {
945 945 'book': 'bookmark',
946 946 'branch': 'branch',
947 947 }
948 948 option_name = reference_type_to_option_name.get(
949 949 reference.type, 'revision')
950 950
951 951 if option_name == 'revision':
952 952 ref = reference.commit_id
953 953 else:
954 954 ref = reference.name
955 955
956 956 options = {option_name: [ref]}
957 957 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 958 self._remote.invalidate_vcs_cache()
959 959
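    # A sketch of the reference-to-option mapping above (hypothetical
    # Reference values; only the relevant pull_cmd kwarg changes):
    #
    #   Reference('book', 'feature-x', 'abc1')  -> {'bookmark': ['feature-x']}
    #   Reference('branch', 'default', 'abc1')  -> {'branch': ['default']}
    #   Reference('tag', 'v1.0', 'abc1')        -> {'revision': ['abc1']}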
960 960 def bookmark(self, bookmark, revision=None):
961 961 if isinstance(bookmark, unicode):
962 962 bookmark = safe_str(bookmark)
963 963 self._remote.bookmark(bookmark, revision=revision)
964 964 self._remote.invalidate_vcs_cache()
965 965
966 966 def get_path_permissions(self, username):
967 967 hgacl_file = os.path.join(self.path, '.hg/hgacl')
968 968
969 969 def read_patterns(suffix):
970 970 svalue = None
971 971 for section, option in [
972 972 ('narrowacl', username + suffix),
973 973 ('narrowacl', 'default' + suffix),
974 974 ('narrowhgacl', username + suffix),
975 975 ('narrowhgacl', 'default' + suffix)
976 976 ]:
977 977 try:
978 978 svalue = hgacl.get(section, option)
979 979 break # stop at the first value we find
980 980 except configparser.NoOptionError:
981 981 pass
982 982 if not svalue:
983 983 return None
984 984 result = ['/']
985 985 for pattern in svalue.split():
986 986 result.append(pattern)
987 987 if '*' not in pattern and '?' not in pattern:
988 988 result.append(pattern + '/*')
989 989 return result
990 990
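        # Example of the expansion performed by read_patterns (the hgacl
        # content below is assumed, for illustration only):
        #
        #   [narrowacl]
        #   john.includes = docs src/module
        #
        # read_patterns('.includes') for user 'john' then returns:
        #   ['/', 'docs', 'docs/*', 'src/module', 'src/module/*']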
991 991 if os.path.exists(hgacl_file):
992 992 try:
993 993 hgacl = configparser.RawConfigParser()
994 994 hgacl.read(hgacl_file)
995 995
996 996 includes = read_patterns('.includes')
997 997 excludes = read_patterns('.excludes')
998 998 return BasePathPermissionChecker.create_from_patterns(
999 999 includes, excludes)
1000 1000 except BaseException as e:
1001 1001 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1002 1002 hgacl_file, self.name, e)
1003 1003 raise exceptions.RepositoryRequirementError(msg)
1004 1004 else:
1005 1005 return None
1006 1006
1007 1007
1008 1008 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1009 1009
1010 1010 def _commit_factory(self, commit_id):
1011 1011 return self.repo.get_commit(
1012 1012 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,370 +1,370 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import date_astimestamp
33 33 from rhodecode.lib.utils import safe_str, safe_unicode
34 34 from rhodecode.lib.utils2 import CachedProperty
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.backends.svn.commit import (
38 38 SubversionCommit, _date_from_svn_properties)
39 39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 44 VCSError, NodeDoesNotExistError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class SubversionRepository(base.BaseRepository):
51 51 """
52 52 Subversion backend implementation
53 53
54 54 .. important::
55 55
56 56 It is very important to distinguish the commit index and the commit id
57 57 which is assigned by Subversion. The first one is always handled as an
58 58 `int` by this implementation. The commit id assigned by Subversion, on
59 59 the other hand, will always be a `str`.
60 60
61 61 There is a specific trap since the first commit will have the index
62 62 ``0`` but the svn id will be ``"1"``.
63 63
64 64 """
65 65
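    # A sketch of the index/id relationship described above, for a
    # repository with three commits (values are illustrative):
    #
    #   repo.commit_ids                       # ['1', '2', '3']
    #   repo.get_commit(commit_idx=0).raw_id  # '1' -- note the offset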
66 66 # Note: Subversion does not really have a default branch name.
67 67 DEFAULT_BRANCH_NAME = None
68 68
69 69 contact = base.BaseRepository.DEFAULT_CONTACT
70 70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71 71
72 72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 73 bare=False, **kwargs):
74 74 self.path = safe_str(os.path.abspath(repo_path))
75 75 self.config = config if config else self.get_default_config()
76 76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77 77
78 78 self._init_repo(create, src_url)
79 79
80 80 # caches
81 81 self._commit_ids = {}
82 82
83 83 @LazyProperty
84 84 def _remote(self):
85 85 repo_id = self.path
86 86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87 87
88 88 def _init_repo(self, create, src_url):
89 89 if create and os.path.exists(self.path):
90 90 raise RepositoryError(
91 91 "Cannot create repository at %s, location already exist"
92 92 % self.path)
93 93
94 94 if create:
95 95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 96 if src_url:
97 97 src_url = _sanitize_url(src_url)
98 98 self._remote.import_remote_repository(src_url)
99 99 else:
100 100 self._check_path()
101 101
102 102 @CachedProperty
103 103 def commit_ids(self):
104 104 head = self._remote.lookup(None)
105 105 return [str(r) for r in xrange(1, head + 1)]
106 106
107 107 def _rebuild_cache(self, commit_ids):
108 108 pass
109 109
110 110 def run_svn_command(self, cmd, **opts):
111 111 """
112 112 Runs given ``cmd`` as svn command and returns tuple
113 113 (stdout, stderr).
114 114
115 115 :param cmd: full svn command to be executed
116 116 :param opts: env options to pass into Subprocess command
117 117 """
118 118 if not isinstance(cmd, list):
119 119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
120 120
121 121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 122 out, err = self._remote.run_svn_command(cmd, **opts)
123 123 if err and not skip_stderr_log:
124 124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 125 return out, err
126 126
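    # Usage sketch (the command list is illustrative; it is forwarded
    # to the vcsserver remote as-is):
    #
    #   out, err = repo.run_svn_command(
    #       ['svn', 'info', repo.path], skip_stderr_log=True)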
127 127 @LazyProperty
128 128 def branches(self):
129 129 return self._tags_or_branches('vcs_svn_branch')
130 130
131 131 @LazyProperty
132 132 def branches_closed(self):
133 133 return {}
134 134
135 135 @LazyProperty
136 136 def bookmarks(self):
137 137 return {}
138 138
139 139 @LazyProperty
140 140 def branches_all(self):
141 141 # TODO: johbo: Implement proper branch support
142 142 all_branches = {}
143 143 all_branches.update(self.branches)
144 144 all_branches.update(self.branches_closed)
145 145 return all_branches
146 146
147 147 @LazyProperty
148 148 def tags(self):
149 149 return self._tags_or_branches('vcs_svn_tag')
150 150
151 151 def _tags_or_branches(self, config_section):
152 152 found_items = {}
153 153
154 154 if self.is_empty():
155 155 return {}
156 156
157 157 for pattern in self._patterns_from_section(config_section):
158 158 pattern = vcspath.sanitize(pattern)
159 159 tip = self.get_commit()
160 160 try:
161 161 if pattern.endswith('*'):
162 162 basedir = tip.get_node(vcspath.dirname(pattern))
163 163 directories = basedir.dirs
164 164 else:
165 165 directories = (tip.get_node(pattern), )
166 166 except NodeDoesNotExistError:
167 167 continue
168 168 found_items.update(
169 169 (safe_unicode(n.path),
170 170 self.commit_ids[-1])
171 171 for n in directories)
172 172
173 173 def get_name(item):
174 174 return item[0]
175 175
176 176 return OrderedDict(sorted(found_items.items(), key=get_name))
177 177
178 178 def _patterns_from_section(self, section):
179 179 return (pattern for key, pattern in self.config.items(section))
180 180
181 181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
182 182 if self != repo2:
183 183 raise ValueError(
184 184 "Subversion does not support getting common ancestor of"
185 185 " different repositories.")
186 186
187 187 if int(commit_id1) < int(commit_id2):
188 188 return commit_id1
189 189 return commit_id2
190 190
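    # Since Subversion history within one repository is strictly linear,
    # the common ancestor is simply the older (lower-numbered) revision:
    #
    #   repo.get_common_ancestor('10', '25', repo)   # -> '10'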
191 191 def verify(self):
192 192 verify = self._remote.verify()
193 193
194 194 self._remote.invalidate_vcs_cache()
195 195 return verify
196 196
197 197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
198 198 # TODO: johbo: Implement better comparison, this is a very naive
199 199 # version which does not allow to compare branches, tags or folders
200 200 # at all.
201 201 if repo2 != self:
202 202 raise ValueError(
203 203 "Subversion does not support comparison of of different "
204 204 "repositories.")
205 205
206 206 if commit_id1 == commit_id2:
207 207 return []
208 208
209 209 commit_idx1 = self._get_commit_idx(commit_id1)
210 210 commit_idx2 = self._get_commit_idx(commit_id2)
211 211
212 212 commits = [
213 213 self.get_commit(commit_idx=idx)
214 214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
215 215
216 216 return commits
217 217
218 218 def _get_commit_idx(self, commit_id):
219 219 try:
220 220 svn_rev = int(commit_id)
221 221 except (TypeError, ValueError):
222 222 # TODO: johbo: this might be only one case, HEAD, check this
223 223 svn_rev = self._remote.lookup(commit_id)
224 224 commit_idx = svn_rev - 1
225 225 if commit_idx >= len(self.commit_ids):
226 226 raise CommitDoesNotExistError(
227 227 "Commit at index %s does not exist." % (commit_idx, ))
228 228 return commit_idx
229 229
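    # e.g. (sketch): svn revision '3' maps to commit index 2, while
    # 'HEAD' is first resolved via the remote lookup and then shifted
    # by one:
    #
    #   repo._get_commit_idx('3')   # -> 2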
230 230 @staticmethod
231 231 def check_url(url, config):
232 232 """
233 233 Check if `url` is a valid source to import a Subversion repository.
234 234 """
235 235 # convert to URL if it's a local directory
236 236 if os.path.isdir(url):
237 237 url = 'file://' + urllib.pathname2url(url)
238 238 return connection.Svn.check_url(url, config.serialize())
239 239
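    # Usage sketch (path is hypothetical): a local directory is turned
    # into a file:// URL before being handed to the backend:
    #
    #   SubversionRepository.check_url('/srv/svn/repo', config)
    #   # internally checks 'file:///srv/svn/repo'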
240 240 @staticmethod
241 241 def is_valid_repository(path):
242 242 try:
243 243 SubversionRepository(path)
244 244 return True
245 245 except VCSError:
246 246 pass
247 247 return False
248 248
249 249 def _check_path(self):
250 250 if not os.path.exists(self.path):
251 251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
252 252 if not self._remote.is_path_valid_repository(self.path):
253 253 raise VCSError(
254 254 'Path "%s" does not contain a Subversion repository' %
255 255 (self.path, ))
256 256
257 257 @LazyProperty
258 258 def last_change(self):
259 259 """
260 260 Returns the last change made on this repository as a
261 261 `datetime.datetime` object.
262 262 """
263 263 # Subversion always has an initial revision "0" carrying the repository
264 264 # creation date, so this lookup also works for empty repositories.
265 265 last_id = len(self.commit_ids)
266 266 properties = self._remote.revision_properties(last_id)
267 267 return _date_from_svn_properties(properties)
268 268
269 269 @LazyProperty
270 270 def in_memory_commit(self):
271 271 return SubversionInMemoryCommit(self)
272 272
273 273 def get_hook_location(self):
274 274 """
275 275 returns absolute path to location where hooks are stored
276 276 """
277 277 return os.path.join(self.path, 'hooks')
278 278
279 279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
280 translate_tag=None, maybe_unreachable=False):
280 translate_tag=None, maybe_unreachable=False, reference_obj=None):
281 281 if self.is_empty():
282 282 raise EmptyRepositoryError("There are no commits yet")
283 283 if commit_id is not None:
284 284 self._validate_commit_id(commit_id)
285 285 elif commit_idx is not None:
286 286 self._validate_commit_idx(commit_idx)
287 287 try:
288 288 commit_id = self.commit_ids[commit_idx]
289 289 except IndexError:
290 290 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
291 291
292 292 commit_id = self._sanitize_commit_id(commit_id)
293 293 commit = SubversionCommit(repository=self, commit_id=commit_id)
294 294 return commit
295 295
296 296 def get_commits(
297 297 self, start_id=None, end_id=None, start_date=None, end_date=None,
298 298 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
299 299 if self.is_empty():
300 300 raise EmptyRepositoryError("There are no commit_ids yet")
301 301 self._validate_branch_name(branch_name)
302 302
303 303 if start_id is not None:
304 304 self._validate_commit_id(start_id)
305 305 if end_id is not None:
306 306 self._validate_commit_id(end_id)
307 307
308 308 start_raw_id = self._sanitize_commit_id(start_id)
309 309 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
310 310 end_raw_id = self._sanitize_commit_id(end_id)
311 311 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
312 312
313 313 if None not in [start_id, end_id] and start_pos > end_pos:
314 314 raise RepositoryError(
315 315 "Start commit '%s' cannot be after end commit '%s'" %
316 316 (start_id, end_id))
317 317 if end_pos is not None:
318 318 end_pos += 1
319 319
320 320 # Date based filtering
321 321 if start_date or end_date:
322 322 start_raw_id, end_raw_id = self._remote.lookup_interval(
323 323 date_astimestamp(start_date) if start_date else None,
324 324 date_astimestamp(end_date) if end_date else None)
325 325 start_pos = start_raw_id - 1
326 326 end_pos = end_raw_id
327 327
328 328 commit_ids = self.commit_ids
329 329
330 330 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
331 331 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
332 332 svn_rev = long(self.commit_ids[-1])
333 333 commit_ids = self._remote.node_history(
334 334 path=branch_name, revision=svn_rev, limit=None)
335 335 commit_ids = [str(i) for i in reversed(commit_ids)]
336 336
337 337 if start_pos or end_pos:
338 338 commit_ids = commit_ids[start_pos:end_pos]
339 339 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
340 340
341 341 def _sanitize_commit_id(self, commit_id):
342 342 if commit_id and commit_id.isdigit():
343 343 if int(commit_id) <= len(self.commit_ids):
344 344 return commit_id
345 345 else:
346 346 raise CommitDoesNotExistError(
347 347 "Commit %s does not exist." % (commit_id, ))
348 348 if commit_id not in [
349 349 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
350 350 raise CommitDoesNotExistError(
351 351 "Commit id %s not understood." % (commit_id, ))
352 352 svn_rev = self._remote.lookup('HEAD')
353 353 return str(svn_rev)
354 354
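    # Illustrative behaviour for a repository with 42 commits (values
    # are assumed):
    #
    #   repo._sanitize_commit_id('7')      # -> '7'
    #   repo._sanitize_commit_id('HEAD')   # -> '42', via remote lookup
    #   repo._sanitize_commit_id('99')     # raises CommitDoesNotExistError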
355 355 def get_diff(
356 356 self, commit1, commit2, path=None, ignore_whitespace=False,
357 357 context=3, path1=None):
358 358 self._validate_diff_commits(commit1, commit2)
359 359 svn_rev1 = long(commit1.raw_id)
360 360 svn_rev2 = long(commit2.raw_id)
361 361 diff = self._remote.diff(
362 362 svn_rev1, svn_rev2, path1=path1, path2=path,
363 363 ignore_whitespace=ignore_whitespace, context=context)
364 364 return SubversionDiff(diff)
365 365
366 366
367 367 def _sanitize_url(url):
368 368 if '://' not in url:
369 369 url = 'file://' + urllib.pathname2url(url)
370 370 return url
@@ -1,5836 +1,5836 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import (
60 60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
61 61 from rhodecode.lib.utils2 import (
62 62 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
63 63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
64 64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
65 65 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
66 66 JsonRaw
67 67 from rhodecode.lib.ext_json import json
68 68 from rhodecode.lib.caching_query import FromCache
69 69 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
70 70 from rhodecode.lib.encrypt2 import Encryptor
71 71 from rhodecode.lib.exceptions import (
72 72 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
73 73 from rhodecode.model.meta import Base, Session
74 74
75 75 URL_SEP = '/'
76 76 log = logging.getLogger(__name__)
77 77
78 78 # =============================================================================
79 79 # BASE CLASSES
80 80 # =============================================================================
81 81
82 82 # this is propagated from .ini file rhodecode.encrypted_values.secret or
83 83 # beaker.session.secret if first is not set.
84 84 # and initialized at environment.py
85 85 ENCRYPTION_KEY = None
86 86
87 87 # used to sort permissions by types, '#' used here is not allowed to be in
88 88 # usernames, and it's very early in sorted string.printable table.
89 89 PERMISSION_TYPE_SORT = {
90 90 'admin': '####',
91 91 'write': '###',
92 92 'read': '##',
93 93 'none': '#',
94 94 }
95 95
96 96
97 97 def display_user_sort(obj):
98 98 """
99 99 Sort function used to sort permissions in .permissions() function of
100 100 Repository, RepoGroup, UserGroup. It also puts the default user in front
101 101 of all other resources
102 102 """
103 103
104 104 if obj.username == User.DEFAULT_USER:
105 105 return '#####'
106 106 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
107 107 extra_sort_num = '1' # default
108 108
109 109 # NOTE(dan): inactive duplicates goes last
110 110 if getattr(obj, 'duplicate_perm', None):
111 111 extra_sort_num = '9'
112 112 return prefix + extra_sort_num + obj.username
113 113
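# Example sort keys produced above (illustrative):
#
#   default user                          -> '#####'
#   user 'bob' with admin permission      -> '####1bob'
#   inactive duplicate of 'bob' with read -> '##9bob'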
114 114
115 115 def display_user_group_sort(obj):
116 116 """
117 117 Sort function used to sort permissions in .permissions() function of
118 118 Repository, RepoGroup, UserGroup, ordering user groups by permission
119 119 type and name
120 120 """
121 121
122 122 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
123 123 return prefix + obj.users_group_name
124 124
125 125
126 126 def _hash_key(k):
127 127 return sha1_safe(k)
128 128
129 129
130 130 def in_filter_generator(qry, items, limit=500):
131 131 """
132 132 Splits IN() into multiple with OR
133 133 e.g.::
134 134 cnt = Repository.query().filter(
135 135 or_(
136 136 *in_filter_generator(Repository.repo_id, range(100000))
137 137 )).count()
138 138 """
139 139 if not items:
140 140 # empty list will cause empty query which might cause security issues
141 141 # this can lead to hidden unpleasant results
142 142 items = [-1]
143 143
144 144 parts = []
145 145 for chunk in xrange(0, len(items), limit):
146 146 parts.append(
147 147 qry.in_(items[chunk: chunk + limit])
148 148 )
149 149
150 150 return parts
151 151
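# With the default limit of 500, e.g. 1200 ids expand into three IN()
# clauses of 500, 500 and 200 items, which the caller OR-s together --
# keeping each statement below common database IN() size limits.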
152 152
153 153 base_table_args = {
154 154 'extend_existing': True,
155 155 'mysql_engine': 'InnoDB',
156 156 'mysql_charset': 'utf8',
157 157 'sqlite_autoincrement': True
158 158 }
159 159
160 160
161 161 class EncryptedTextValue(TypeDecorator):
162 162 """
163 163 Special column for encrypted long text data, use like::
164 164
165 165 value = Column("encrypted_value", EncryptedValue(), nullable=False)
166 166
167 167 This column is intelligent: if the value is in unencrypted form it is
168 168 returned unchanged, but on save it always encrypts
169 169 """
170 170 impl = Text
171 171
172 172 def process_bind_param(self, value, dialect):
173 173 """
174 174 Setter for storing value
175 175 """
176 176 import rhodecode
177 177 if not value:
178 178 return value
179 179
180 180 # protect against double encrypting if values is already encrypted
181 181 if value.startswith('enc$aes$') \
182 182 or value.startswith('enc$aes_hmac$') \
183 183 or value.startswith('enc2$'):
184 184 raise ValueError('value needs to be in unencrypted format, '
185 185 'i.e. not starting with enc$ or enc2$')
186 186
187 187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
188 188 if algo == 'aes':
189 189 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
190 190 elif algo == 'fernet':
191 191 return Encryptor(ENCRYPTION_KEY).encrypt(value)
192 192 else:
193 193 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
194 194
195 195 def process_result_value(self, value, dialect):
196 196 """
197 197 Getter for retrieving value
198 198 """
199 199
200 200 import rhodecode
201 201 if not value:
202 202 return value
203 203
204 204 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
205 205 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
206 206 if algo == 'aes':
207 207 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
208 208 elif algo == 'fernet':
209 209 return Encryptor(ENCRYPTION_KEY).decrypt(value)
210 210 else:
211 211 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
212 212 return decrypted_data
213 213
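# Round-trip sketch (assumes ENCRYPTION_KEY is initialized and the
# 'aes' algorithm is configured; values are illustrative):
#
#   stored = EncryptedTextValue().process_bind_param(u'secret', None)
#   stored.startswith('enc$aes_hmac$')                       # True
#   EncryptedTextValue().process_result_value(stored, None)  # u'secret'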
214 214
215 215 class BaseModel(object):
216 216 """
217 217 Base Model for all classes
218 218 """
219 219
220 220 @classmethod
221 221 def _get_keys(cls):
222 222 """return column names for this model """
223 223 return class_mapper(cls).c.keys()
224 224
225 225 def get_dict(self):
226 226 """
227 227 return dict with keys and values corresponding
228 228 to this model data """
229 229
230 230 d = {}
231 231 for k in self._get_keys():
232 232 d[k] = getattr(self, k)
233 233
234 234 # also use __json__() if present to get additional fields
235 235 _json_attr = getattr(self, '__json__', None)
236 236 if _json_attr:
237 237 # update with attributes from __json__
238 238 if callable(_json_attr):
239 239 _json_attr = _json_attr()
240 240 for k, val in _json_attr.iteritems():
241 241 d[k] = val
242 242 return d
243 243
244 244 def get_appstruct(self):
245 245 """return list with keys and values tuples corresponding
246 246 to this model data """
247 247
248 248 lst = []
249 249 for k in self._get_keys():
250 250 lst.append((k, getattr(self, k),))
251 251 return lst
252 252
253 253 def populate_obj(self, populate_dict):
254 254 """populate model with data from given populate_dict"""
255 255
256 256 for k in self._get_keys():
257 257 if k in populate_dict:
258 258 setattr(self, k, populate_dict[k])
259 259
260 260 @classmethod
261 261 def query(cls):
262 262 return Session().query(cls)
263 263
264 264 @classmethod
265 265 def get(cls, id_):
266 266 if id_:
267 267 return cls.query().get(id_)
268 268
269 269 @classmethod
270 270 def get_or_404(cls, id_):
271 271 from pyramid.httpexceptions import HTTPNotFound
272 272
273 273 try:
274 274 id_ = int(id_)
275 275 except (TypeError, ValueError):
276 276 raise HTTPNotFound()
277 277
278 278 res = cls.query().get(id_)
279 279 if not res:
280 280 raise HTTPNotFound()
281 281 return res
282 282
283 283 @classmethod
284 284 def getAll(cls):
285 285 # deprecated and left for backward compatibility
286 286 return cls.get_all()
287 287
288 288 @classmethod
289 289 def get_all(cls):
290 290 return cls.query().all()
291 291
292 292 @classmethod
293 293 def delete(cls, id_):
294 294 obj = cls.query().get(id_)
295 295 Session().delete(obj)
296 296
297 297 @classmethod
298 298 def identity_cache(cls, session, attr_name, value):
299 299 exist_in_session = []
300 300 for (item_cls, pkey), instance in session.identity_map.items():
301 301 if cls == item_cls and getattr(instance, attr_name) == value:
302 302 exist_in_session.append(instance)
303 303 if exist_in_session:
304 304 if len(exist_in_session) == 1:
305 305 return exist_in_session[0]
306 306 log.exception(
307 307 'multiple objects with attr %s and '
308 308 'value %s found in the session: %r',
309 309 attr_name, value, exist_in_session)
310 310
311 311 def __repr__(self):
312 312 if hasattr(self, '__unicode__'):
313 313 # python repr needs to return str
314 314 try:
315 315 return safe_str(self.__unicode__())
316 316 except UnicodeDecodeError:
317 317 pass
318 318 return '<DB:%s>' % (self.__class__.__name__)
319 319
320 320
321 321 class RhodeCodeSetting(Base, BaseModel):
322 322 __tablename__ = 'rhodecode_settings'
323 323 __table_args__ = (
324 324 UniqueConstraint('app_settings_name'),
325 325 base_table_args
326 326 )
327 327
328 328 SETTINGS_TYPES = {
329 329 'str': safe_str,
330 330 'int': safe_int,
331 331 'unicode': safe_unicode,
332 332 'bool': str2bool,
333 333 'list': functools.partial(aslist, sep=',')
334 334 }
335 335 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
336 336 GLOBAL_CONF_KEY = 'app_settings'
337 337
338 338 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
339 339 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
340 340 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
341 341 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
342 342
343 343 def __init__(self, key='', val='', type='unicode'):
344 344 self.app_settings_name = key
345 345 self.app_settings_type = type
346 346 self.app_settings_value = val
347 347
348 348 @validates('_app_settings_value')
349 349 def validate_settings_value(self, key, val):
350 350 assert isinstance(val, unicode)
351 351 return val
352 352
353 353 @hybrid_property
354 354 def app_settings_value(self):
355 355 v = self._app_settings_value
356 356 _type = self.app_settings_type
357 357 if _type:
358 358 _type = self.app_settings_type.split('.')[0]
359 359 # decode the encrypted value
360 360 if 'encrypted' in self.app_settings_type:
361 361 cipher = EncryptedTextValue()
362 362 v = safe_unicode(cipher.process_result_value(v, None))
363 363
364 364 converter = self.SETTINGS_TYPES.get(_type) or \
365 365 self.SETTINGS_TYPES['unicode']
366 366 return converter(v)
367 367
368 368 @app_settings_value.setter
369 369 def app_settings_value(self, val):
370 370 """
371 371 Setter that will always make sure we use unicode in app_settings_value
372 372
373 373 :param val:
374 374 """
375 375 val = safe_unicode(val)
376 376 # encode the encrypted value
377 377 if 'encrypted' in self.app_settings_type:
378 378 cipher = EncryptedTextValue()
379 379 val = safe_unicode(cipher.process_bind_param(val, None))
380 380 self._app_settings_value = val
381 381
382 382 @hybrid_property
383 383 def app_settings_type(self):
384 384 return self._app_settings_type
385 385
386 386 @app_settings_type.setter
387 387 def app_settings_type(self, val):
388 388 if val.split('.')[0] not in self.SETTINGS_TYPES:
389 389 raise Exception('type must be one of %s got %s'
390 390 % (self.SETTINGS_TYPES.keys(), val))
391 391 self._app_settings_type = val
392 392
393 393 @classmethod
394 394 def get_by_prefix(cls, prefix):
395 395 return RhodeCodeSetting.query()\
396 396 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
397 397 .all()
398 398
399 399 def __unicode__(self):
400 400 return u"<%s('%s:%s[%s]')>" % (
401 401 self.__class__.__name__,
402 402 self.app_settings_name, self.app_settings_value,
403 403 self.app_settings_type
404 404 )
405 405
406 406
407 407 class RhodeCodeUi(Base, BaseModel):
408 408 __tablename__ = 'rhodecode_ui'
409 409 __table_args__ = (
410 410 UniqueConstraint('ui_key'),
411 411 base_table_args
412 412 )
413 413
414 414 HOOK_REPO_SIZE = 'changegroup.repo_size'
415 415 # HG
416 416 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
417 417 HOOK_PULL = 'outgoing.pull_logger'
418 418 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
419 419 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
420 420 HOOK_PUSH = 'changegroup.push_logger'
421 421 HOOK_PUSH_KEY = 'pushkey.key_push'
422 422
423 423 HOOKS_BUILTIN = [
424 424 HOOK_PRE_PULL,
425 425 HOOK_PULL,
426 426 HOOK_PRE_PUSH,
427 427 HOOK_PRETX_PUSH,
428 428 HOOK_PUSH,
429 429 HOOK_PUSH_KEY,
430 430 ]
431 431
432 432 # TODO: johbo: Unify way how hooks are configured for git and hg,
433 433 # git part is currently hardcoded.
434 434
435 435 # SVN PATTERNS
436 436 SVN_BRANCH_ID = 'vcs_svn_branch'
437 437 SVN_TAG_ID = 'vcs_svn_tag'
438 438
439 439 ui_id = Column(
440 440 "ui_id", Integer(), nullable=False, unique=True, default=None,
441 441 primary_key=True)
442 442 ui_section = Column(
443 443 "ui_section", String(255), nullable=True, unique=None, default=None)
444 444 ui_key = Column(
445 445 "ui_key", String(255), nullable=True, unique=None, default=None)
446 446 ui_value = Column(
447 447 "ui_value", String(255), nullable=True, unique=None, default=None)
448 448 ui_active = Column(
449 449 "ui_active", Boolean(), nullable=True, unique=None, default=True)
450 450
451 451 def __repr__(self):
452 452 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
453 453 self.ui_key, self.ui_value)
454 454
455 455
456 456 class RepoRhodeCodeSetting(Base, BaseModel):
457 457 __tablename__ = 'repo_rhodecode_settings'
458 458 __table_args__ = (
459 459 UniqueConstraint(
460 460 'app_settings_name', 'repository_id',
461 461 name='uq_repo_rhodecode_setting_name_repo_id'),
462 462 base_table_args
463 463 )
464 464
465 465 repository_id = Column(
466 466 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
467 467 nullable=False)
468 468 app_settings_id = Column(
469 469 "app_settings_id", Integer(), nullable=False, unique=True,
470 470 default=None, primary_key=True)
471 471 app_settings_name = Column(
472 472 "app_settings_name", String(255), nullable=True, unique=None,
473 473 default=None)
474 474 _app_settings_value = Column(
475 475 "app_settings_value", String(4096), nullable=True, unique=None,
476 476 default=None)
477 477 _app_settings_type = Column(
478 478 "app_settings_type", String(255), nullable=True, unique=None,
479 479 default=None)
480 480
481 481 repository = relationship('Repository')
482 482
483 483 def __init__(self, repository_id, key='', val='', type='unicode'):
484 484 self.repository_id = repository_id
485 485 self.app_settings_name = key
486 486 self.app_settings_type = type
487 487 self.app_settings_value = val
488 488
489 489 @validates('_app_settings_value')
490 490 def validate_settings_value(self, key, val):
491 491 assert isinstance(val, unicode)
492 492 return val
493 493
494 494 @hybrid_property
495 495 def app_settings_value(self):
496 496 v = self._app_settings_value
497 497 type_ = self.app_settings_type
498 498 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
499 499 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
500 500 return converter(v)
501 501
502 502 @app_settings_value.setter
503 503 def app_settings_value(self, val):
504 504 """
505 505 Setter that will always make sure we use unicode in app_settings_value
506 506
507 507 :param val:
508 508 """
509 509 self._app_settings_value = safe_unicode(val)
510 510
511 511 @hybrid_property
512 512 def app_settings_type(self):
513 513 return self._app_settings_type
514 514
515 515 @app_settings_type.setter
516 516 def app_settings_type(self, val):
517 517 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
518 518 if val not in SETTINGS_TYPES:
519 519 raise Exception('type must be one of %s got %s'
520 520 % (SETTINGS_TYPES.keys(), val))
521 521 self._app_settings_type = val
522 522
523 523 def __unicode__(self):
524 524 return u"<%s('%s:%s:%s[%s]')>" % (
525 525 self.__class__.__name__, self.repository.repo_name,
526 526 self.app_settings_name, self.app_settings_value,
527 527 self.app_settings_type
528 528 )
529 529
530 530
531 531 class RepoRhodeCodeUi(Base, BaseModel):
532 532 __tablename__ = 'repo_rhodecode_ui'
533 533 __table_args__ = (
534 534 UniqueConstraint(
535 535 'repository_id', 'ui_section', 'ui_key',
536 536 name='uq_repo_rhodecode_ui_repository_id_section_key'),
537 537 base_table_args
538 538 )
539 539
540 540 repository_id = Column(
541 541 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
542 542 nullable=False)
543 543 ui_id = Column(
544 544 "ui_id", Integer(), nullable=False, unique=True, default=None,
545 545 primary_key=True)
546 546 ui_section = Column(
547 547 "ui_section", String(255), nullable=True, unique=None, default=None)
548 548 ui_key = Column(
549 549 "ui_key", String(255), nullable=True, unique=None, default=None)
550 550 ui_value = Column(
551 551 "ui_value", String(255), nullable=True, unique=None, default=None)
552 552 ui_active = Column(
553 553 "ui_active", Boolean(), nullable=True, unique=None, default=True)
554 554
555 555 repository = relationship('Repository')
556 556
557 557 def __repr__(self):
558 558 return '<%s[%s:%s]%s=>%s]>' % (
559 559 self.__class__.__name__, self.repository.repo_name,
560 560 self.ui_section, self.ui_key, self.ui_value)
561 561
562 562
563 563 class User(Base, BaseModel):
564 564 __tablename__ = 'users'
565 565 __table_args__ = (
566 566 UniqueConstraint('username'), UniqueConstraint('email'),
567 567 Index('u_username_idx', 'username'),
568 568 Index('u_email_idx', 'email'),
569 569 base_table_args
570 570 )
571 571
572 572 DEFAULT_USER = 'default'
573 573 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
574 574 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
575 575
576 576 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
577 577 username = Column("username", String(255), nullable=True, unique=None, default=None)
578 578 password = Column("password", String(255), nullable=True, unique=None, default=None)
579 579 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
580 580 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
581 581 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
582 582 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
583 583 _email = Column("email", String(255), nullable=True, unique=None, default=None)
584 584 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
585 585 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
586 586 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
587 587
588 588 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
589 589 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
590 590 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
591 591 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
592 592 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
593 593 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
594 594
595 595 user_log = relationship('UserLog')
596 596 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
597 597
598 598 repositories = relationship('Repository')
599 599 repository_groups = relationship('RepoGroup')
600 600 user_groups = relationship('UserGroup')
601 601
602 602 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
603 603 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
604 604
605 605 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
606 606 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
607 607 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
608 608
609 609 group_member = relationship('UserGroupMember', cascade='all')
610 610
611 611 notifications = relationship('UserNotification', cascade='all')
613 613 # notifications created by this user
613 613 user_created_notifications = relationship('Notification', cascade='all')
614 614 # comments created by this user
615 615 user_comments = relationship('ChangesetComment', cascade='all')
616 616 # user profile extra info
617 617 user_emails = relationship('UserEmailMap', cascade='all')
618 618 user_ip_map = relationship('UserIpMap', cascade='all')
619 619 user_auth_tokens = relationship('UserApiKeys', cascade='all')
620 620 user_ssh_keys = relationship('UserSshKeys', cascade='all')
621 621
622 622 # gists
623 623 user_gists = relationship('Gist', cascade='all')
624 624 # user pull requests
625 625 user_pull_requests = relationship('PullRequest', cascade='all')
626 626
627 627 # external identities
628 628 external_identities = relationship(
629 629 'ExternalIdentity',
630 630 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
631 631 cascade='all')
632 632 # review rules
633 633 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
634 634
635 635 # artifacts owned
636 636 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
637 637
638 638 # no cascade, set NULL
639 639 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
640 640
641 641 def __unicode__(self):
642 642 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
643 643 self.user_id, self.username)
644 644
645 645 @hybrid_property
646 646 def email(self):
647 647 return self._email
648 648
649 649 @email.setter
650 650 def email(self, val):
651 651 self._email = val.lower() if val else None
652 652
653 653 @hybrid_property
654 654 def first_name(self):
655 655 from rhodecode.lib import helpers as h
656 656 if self.name:
657 657 return h.escape(self.name)
658 658 return self.name
659 659
660 660 @hybrid_property
661 661 def last_name(self):
662 662 from rhodecode.lib import helpers as h
663 663 if self.lastname:
664 664 return h.escape(self.lastname)
665 665 return self.lastname
666 666
667 667 @hybrid_property
668 668 def api_key(self):
669 669 """
670 670 Fetch an auth-token with role ALL connected to this user, if one exists
671 671 """
672 672 user_auth_token = UserApiKeys.query()\
673 673 .filter(UserApiKeys.user_id == self.user_id)\
674 674 .filter(or_(UserApiKeys.expires == -1,
675 675 UserApiKeys.expires >= time.time()))\
676 676 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
677 677 if user_auth_token:
678 678 user_auth_token = user_auth_token.api_key
679 679
680 680 return user_auth_token
681 681
682 682 @api_key.setter
683 683 def api_key(self, val):
684 684 # don't allow setting the API key; this is deprecated for now
685 685 self._api_key = None
686 686
687 687 @property
688 688 def reviewer_pull_requests(self):
689 689 return PullRequestReviewers.query() \
690 690 .options(joinedload(PullRequestReviewers.pull_request)) \
691 691 .filter(PullRequestReviewers.user_id == self.user_id) \
692 692 .all()
693 693
694 694 @property
695 695 def firstname(self):
696 696 # alias for future
697 697 return self.name
698 698
699 699 @property
700 700 def emails(self):
701 701 other = UserEmailMap.query()\
702 702 .filter(UserEmailMap.user == self) \
703 703 .order_by(UserEmailMap.email_id.asc()) \
704 704 .all()
705 705 return [self.email] + [x.email for x in other]
706 706
707 707 def emails_cached(self):
708 708 emails = UserEmailMap.query()\
709 709 .filter(UserEmailMap.user == self) \
710 710 .order_by(UserEmailMap.email_id.asc())
711 711
712 712 emails = emails.options(
713 713 FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
714 714 )
715 715
716 716 return [self.email] + [x.email for x in emails]
717 717
718 718 @property
719 719 def auth_tokens(self):
720 720 auth_tokens = self.get_auth_tokens()
721 721 return [x.api_key for x in auth_tokens]
722 722
723 723 def get_auth_tokens(self):
724 724 return UserApiKeys.query()\
725 725 .filter(UserApiKeys.user == self)\
726 726 .order_by(UserApiKeys.user_api_key_id.asc())\
727 727 .all()
728 728
729 729 @LazyProperty
730 730 def feed_token(self):
731 731 return self.get_feed_token()
732 732
733 733 def get_feed_token(self, cache=True):
734 734 feed_tokens = UserApiKeys.query()\
735 735 .filter(UserApiKeys.user == self)\
736 736 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
737 737 if cache:
738 738 feed_tokens = feed_tokens.options(
739 739 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
740 740
741 741 feed_tokens = feed_tokens.all()
742 742 if feed_tokens:
743 743 return feed_tokens[0].api_key
744 744 return 'NO_FEED_TOKEN_AVAILABLE'
745 745
746 746 @LazyProperty
747 747 def artifact_token(self):
748 748 return self.get_artifact_token()
749 749
750 750 def get_artifact_token(self, cache=True):
751 751 artifacts_tokens = UserApiKeys.query()\
752 752 .filter(UserApiKeys.user == self) \
753 753 .filter(or_(UserApiKeys.expires == -1,
754 754 UserApiKeys.expires >= time.time())) \
755 755 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
756 756
757 757 if cache:
758 758 artifacts_tokens = artifacts_tokens.options(
759 759 FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))
760 760
761 761 artifacts_tokens = artifacts_tokens.all()
762 762 if artifacts_tokens:
763 763 return artifacts_tokens[0].api_key
764 764 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
765 765
766 766 def get_or_create_artifact_token(self):
767 767 artifacts_tokens = UserApiKeys.query()\
768 768 .filter(UserApiKeys.user == self) \
769 769 .filter(or_(UserApiKeys.expires == -1,
770 770 UserApiKeys.expires >= time.time())) \
771 771 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
772 772
773 773 artifacts_tokens = artifacts_tokens.all()
774 774 if artifacts_tokens:
775 775 return artifacts_tokens[0].api_key
776 776 else:
777 777 from rhodecode.model.auth_token import AuthTokenModel
778 778 artifact_token = AuthTokenModel().create(
779 779 self, 'auto-generated-artifact-token',
780 780 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
781 781 Session.commit()
782 782 return artifact_token.api_key
783 783
784 784 @classmethod
785 785 def get(cls, user_id, cache=False):
786 786 if not user_id:
787 787 return
788 788
789 789 user = cls.query()
790 790 if cache:
791 791 user = user.options(
792 792 FromCache("sql_cache_short", "get_users_%s" % user_id))
793 793 return user.get(user_id)
794 794
795 795 @classmethod
796 796 def extra_valid_auth_tokens(cls, user, role=None):
797 797 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
798 798 .filter(or_(UserApiKeys.expires == -1,
799 799 UserApiKeys.expires >= time.time()))
800 800 if role:
801 801 tokens = tokens.filter(or_(UserApiKeys.role == role,
802 802 UserApiKeys.role == UserApiKeys.ROLE_ALL))
803 803 return tokens.all()
804 804
805 805 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
806 806 from rhodecode.lib import auth
807 807
808 808 log.debug('Trying to authenticate user: %s via auth-token, '
809 809 'and roles: %s', self, roles)
810 810
811 811 if not auth_token:
812 812 return False
813 813
814 814 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
815 815 tokens_q = UserApiKeys.query()\
816 816 .filter(UserApiKeys.user_id == self.user_id)\
817 817 .filter(or_(UserApiKeys.expires == -1,
818 818 UserApiKeys.expires >= time.time()))
819 819
820 820 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
821 821
822 822 crypto_backend = auth.crypto_backend()
823 823 enc_token_map = {}
824 824 plain_token_map = {}
825 825 for token in tokens_q:
826 826 if token.api_key.startswith(crypto_backend.ENC_PREF):
827 827 enc_token_map[token.api_key] = token
828 828 else:
829 829 plain_token_map[token.api_key] = token
830 830 log.debug(
831 831 'Found %s plain and %s encrypted tokens to check for authentication for this user',
832 832 len(plain_token_map), len(enc_token_map))
833 833
834 834 # plain token match comes first
835 835 match = plain_token_map.get(auth_token)
836 836
837 837 # check encrypted tokens now
838 838 if not match:
839 839 for token_hash, token in enc_token_map.items():
840 840 # NOTE(marcink): this is expensive to calculate, but most secure
841 841 if crypto_backend.hash_check(auth_token, token_hash):
842 842 match = token
843 843 break
844 844
845 845 if match:
846 846 log.debug('Found matching token %s', match)
847 847 if match.repo_id:
848 848 log.debug('Found scope, checking for scope match of token %s', match)
849 849 if match.repo_id == scope_repo_id:
850 850 return True
851 851 else:
852 852 log.debug(
853 853 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
854 854 'and calling scope is:%s, skipping further checks',
855 855 match.repo, scope_repo_id)
856 856 return False
857 857 else:
858 858 return True
859 859
860 860 return False
861 861
862 862 @property
863 863 def ip_addresses(self):
864 864 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
865 865 return [x.ip_addr for x in ret]
866 866
867 867 @property
868 868 def username_and_name(self):
869 869 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
870 870
871 871 @property
872 872 def username_or_name_or_email(self):
873 873 full_name = self.full_name if self.full_name != ' ' else None
874 874 return self.username or full_name or self.email
875 875
876 876 @property
877 877 def full_name(self):
878 878 return '%s %s' % (self.first_name, self.last_name)
879 879
880 880 @property
881 881 def full_name_or_username(self):
882 882 return ('%s %s' % (self.first_name, self.last_name)
883 883 if (self.first_name and self.last_name) else self.username)
884 884
885 885 @property
886 886 def full_contact(self):
887 887 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
888 888
889 889 @property
890 890 def short_contact(self):
891 891 return '%s %s' % (self.first_name, self.last_name)
892 892
893 893 @property
894 894 def is_admin(self):
895 895 return self.admin
896 896
897 897 @property
898 898 def language(self):
899 899 return self.user_data.get('language')
900 900
901 901 def AuthUser(self, **kwargs):
902 902 """
903 903 Returns instance of AuthUser for this user
904 904 """
905 905 from rhodecode.lib.auth import AuthUser
906 906 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
907 907
908 908 @hybrid_property
909 909 def user_data(self):
910 910 if not self._user_data:
911 911 return {}
912 912
913 913 try:
914 914 return json.loads(self._user_data)
915 915 except TypeError:
916 916 return {}
917 917
918 918 @user_data.setter
919 919 def user_data(self, val):
920 920 if not isinstance(val, dict):
921 921 raise Exception('user_data must be dict, got %s' % type(val))
922 922 try:
923 923 self._user_data = json.dumps(val)
924 924 except Exception:
925 925 log.error(traceback.format_exc())
926 926
927 927 @classmethod
928 928 def get_by_username(cls, username, case_insensitive=False,
929 929 cache=False, identity_cache=False):
930 930 session = Session()
931 931
932 932 if case_insensitive:
933 933 q = cls.query().filter(
934 934 func.lower(cls.username) == func.lower(username))
935 935 else:
936 936 q = cls.query().filter(cls.username == username)
937 937
938 938 if cache:
939 939 if identity_cache:
940 940 val = cls.identity_cache(session, 'username', username)
941 941 if val:
942 942 return val
943 943 else:
944 944 cache_key = "get_user_by_name_%s" % _hash_key(username)
945 945 q = q.options(
946 946 FromCache("sql_cache_short", cache_key))
947 947
948 948 return q.scalar()
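
# Typical lookups via the classmethod above (illustrative; usernames are
# hypothetical):
#   User.get_by_username('admin')
#   User.get_by_username('ADMIN', case_insensitive=True)
#   User.get_by_username('admin', cache=True)  # uses the "sql_cache_short" result cache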
949 949
950 950 @classmethod
951 951 def get_by_auth_token(cls, auth_token, cache=False):
952 952 q = UserApiKeys.query()\
953 953 .filter(UserApiKeys.api_key == auth_token)\
954 954 .filter(or_(UserApiKeys.expires == -1,
955 955 UserApiKeys.expires >= time.time()))
956 956 if cache:
957 957 q = q.options(
958 958 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
959 959
960 960 match = q.first()
961 961 if match:
962 962 return match.user
963 963
964 964 @classmethod
965 965 def get_by_email(cls, email, case_insensitive=False, cache=False):
966 966
967 967 if case_insensitive:
968 968 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
969 969
970 970 else:
971 971 q = cls.query().filter(cls.email == email)
972 972
973 973 email_key = _hash_key(email)
974 974 if cache:
975 975 q = q.options(
976 976 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
977 977
978 978 ret = q.scalar()
979 979 if ret is None:
980 980 q = UserEmailMap.query()
981 981 # try fetching in alternate email map
982 982 if case_insensitive:
983 983 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
984 984 else:
985 985 q = q.filter(UserEmailMap.email == email)
986 986 q = q.options(joinedload(UserEmailMap.user))
987 987 if cache:
988 988 q = q.options(
989 989 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
990 990 ret = getattr(q.scalar(), 'user', None)
991 991
992 992 return ret
993 993
994 994 @classmethod
995 995 def get_from_cs_author(cls, author):
996 996 """
997 997 Tries to get User objects out of commit author string
998 998
999 999 :param author: commit author string, e.g. "John Doe <john@example.com>"
1000 1000 """
1001 1001 from rhodecode.lib.helpers import email, author_name
1002 1002 # Valid email in the author string passed, see if it's in the system
1003 1003 _email = email(author)
1004 1004 if _email:
1005 1005 user = cls.get_by_email(_email, case_insensitive=True)
1006 1006 if user:
1007 1007 return user
1008 1008 # Maybe we can match by username?
1009 1009 _author = author_name(author)
1010 1010 user = cls.get_by_username(_author, case_insensitive=True)
1011 1011 if user:
1012 1012 return user
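
# Illustrative resolution order of get_from_cs_author (hypothetical data):
#   User.get_from_cs_author('John Doe <john@example.com>')  # by email first
#   User.get_from_cs_author('jdoe')                         # then by username
#   # returns None implicitly when neither matches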
1013 1013
1014 1014 def update_userdata(self, **kwargs):
1015 1015 usr = self
1016 1016 old = usr.user_data
1017 1017 old.update(**kwargs)
1018 1018 usr.user_data = old
1019 1019 Session().add(usr)
1020 1020 log.debug('updated userdata with %s', kwargs)
1021 1021
1022 1022 def update_lastlogin(self):
1023 1023 """Update user lastlogin"""
1024 1024 self.last_login = datetime.datetime.now()
1025 1025 Session().add(self)
1026 1026 log.debug('updated user %s lastlogin', self.username)
1027 1027
1028 1028 def update_password(self, new_password):
1029 1029 from rhodecode.lib.auth import get_crypt_password
1030 1030
1031 1031 self.password = get_crypt_password(new_password)
1032 1032 Session().add(self)
1033 1033
1034 1034 @classmethod
1035 1035 def get_first_super_admin(cls):
1036 1036 user = User.query()\
1037 1037 .filter(User.admin == true()) \
1038 1038 .order_by(User.user_id.asc()) \
1039 1039 .first()
1040 1040
1041 1041 if user is None:
1042 1042 raise Exception('FATAL: Missing administrative account!')
1043 1043 return user
1044 1044
1045 1045 @classmethod
1046 1046 def get_all_super_admins(cls, only_active=False):
1047 1047 """
1048 1048 Returns all admin accounts sorted by username
1049 1049 """
1050 1050 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1051 1051 if only_active:
1052 1052 qry = qry.filter(User.active == true())
1053 1053 return qry.all()
1054 1054
1055 1055 @classmethod
1056 1056 def get_all_user_ids(cls, only_active=True):
1057 1057 """
1058 1058 Returns all user IDs
1059 1059 """
1060 1060 qry = Session().query(User.user_id)
1061 1061
1062 1062 if only_active:
1063 1063 qry = qry.filter(User.active == true())
1064 1064 return [x.user_id for x in qry]
1065 1065
1066 1066 @classmethod
1067 1067 def get_default_user(cls, cache=False, refresh=False):
1068 1068 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1069 1069 if user is None:
1070 1070 raise Exception('FATAL: Missing default account!')
1071 1071 if refresh:
1072 1072 # The default user might be based on outdated state which
1073 1073 # has been loaded from the cache.
1074 1074 # A call to refresh() ensures that the
1075 1075 # latest state from the database is used.
1076 1076 Session().refresh(user)
1077 1077 return user
1078 1078
1079 1079 @classmethod
1080 1080 def get_default_user_id(cls):
1081 1081 import rhodecode
1082 1082 return rhodecode.CONFIG['default_user_id']
1083 1083
1084 1084 def _get_default_perms(self, user, suffix=''):
1085 1085 from rhodecode.model.permission import PermissionModel
1086 1086 return PermissionModel().get_default_perms(user.user_perms, suffix)
1087 1087
1088 1088 def get_default_perms(self, suffix=''):
1089 1089 return self._get_default_perms(self, suffix)
1090 1090
1091 1091 def get_api_data(self, include_secrets=False, details='full'):
1092 1092 """
1093 1093 Common function for generating user related data for API
1094 1094
1095 1095 :param include_secrets: By default secrets in the API data will be replaced
1096 1096 by a placeholder value to prevent exposing this data by accident. In case
1097 1097 this data shall be exposed, set this flag to ``True``.
1098 1098
1099 1099 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
1100 1100 the available user information: user_id, names and emails.
1101 1101 """
1102 1102 user = self
1103 1103 user_data = self.user_data
1104 1104 data = {
1105 1105 'user_id': user.user_id,
1106 1106 'username': user.username,
1107 1107 'firstname': user.name,
1108 1108 'lastname': user.lastname,
1109 1109 'description': user.description,
1110 1110 'email': user.email,
1111 1111 'emails': user.emails,
1112 1112 }
1113 1113 if details == 'basic':
1114 1114 return data
1115 1115
1116 1116 auth_token_length = 40
1117 1117 auth_token_replacement = '*' * auth_token_length
1118 1118
1119 1119 extras = {
1120 1120 'auth_tokens': [auth_token_replacement],
1121 1121 'active': user.active,
1122 1122 'admin': user.admin,
1123 1123 'extern_type': user.extern_type,
1124 1124 'extern_name': user.extern_name,
1125 1125 'last_login': user.last_login,
1126 1126 'last_activity': user.last_activity,
1127 1127 'ip_addresses': user.ip_addresses,
1128 1128 'language': user_data.get('language')
1129 1129 }
1130 1130 data.update(extras)
1131 1131
1132 1132 if include_secrets:
1133 1133 data['auth_tokens'] = user.auth_tokens
1134 1134 return data
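
# Illustrative use of get_api_data (not part of the original code):
#   user.get_api_data(details='basic')       # basic subset: ids, names, emails
#   user.get_api_data()                      # full data, tokens masked as '****...'
#   user.get_api_data(include_secrets=True)  # full data with real auth tokens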
1135 1135
1136 1136 def __json__(self):
1137 1137 data = {
1138 1138 'full_name': self.full_name,
1139 1139 'full_name_or_username': self.full_name_or_username,
1140 1140 'short_contact': self.short_contact,
1141 1141 'full_contact': self.full_contact,
1142 1142 }
1143 1143 data.update(self.get_api_data())
1144 1144 return data
1145 1145
1146 1146
1147 1147 class UserApiKeys(Base, BaseModel):
1148 1148 __tablename__ = 'user_api_keys'
1149 1149 __table_args__ = (
1150 1150 Index('uak_api_key_idx', 'api_key'),
1151 1151 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1152 1152 base_table_args
1153 1153 )
1154 1154 __mapper_args__ = {}
1155 1155
1156 1156 # ApiKey role
1157 1157 ROLE_ALL = 'token_role_all'
1158 1158 ROLE_VCS = 'token_role_vcs'
1159 1159 ROLE_API = 'token_role_api'
1160 1160 ROLE_HTTP = 'token_role_http'
1161 1161 ROLE_FEED = 'token_role_feed'
1162 1162 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1163 1163 # The password-reset role below is excluded from the ROLES list, as it
1164 1164 # is only used for one action and cannot be created by users
1165 1165 ROLE_PASSWORD_RESET = 'token_password_reset'
1166 1166
1167 1167 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1168 1168
1169 1169 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1170 1170 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1171 1171 api_key = Column("api_key", String(255), nullable=False, unique=True)
1172 1172 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1173 1173 expires = Column('expires', Float(53), nullable=False)
1174 1174 role = Column('role', String(255), nullable=True)
1175 1175 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1176 1176
1177 1177 # scope columns
1178 1178 repo_id = Column(
1179 1179 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1180 1180 nullable=True, unique=None, default=None)
1181 1181 repo = relationship('Repository', lazy='joined')
1182 1182
1183 1183 repo_group_id = Column(
1184 1184 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1185 1185 nullable=True, unique=None, default=None)
1186 1186 repo_group = relationship('RepoGroup', lazy='joined')
1187 1187
1188 1188 user = relationship('User', lazy='joined')
1189 1189
1190 1190 def __unicode__(self):
1191 1191 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1192 1192
1193 1193 def __json__(self):
1194 1194 data = {
1195 1195 'auth_token': self.api_key,
1196 1196 'role': self.role,
1197 1197 'scope': self.scope_humanized,
1198 1198 'expired': self.expired
1199 1199 }
1200 1200 return data
1201 1201
1202 1202 def get_api_data(self, include_secrets=False):
1203 1203 data = self.__json__()
1204 1204 if include_secrets:
1205 1205 return data
1206 1206 else:
1207 1207 data['auth_token'] = self.token_obfuscated
1208 1208 return data
1209 1209
1210 1210 @hybrid_property
1211 1211 def description_safe(self):
1212 1212 from rhodecode.lib import helpers as h
1213 1213 return h.escape(self.description)
1214 1214
1215 1215 @property
1216 1216 def expired(self):
1217 1217 if self.expires == -1:
1218 1218 return False
1219 1219 return time.time() > self.expires
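
# Expiry semantics of the property above (illustrative values):
#   token.expires = -1                    # never expires -> expired is False
#   token.expires = time.time() + 3600    # valid for one more hour
#   token.expires = time.time() - 1       # already past -> expired is True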
1220 1220
1221 1221 @classmethod
1222 1222 def _get_role_name(cls, role):
1223 1223 return {
1224 1224 cls.ROLE_ALL: _('all'),
1225 1225 cls.ROLE_HTTP: _('http/web interface'),
1226 1226 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1227 1227 cls.ROLE_API: _('api calls'),
1228 1228 cls.ROLE_FEED: _('feed access'),
1229 1229 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1230 1230 }.get(role, role)
1231 1231
1232 1232 @classmethod
1233 1233 def _get_role_description(cls, role):
1234 1234 return {
1235 1235 cls.ROLE_ALL: _('Token for all actions.'),
1236 1236 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1237 1237 'login using `api_access_controllers_whitelist` functionality.'),
1238 1238 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1239 1239 'Requires auth_token authentication plugin to be active. <br/>'
1240 1240 'Such a token should then be used instead of a password to '
1241 1241 'interact with a repository, and can additionally be '
1242 1242 'limited to a single repository using a repo scope.'),
1243 1243 cls.ROLE_API: _('Token limited to api calls.'),
1244 1244 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1245 1245 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1246 1246 }.get(role, role)
1247 1247
1248 1248 @property
1249 1249 def role_humanized(self):
1250 1250 return self._get_role_name(self.role)
1251 1251
1252 1252 def _get_scope(self):
1253 1253 if self.repo:
1254 1254 return 'Repository: {}'.format(self.repo.repo_name)
1255 1255 if self.repo_group:
1256 1256 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1257 1257 return 'Global'
1258 1258
1259 1259 @property
1260 1260 def scope_humanized(self):
1261 1261 return self._get_scope()
1262 1262
1263 1263 @property
1264 1264 def token_obfuscated(self):
1265 1265 if self.api_key:
1266 1266 return self.api_key[:4] + "****"
1267 1267
1268 1268
1269 1269 class UserEmailMap(Base, BaseModel):
1270 1270 __tablename__ = 'user_email_map'
1271 1271 __table_args__ = (
1272 1272 Index('uem_email_idx', 'email'),
1273 1273 UniqueConstraint('email'),
1274 1274 base_table_args
1275 1275 )
1276 1276 __mapper_args__ = {}
1277 1277
1278 1278 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1279 1279 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1280 1280 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1281 1281 user = relationship('User', lazy='joined')
1282 1282
1283 1283 @validates('_email')
1284 1284 def validate_email(self, key, email):
1285 1285 # check that this email is not already the main one of some user
1286 1286 main_email = Session().query(User).filter(User.email == email).scalar()
1287 1287 if main_email is not None:
1288 1288 raise AttributeError('email %s is already present in the user table' % email)
1289 1289 return email
1290 1290
1291 1291 @hybrid_property
1292 1292 def email(self):
1293 1293 return self._email
1294 1294
1295 1295 @email.setter
1296 1296 def email(self, val):
1297 1297 self._email = val.lower() if val else None
1298 1298
1299 1299
1300 1300 class UserIpMap(Base, BaseModel):
1301 1301 __tablename__ = 'user_ip_map'
1302 1302 __table_args__ = (
1303 1303 UniqueConstraint('user_id', 'ip_addr'),
1304 1304 base_table_args
1305 1305 )
1306 1306 __mapper_args__ = {}
1307 1307
1308 1308 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1309 1309 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1310 1310 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1311 1311 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1312 1312 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1313 1313 user = relationship('User', lazy='joined')
1314 1314
1315 1315 @hybrid_property
1316 1316 def description_safe(self):
1317 1317 from rhodecode.lib import helpers as h
1318 1318 return h.escape(self.description)
1319 1319
1320 1320 @classmethod
1321 1321 def _get_ip_range(cls, ip_addr):
1322 1322 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1323 1323 return [str(net.network_address), str(net.broadcast_address)]
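
# Illustrative results (standard `ipaddress` network semantics):
#   UserIpMap._get_ip_range('192.168.1.0/24')  # -> ['192.168.1.0', '192.168.1.255']
#   UserIpMap._get_ip_range('10.0.0.1')        # single host is a /32 network
#                                              # -> ['10.0.0.1', '10.0.0.1']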
1324 1324
1325 1325 def __json__(self):
1326 1326 return {
1327 1327 'ip_addr': self.ip_addr,
1328 1328 'ip_range': self._get_ip_range(self.ip_addr),
1329 1329 }
1330 1330
1331 1331 def __unicode__(self):
1332 1332 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1333 1333 self.user_id, self.ip_addr)
1334 1334
1335 1335
1336 1336 class UserSshKeys(Base, BaseModel):
1337 1337 __tablename__ = 'user_ssh_keys'
1338 1338 __table_args__ = (
1339 1339 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1340 1340
1341 1341 UniqueConstraint('ssh_key_fingerprint'),
1342 1342
1343 1343 base_table_args
1344 1344 )
1345 1345 __mapper_args__ = {}
1346 1346
1347 1347 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1348 1348 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1349 1349 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1350 1350
1351 1351 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1352 1352
1353 1353 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1354 1354 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1355 1355 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1356 1356
1357 1357 user = relationship('User', lazy='joined')
1358 1358
1359 1359 def __json__(self):
1360 1360 data = {
1361 1361 'ssh_fingerprint': self.ssh_key_fingerprint,
1362 1362 'description': self.description,
1363 1363 'created_on': self.created_on
1364 1364 }
1365 1365 return data
1366 1366
1367 1367 def get_api_data(self):
1368 1368 data = self.__json__()
1369 1369 return data
1370 1370
1371 1371
1372 1372 class UserLog(Base, BaseModel):
1373 1373 __tablename__ = 'user_logs'
1374 1374 __table_args__ = (
1375 1375 base_table_args,
1376 1376 )
1377 1377
1378 1378 VERSION_1 = 'v1'
1379 1379 VERSION_2 = 'v2'
1380 1380 VERSIONS = [VERSION_1, VERSION_2]
1381 1381
1382 1382 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1383 1383 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1384 1384 username = Column("username", String(255), nullable=True, unique=None, default=None)
1385 1385 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1386 1386 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1387 1387 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1388 1388 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1389 1389 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1390 1390
1391 1391 version = Column("version", String(255), nullable=True, default=VERSION_1)
1392 1392 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1393 1393 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1394 1394
1395 1395 def __unicode__(self):
1396 1396 return u"<%s('id:%s:%s')>" % (
1397 1397 self.__class__.__name__, self.repository_name, self.action)
1398 1398
1399 1399 def __json__(self):
1400 1400 return {
1401 1401 'user_id': self.user_id,
1402 1402 'username': self.username,
1403 1403 'repository_id': self.repository_id,
1404 1404 'repository_name': self.repository_name,
1405 1405 'user_ip': self.user_ip,
1406 1406 'action_date': self.action_date,
1407 1407 'action': self.action,
1408 1408 }
1409 1409
1410 1410 @hybrid_property
1411 1411 def entry_id(self):
1412 1412 return self.user_log_id
1413 1413
1414 1414 @property
1415 1415 def action_as_day(self):
1416 1416 return datetime.date(*self.action_date.timetuple()[:3])
1417 1417
1418 1418 user = relationship('User')
1419 1419 repository = relationship('Repository', cascade='')
1420 1420
1421 1421
1422 1422 class UserGroup(Base, BaseModel):
1423 1423 __tablename__ = 'users_groups'
1424 1424 __table_args__ = (
1425 1425 base_table_args,
1426 1426 )
1427 1427
1428 1428 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1429 1429 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1430 1430 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1431 1431 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1432 1432 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1433 1433 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1434 1434 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1435 1435 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1436 1436
1437 1437 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
1438 1438 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1439 1439 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1440 1440 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1441 1441 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1442 1442 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1443 1443
1444 1444 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1445 1445 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1446 1446
1447 1447 @classmethod
1448 1448 def _load_group_data(cls, column):
1449 1449 if not column:
1450 1450 return {}
1451 1451
1452 1452 try:
1453 1453 return json.loads(column) or {}
1454 1454 except TypeError:
1455 1455 return {}
1456 1456
1457 1457 @hybrid_property
1458 1458 def description_safe(self):
1459 1459 from rhodecode.lib import helpers as h
1460 1460 return h.escape(self.user_group_description)
1461 1461
1462 1462 @hybrid_property
1463 1463 def group_data(self):
1464 1464 return self._load_group_data(self._group_data)
1465 1465
1466 1466 @group_data.expression
1467 1467 def group_data(self, **kwargs):
1468 1468 return self._group_data
1469 1469
1470 1470 @group_data.setter
1471 1471 def group_data(self, val):
1472 1472 try:
1473 1473 self._group_data = json.dumps(val)
1474 1474 except Exception:
1475 1475 log.error(traceback.format_exc())
1476 1476
1477 1477 @classmethod
1478 1478 def _load_sync(cls, group_data):
1479 1479 if group_data:
1480 1480 return group_data.get('extern_type')
1481 1481
1482 1482 @property
1483 1483 def sync(self):
1484 1484 return self._load_sync(self.group_data)
1485 1485
1486 1486 def __unicode__(self):
1487 1487 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1488 1488 self.users_group_id,
1489 1489 self.users_group_name)
1490 1490
1491 1491 @classmethod
1492 1492 def get_by_group_name(cls, group_name, cache=False,
1493 1493 case_insensitive=False):
1494 1494 if case_insensitive:
1495 1495 q = cls.query().filter(func.lower(cls.users_group_name) ==
1496 1496 func.lower(group_name))
1497 1497
1498 1498 else:
1499 1499 q = cls.query().filter(cls.users_group_name == group_name)
1500 1500 if cache:
1501 1501 q = q.options(
1502 1502 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1503 1503 return q.scalar()
1504 1504
1505 1505 @classmethod
1506 1506 def get(cls, user_group_id, cache=False):
1507 1507 if not user_group_id:
1508 1508 return
1509 1509
1510 1510 user_group = cls.query()
1511 1511 if cache:
1512 1512 user_group = user_group.options(
1513 1513 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1514 1514 return user_group.get(user_group_id)
1515 1515
1516 1516 def permissions(self, with_admins=True, with_owner=True,
1517 1517 expand_from_user_groups=False):
1518 1518 """
1519 1519 Permissions for user groups
1520 1520 """
1521 1521 _admin_perm = 'usergroup.admin'
1522 1522
1523 1523 owner_row = []
1524 1524 if with_owner:
1525 1525 usr = AttributeDict(self.user.get_dict())
1526 1526 usr.owner_row = True
1527 1527 usr.permission = _admin_perm
1528 1528 owner_row.append(usr)
1529 1529
1530 1530 super_admin_ids = []
1531 1531 super_admin_rows = []
1532 1532 if with_admins:
1533 1533 for usr in User.get_all_super_admins():
1534 1534 super_admin_ids.append(usr.user_id)
1535 1535 # if this admin is also owner, don't double the record
1536 1536 if usr.user_id == owner_row[0].user_id:
1537 1537 owner_row[0].admin_row = True
1538 1538 else:
1539 1539 usr = AttributeDict(usr.get_dict())
1540 1540 usr.admin_row = True
1541 1541 usr.permission = _admin_perm
1542 1542 super_admin_rows.append(usr)
1543 1543
1544 1544 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1545 1545 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1546 1546 joinedload(UserUserGroupToPerm.user),
1547 1547 joinedload(UserUserGroupToPerm.permission),)
1548 1548
1549 1549 # get owners, admins and their permissions. We re-write the sqlalchemy
1550 1550 # objects as named-tuples because the sqlalchemy session keeps a global
1551 1551 # reference, and changing one object propagates to all others. This
1552 1552 # means that if an admin is also an owner, an admin_row change would
1553 1553 # otherwise propagate to both objects
1554 1554 perm_rows = []
1555 1555 for _usr in q.all():
1556 1556 usr = AttributeDict(_usr.user.get_dict())
1557 1557 # if this user is also owner/admin, mark as duplicate record
1558 1558 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1559 1559 usr.duplicate_perm = True
1560 1560 usr.permission = _usr.permission.permission_name
1561 1561 perm_rows.append(usr)
1562 1562
1563 1563 # sort the perm rows so the 'default' user comes first, then by
1564 1564 # admin, write, read, none permissions, sorted again alphabetically
1565 1565 # within each group
1566 1566 perm_rows = sorted(perm_rows, key=display_user_sort)
1567 1567
1568 1568 user_groups_rows = []
1569 1569 if expand_from_user_groups:
1570 1570 for ug in self.permission_user_groups(with_members=True):
1571 1571 for user_data in ug.members:
1572 1572 user_groups_rows.append(user_data)
1573 1573
1574 1574 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1575 1575
1576 1576 def permission_user_groups(self, with_members=False):
1577 1577 q = UserGroupUserGroupToPerm.query()\
1578 1578 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1579 1579 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1580 1580 joinedload(UserGroupUserGroupToPerm.target_user_group),
1581 1581 joinedload(UserGroupUserGroupToPerm.permission),)
1582 1582
1583 1583 perm_rows = []
1584 1584 for _user_group in q.all():
1585 1585 entry = AttributeDict(_user_group.user_group.get_dict())
1586 1586 entry.permission = _user_group.permission.permission_name
1587 1587 if with_members:
1588 1588 entry.members = [x.user.get_dict()
1589 1589 for x in _user_group.user_group.members]
1590 1590 perm_rows.append(entry)
1591 1591
1592 1592 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1593 1593 return perm_rows
1594 1594
1595 1595 def _get_default_perms(self, user_group, suffix=''):
1596 1596 from rhodecode.model.permission import PermissionModel
1597 1597 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1598 1598
1599 1599 def get_default_perms(self, suffix=''):
1600 1600 return self._get_default_perms(self, suffix)
1601 1601
1602 1602 def get_api_data(self, with_group_members=True, include_secrets=False):
1603 1603 """
1604 1604 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1605 1605 basically forwarded.
1606 1606
1607 1607 """
1608 1608 user_group = self
1609 1609 data = {
1610 1610 'users_group_id': user_group.users_group_id,
1611 1611 'group_name': user_group.users_group_name,
1612 1612 'group_description': user_group.user_group_description,
1613 1613 'active': user_group.users_group_active,
1614 1614 'owner': user_group.user.username,
1615 1615 'sync': user_group.sync,
1616 1616 'owner_email': user_group.user.email,
1617 1617 }
1618 1618
1619 1619 if with_group_members:
1620 1620 users = []
1621 1621 for user in user_group.members:
1622 1622 user = user.user
1623 1623 users.append(user.get_api_data(include_secrets=include_secrets))
1624 1624 data['users'] = users
1625 1625
1626 1626 return data
1627 1627
1628 1628
1629 1629 class UserGroupMember(Base, BaseModel):
1630 1630 __tablename__ = 'users_groups_members'
1631 1631 __table_args__ = (
1632 1632 base_table_args,
1633 1633 )
1634 1634
1635 1635 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1636 1636 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1637 1637 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1638 1638
1639 1639 user = relationship('User', lazy='joined')
1640 1640 users_group = relationship('UserGroup')
1641 1641
1642 1642 def __init__(self, gr_id='', u_id=''):
1643 1643 self.users_group_id = gr_id
1644 1644 self.user_id = u_id
1645 1645
1646 1646
1647 1647 class RepositoryField(Base, BaseModel):
1648 1648 __tablename__ = 'repositories_fields'
1649 1649 __table_args__ = (
1650 1650 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1651 1651 base_table_args,
1652 1652 )
1653 1653
1654 1654 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1655 1655
1656 1656 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1657 1657 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1658 1658 field_key = Column("field_key", String(250))
1659 1659 field_label = Column("field_label", String(1024), nullable=False)
1660 1660 field_value = Column("field_value", String(10000), nullable=False)
1661 1661 field_desc = Column("field_desc", String(1024), nullable=False)
1662 1662 field_type = Column("field_type", String(255), nullable=False, unique=None)
1663 1663 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1664 1664
1665 1665 repository = relationship('Repository')
1666 1666
1667 1667 @property
1668 1668 def field_key_prefixed(self):
1669 1669 return '%s%s' % (self.PREFIX, self.field_key)
1670 1670
1671 1671 @classmethod
1672 1672 def un_prefix_key(cls, key):
1673 1673 if key.startswith(cls.PREFIX):
1674 1674 return key[len(cls.PREFIX):]
1675 1675 return key
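
# Illustrative key handling ('ticket_url' is a hypothetical field key):
#   field.field_key_prefixed                        # -> 'ex_ticket_url'
#   RepositoryField.un_prefix_key('ex_ticket_url')  # -> 'ticket_url'
#   RepositoryField.un_prefix_key('ticket_url')     # -> 'ticket_url' (no-op)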
1676 1676
1677 1677 @classmethod
1678 1678 def get_by_key_name(cls, key, repo):
1679 1679 row = cls.query()\
1680 1680 .filter(cls.repository == repo)\
1681 1681 .filter(cls.field_key == key).scalar()
1682 1682 return row
1683 1683
1684 1684
1685 1685 class Repository(Base, BaseModel):
1686 1686 __tablename__ = 'repositories'
1687 1687 __table_args__ = (
1688 1688 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1689 1689 base_table_args,
1690 1690 )
1691 1691 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1692 1692 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1693 1693 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1694 1694
1695 1695 STATE_CREATED = 'repo_state_created'
1696 1696 STATE_PENDING = 'repo_state_pending'
1697 1697 STATE_ERROR = 'repo_state_error'
1698 1698
1699 1699 LOCK_AUTOMATIC = 'lock_auto'
1700 1700 LOCK_API = 'lock_api'
1701 1701 LOCK_WEB = 'lock_web'
1702 1702 LOCK_PULL = 'lock_pull'
1703 1703
1704 1704 NAME_SEP = URL_SEP
1705 1705
1706 1706 repo_id = Column(
1707 1707 "repo_id", Integer(), nullable=False, unique=True, default=None,
1708 1708 primary_key=True)
1709 1709 _repo_name = Column(
1710 1710 "repo_name", Text(), nullable=False, default=None)
1711 1711 repo_name_hash = Column(
1712 1712 "repo_name_hash", String(255), nullable=False, unique=True)
1713 1713 repo_state = Column("repo_state", String(255), nullable=True)
1714 1714
1715 1715 clone_uri = Column(
1716 1716 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1717 1717 default=None)
1718 1718 push_uri = Column(
1719 1719 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1720 1720 default=None)
1721 1721 repo_type = Column(
1722 1722 "repo_type", String(255), nullable=False, unique=False, default=None)
1723 1723 user_id = Column(
1724 1724 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1725 1725 unique=False, default=None)
1726 1726 private = Column(
1727 1727 "private", Boolean(), nullable=True, unique=None, default=None)
1728 1728 archived = Column(
1729 1729 "archived", Boolean(), nullable=True, unique=None, default=None)
1730 1730 enable_statistics = Column(
1731 1731 "statistics", Boolean(), nullable=True, unique=None, default=True)
1732 1732 enable_downloads = Column(
1733 1733 "downloads", Boolean(), nullable=True, unique=None, default=True)
1734 1734 description = Column(
1735 1735 "description", String(10000), nullable=True, unique=None, default=None)
1736 1736 created_on = Column(
1737 1737 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1738 1738 default=datetime.datetime.now)
1739 1739 updated_on = Column(
1740 1740 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1741 1741 default=datetime.datetime.now)
1742 1742 _landing_revision = Column(
1743 1743 "landing_revision", String(255), nullable=False, unique=False,
1744 1744 default=None)
1745 1745 enable_locking = Column(
1746 1746 "enable_locking", Boolean(), nullable=False, unique=None,
1747 1747 default=False)
1748 1748 _locked = Column(
1749 1749 "locked", String(255), nullable=True, unique=False, default=None)
1750 1750 _changeset_cache = Column(
1751 1751 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1752 1752
1753 1753 fork_id = Column(
1754 1754 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1755 1755 nullable=True, unique=False, default=None)
1756 1756 group_id = Column(
1757 1757 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1758 1758 unique=False, default=None)
1759 1759
1760 1760 user = relationship('User', lazy='joined')
1761 1761 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1762 1762 group = relationship('RepoGroup', lazy='joined')
1763 1763 repo_to_perm = relationship(
1764 1764 'UserRepoToPerm', cascade='all',
1765 1765 order_by='UserRepoToPerm.repo_to_perm_id')
1766 1766 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1767 1767 stats = relationship('Statistics', cascade='all', uselist=False)
1768 1768
1769 1769 followers = relationship(
1770 1770 'UserFollowing',
1771 1771 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1772 1772 cascade='all')
1773 1773 extra_fields = relationship(
1774 1774 'RepositoryField', cascade="all, delete-orphan")
1775 1775 logs = relationship('UserLog')
1776 1776 comments = relationship(
1777 1777 'ChangesetComment', cascade="all, delete-orphan")
1778 1778 pull_requests_source = relationship(
1779 1779 'PullRequest',
1780 1780 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1781 1781 cascade="all, delete-orphan")
1782 1782 pull_requests_target = relationship(
1783 1783 'PullRequest',
1784 1784 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1785 1785 cascade="all, delete-orphan")
1786 1786 ui = relationship('RepoRhodeCodeUi', cascade="all")
1787 1787 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1788 1788 integrations = relationship('Integration', cascade="all, delete-orphan")
1789 1789
1790 1790 scoped_tokens = relationship('UserApiKeys', cascade="all")
1791 1791
1792 1792 # no cascade, set NULL
1793 1793 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1794 1794
1795 1795 def __unicode__(self):
1796 1796 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1797 1797 safe_unicode(self.repo_name))
1798 1798
1799 1799 @hybrid_property
1800 1800 def description_safe(self):
1801 1801 from rhodecode.lib import helpers as h
1802 1802 return h.escape(self.description)
1803 1803
1804 1804 @hybrid_property
1805 1805 def landing_rev(self):
1806 1806 # should always return [rev_type, rev], e.g. ['branch', 'master']
1807 1807 if self._landing_revision:
1808 1808 _rev_info = self._landing_revision.split(':')
1809 1809 if len(_rev_info) < 2:
1810 1810 _rev_info.insert(0, 'rev')
1811 1811 return [_rev_info[0], _rev_info[1]]
1812 1812 return [None, None]
1813 1813
1814 1814 @property
1815 1815 def landing_ref_type(self):
1816 1816 return self.landing_rev[0]
1817 1817
1818 1818 @property
1819 1819 def landing_ref_name(self):
1820 1820 return self.landing_rev[1]
1821 1821
1822 1822 @landing_rev.setter
1823 1823 def landing_rev(self, val):
1824 1824 if ':' not in val:
1825 1825 raise ValueError('value must be delimited with `:` and consist '
1826 1826 'of <rev_type>:<rev>, got %s instead' % val)
1827 1827 self._landing_revision = val
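
# Illustrative round-trip of the landing_rev property (values hypothetical):
#   repo.landing_rev = 'branch:master'  # stored as 'branch:master'
#   repo.landing_rev                    # -> ['branch', 'master']
#   repo.landing_ref_type               # -> 'branch'
#   repo.landing_ref_name               # -> 'master'
#   repo.landing_rev = 'master'         # -> raises ValueError (no ':')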
1828 1828
1829 1829 @hybrid_property
1830 1830 def locked(self):
1831 1831 if self._locked:
1832 1832 user_id, timelocked, reason = self._locked.split(':', 2)  # reason may contain ':'
1833 1833 lock_values = int(user_id), timelocked, reason
1834 1834 else:
1835 1835 lock_values = [None, None, None]
1836 1836 return lock_values
1837 1837
1838 1838 @locked.setter
1839 1839 def locked(self, val):
1840 1840 if val and isinstance(val, (list, tuple)):
1841 1841 self._locked = ':'.join(map(str, val))
1842 1842 else:
1843 1843 self._locked = None
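
# Illustrative lock serialization (user id and timestamp are hypothetical):
#   repo.locked = [2, 1600000000.0, Repository.LOCK_API]
#   repo._locked  # -> '2:1600000000.0:lock_api'
#   repo.locked   # -> (2, '1600000000.0', 'lock_api')
#   repo.locked = None  # releases the lock -> [None, None, None]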
1844 1844
1845 1845 @classmethod
1846 1846 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1847 1847 from rhodecode.lib.vcs.backends.base import EmptyCommit
1848 1848 dummy = EmptyCommit().__json__()
1849 1849 if not changeset_cache_raw:
1850 1850 dummy['source_repo_id'] = repo_id
1851 1851 return json.loads(json.dumps(dummy))
1852 1852
1853 1853 try:
1854 1854 return json.loads(changeset_cache_raw)
1855 1855 except TypeError:
1856 1856 return dummy
1857 1857 except Exception:
1858 1858 log.error(traceback.format_exc())
1859 1859 return dummy
1860 1860
1861 1861 @hybrid_property
1862 1862 def changeset_cache(self):
1863 1863 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1864 1864
1865 1865 @changeset_cache.setter
1866 1866 def changeset_cache(self, val):
1867 1867 try:
1868 1868 self._changeset_cache = json.dumps(val)
1869 1869 except Exception:
1870 1870 log.error(traceback.format_exc())
1871 1871
1872 1872 @hybrid_property
1873 1873 def repo_name(self):
1874 1874 return self._repo_name
1875 1875
1876 1876 @repo_name.setter
1877 1877 def repo_name(self, value):
1878 1878 self._repo_name = value
1879 1879 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1880 1880
1881 1881 @classmethod
1882 1882 def normalize_repo_name(cls, repo_name):
1883 1883 """
1884 1884 Normalizes an OS-specific repo_name to the format stored internally in the
1885 1885 database, using URL_SEP
1886 1886
1887 1887 :param cls:
1888 1888 :param repo_name:
1889 1889 """
1890 1890 return cls.NAME_SEP.join(repo_name.split(os.sep))
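
# Illustrative (relevant on Windows, where os.sep is '\\'):
#   Repository.normalize_repo_name('group\\subgroup\\repo')
#   # -> 'group/subgroup/repo'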
1891 1891
1892 1892 @classmethod
1893 1893 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1894 1894 session = Session()
1895 1895 q = session.query(cls).filter(cls.repo_name == repo_name)
1896 1896
1897 1897 if cache:
1898 1898 if identity_cache:
1899 1899 val = cls.identity_cache(session, 'repo_name', repo_name)
1900 1900 if val:
1901 1901 return val
1902 1902 else:
1903 1903 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1904 1904 q = q.options(
1905 1905 FromCache("sql_cache_short", cache_key))
1906 1906
1907 1907 return q.scalar()
1908 1908
1909 1909 @classmethod
1910 1910 def get_by_id_or_repo_name(cls, repoid):
1911 1911 if isinstance(repoid, (int, long)):
1912 1912 try:
1913 1913 repo = cls.get(repoid)
1914 1914 except ValueError:
1915 1915 repo = None
1916 1916 else:
1917 1917 repo = cls.get_by_repo_name(repoid)
1918 1918 return repo
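
# Both lookup styles supported above (illustrative values):
#   Repository.get_by_id_or_repo_name(42)            # by primary key
#   Repository.get_by_id_or_repo_name('group/repo')  # by full repo name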
1919 1919
1920 1920 @classmethod
1921 1921 def get_by_full_path(cls, repo_full_path):
1922 1922 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1923 1923 repo_name = cls.normalize_repo_name(repo_name)
1924 1924 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1925 1925
1926 1926 @classmethod
1927 1927 def get_repo_forks(cls, repo_id):
1928 1928 return cls.query().filter(Repository.fork_id == repo_id)
1929 1929
1930 1930 @classmethod
1931 1931 def base_path(cls):
1932 1932 """
1933 1933 Returns the base path where all repos are stored
1934 1934
1935 1935 :param cls:
1936 1936 """
1937 1937 q = Session().query(RhodeCodeUi)\
1938 1938 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1939 1939 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1940 1940 return q.one().ui_value
1941 1941
1942 1942 @classmethod
1943 1943 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1944 1944 case_insensitive=True, archived=False):
1945 1945 q = Repository.query()
1946 1946
1947 1947 if not archived:
1948 1948 q = q.filter(Repository.archived.isnot(true()))
1949 1949
1950 1950 if not isinstance(user_id, Optional):
1951 1951 q = q.filter(Repository.user_id == user_id)
1952 1952
1953 1953 if not isinstance(group_id, Optional):
1954 1954 q = q.filter(Repository.group_id == group_id)
1955 1955
1956 1956 if case_insensitive:
1957 1957 q = q.order_by(func.lower(Repository.repo_name))
1958 1958 else:
1959 1959 q = q.order_by(Repository.repo_name)
1960 1960
1961 1961 return q.all()
1962 1962
1963 1963 @property
1964 1964 def repo_uid(self):
1965 1965 return '_{}'.format(self.repo_id)
1966 1966
1967 1967 @property
1968 1968 def forks(self):
1969 1969 """
1970 1970 Return forks of this repo
1971 1971 """
1972 1972 return Repository.get_repo_forks(self.repo_id)
1973 1973
1974 1974 @property
1975 1975 def parent(self):
1976 1976 """
1977 1977 Returns fork parent
1978 1978 """
1979 1979 return self.fork
1980 1980
1981 1981 @property
1982 1982 def just_name(self):
1983 1983 return self.repo_name.split(self.NAME_SEP)[-1]
1984 1984
1985 1985 @property
1986 1986 def groups_with_parents(self):
1987 1987 groups = []
1988 1988 if self.group is None:
1989 1989 return groups
1990 1990
1991 1991 cur_gr = self.group
1992 1992 groups.insert(0, cur_gr)
1993 1993 while 1:
1994 1994 gr = getattr(cur_gr, 'parent_group', None)
1995 1995 cur_gr = cur_gr.parent_group
1996 1996 if gr is None:
1997 1997 break
1998 1998 groups.insert(0, gr)
1999 1999
2000 2000 return groups
2001 2001
2002 2002 @property
2003 2003 def groups_and_repo(self):
2004 2004 return self.groups_with_parents, self
2005 2005
2006 2006 @LazyProperty
2007 2007 def repo_path(self):
2008 2008 """
2009 2009 Returns the full base path for this repository, i.e. where it actually
2010 2010 exists on the filesystem
2011 2011 """
2012 2012 q = Session().query(RhodeCodeUi).filter(
2013 2013 RhodeCodeUi.ui_key == self.NAME_SEP)
2014 2014 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2015 2015 return q.one().ui_value
2016 2016
2017 2017 @property
2018 2018 def repo_full_path(self):
2019 2019 p = [self.repo_path]
2020 2020 # we need to split the name by / since this is how we store the
2021 2021 # names in the database, but that eventually needs to be converted
2022 2022 # into a valid system path
2023 2023 p += self.repo_name.split(self.NAME_SEP)
2024 2024 return os.path.join(*map(safe_unicode, p))
2025 2025
2026 2026 @property
2027 2027 def cache_keys(self):
2028 2028 """
2029 2029 Returns associated cache keys for that repo
2030 2030 """
2031 2031 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2032 2032 repo_id=self.repo_id)
2033 2033 return CacheKey.query()\
2034 2034 .filter(CacheKey.cache_args == invalidation_namespace)\
2035 2035 .order_by(CacheKey.cache_key)\
2036 2036 .all()
2037 2037
2038 2038 @property
2039 2039 def cached_diffs_relative_dir(self):
2040 2040 """
2041 2041 Return the path of cached diffs relative to the repository store, used
2042 2042 for safe display to users who shouldn't know the absolute store
2043 2043 path
2044 2044 """
2045 2045 return os.path.join(
2046 2046 os.path.dirname(self.repo_name),
2047 2047 self.cached_diffs_dir.split(os.path.sep)[-1])
2048 2048
2049 2049 @property
2050 2050 def cached_diffs_dir(self):
2051 2051 path = self.repo_full_path
2052 2052 return os.path.join(
2053 2053 os.path.dirname(path),
2054 2054 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2055 2055
2056 2056 def cached_diffs(self):
2057 2057 diff_cache_dir = self.cached_diffs_dir
2058 2058 if os.path.isdir(diff_cache_dir):
2059 2059 return os.listdir(diff_cache_dir)
2060 2060 return []
2061 2061
2062 2062 def shadow_repos(self):
2063 2063 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2064 2064 return [
2065 2065 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2066 2066 if x.startswith(shadow_repos_pattern)]
2067 2067
2068 2068 def get_new_name(self, repo_name):
2069 2069 """
2070 2070 returns new full repository name based on assigned group and new name
2071 2071
2072 2072 :param repo_name: new (short) name of the repository
2073 2073 """
2074 2074 path_prefix = self.group.full_path_splitted if self.group else []
2075 2075 return self.NAME_SEP.join(path_prefix + [repo_name])
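
# Illustrative rename within a group (names hypothetical): for a repo in
# group 'docs', get_new_name('manual') returns 'docs/manual'; for a repo
# without a group it returns just 'manual'.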
2076 2076
2077 2077 @property
2078 2078 def _config(self):
2079 2079 """
2080 2080 Returns db based config object.
2081 2081 """
2082 2082 from rhodecode.lib.utils import make_db_config
2083 2083 return make_db_config(clear_session=False, repo=self)
2084 2084
2085 2085 def permissions(self, with_admins=True, with_owner=True,
2086 2086 expand_from_user_groups=False):
2087 2087 """
2088 2088 Permissions for repositories
2089 2089 """
2090 2090 _admin_perm = 'repository.admin'
2091 2091
2092 2092 owner_row = []
2093 2093 if with_owner:
2094 2094 usr = AttributeDict(self.user.get_dict())
2095 2095 usr.owner_row = True
2096 2096 usr.permission = _admin_perm
2097 2097 usr.permission_id = None
2098 2098 owner_row.append(usr)
2099 2099
2100 2100 super_admin_ids = []
2101 2101 super_admin_rows = []
2102 2102 if with_admins:
2103 2103 for usr in User.get_all_super_admins():
2104 2104 super_admin_ids.append(usr.user_id)
2105 2105 # if this admin is also owner, don't double the record
2106 2106 if usr.user_id == owner_row[0].user_id:
2107 2107 owner_row[0].admin_row = True
2108 2108 else:
2109 2109 usr = AttributeDict(usr.get_dict())
2110 2110 usr.admin_row = True
2111 2111 usr.permission = _admin_perm
2112 2112 usr.permission_id = None
2113 2113 super_admin_rows.append(usr)
2114 2114
2115 2115 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2116 2116 q = q.options(joinedload(UserRepoToPerm.repository),
2117 2117 joinedload(UserRepoToPerm.user),
2118 2118 joinedload(UserRepoToPerm.permission),)
2119 2119
2120 2120 # get owners, admins and their permissions. We re-write the sqlalchemy
2121 2121 # objects as named-tuples because the sqlalchemy session keeps a global
2122 2122 # reference, and changing one object propagates to all others. This
2123 2123 # means that if an admin is also an owner, an admin_row change would
2124 2124 # otherwise propagate to both objects
2125 2125 perm_rows = []
2126 2126 for _usr in q.all():
2127 2127 usr = AttributeDict(_usr.user.get_dict())
2128 2128 # if this user is also owner/admin, mark as duplicate record
2129 2129 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2130 2130 usr.duplicate_perm = True
2131 2131 # also check if this permission is maybe used by branch_permissions
2132 2132 if _usr.branch_perm_entry:
2133 2133 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2134 2134
2135 2135 usr.permission = _usr.permission.permission_name
2136 2136 usr.permission_id = _usr.repo_to_perm_id
2137 2137 perm_rows.append(usr)
2138 2138
2139 2139 # sort the perm rows so the 'default' user comes first, then by
2140 2140 # admin, write, read, none permissions, sorted again alphabetically
2141 2141 # within each group
2142 2142 perm_rows = sorted(perm_rows, key=display_user_sort)
2143 2143
2144 2144 user_groups_rows = []
2145 2145 if expand_from_user_groups:
2146 2146 for ug in self.permission_user_groups(with_members=True):
2147 2147 for user_data in ug.members:
2148 2148 user_groups_rows.append(user_data)
2149 2149
2150 2150 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2151 2151
2152 2152 def permission_user_groups(self, with_members=True):
2153 2153 q = UserGroupRepoToPerm.query()\
2154 2154 .filter(UserGroupRepoToPerm.repository == self)
2155 2155 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2156 2156 joinedload(UserGroupRepoToPerm.users_group),
2157 2157 joinedload(UserGroupRepoToPerm.permission),)
2158 2158
2159 2159 perm_rows = []
2160 2160 for _user_group in q.all():
2161 2161 entry = AttributeDict(_user_group.users_group.get_dict())
2162 2162 entry.permission = _user_group.permission.permission_name
2163 2163 if with_members:
2164 2164 entry.members = [x.user.get_dict()
2165 2165 for x in _user_group.users_group.members]
2166 2166 perm_rows.append(entry)
2167 2167
2168 2168 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2169 2169 return perm_rows
2170 2170
2171 2171 def get_api_data(self, include_secrets=False):
2172 2172 """
2173 2173 Common function for generating repo api data
2174 2174
2175 2175 :param include_secrets: See :meth:`User.get_api_data`.
2176 2176
2177 2177 """
2178 2178 # TODO: mikhail: here we have an anti-pattern, we probably need to
2179 2179 # move these methods to the models level.
2180 2180 from rhodecode.model.settings import SettingsModel
2181 2181 from rhodecode.model.repo import RepoModel
2182 2182
2183 2183 repo = self
2184 2184 _user_id, _time, _reason = self.locked
2185 2185
2186 2186 data = {
2187 2187 'repo_id': repo.repo_id,
2188 2188 'repo_name': repo.repo_name,
2189 2189 'repo_type': repo.repo_type,
2190 2190 'clone_uri': repo.clone_uri or '',
2191 2191 'push_uri': repo.push_uri or '',
2192 2192 'url': RepoModel().get_url(self),
2193 2193 'private': repo.private,
2194 2194 'created_on': repo.created_on,
2195 2195 'description': repo.description_safe,
2196 2196 'landing_rev': repo.landing_rev,
2197 2197 'owner': repo.user.username,
2198 2198 'fork_of': repo.fork.repo_name if repo.fork else None,
2199 2199 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2200 2200 'enable_statistics': repo.enable_statistics,
2201 2201 'enable_locking': repo.enable_locking,
2202 2202 'enable_downloads': repo.enable_downloads,
2203 2203 'last_changeset': repo.changeset_cache,
2204 2204 'locked_by': User.get(_user_id).get_api_data(
2205 2205 include_secrets=include_secrets) if _user_id else None,
2206 2206 'locked_date': time_to_datetime(_time) if _time else None,
2207 2207 'lock_reason': _reason if _reason else None,
2208 2208 }
2209 2209
2210 2210 # TODO: mikhail: should be per-repo settings here
2211 2211 rc_config = SettingsModel().get_all_settings()
2212 2212 repository_fields = str2bool(
2213 2213 rc_config.get('rhodecode_repository_fields'))
2214 2214 if repository_fields:
2215 2215 for f in self.extra_fields:
2216 2216 data[f.field_key_prefixed] = f.field_value
2217 2217
2218 2218 return data
2219 2219
2220 2220 @classmethod
2221 2221 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2222 2222 if not lock_time:
2223 2223 lock_time = time.time()
2224 2224 if not lock_reason:
2225 2225 lock_reason = cls.LOCK_AUTOMATIC
2226 2226 repo.locked = [user_id, lock_time, lock_reason]
2227 2227 Session().add(repo)
2228 2228 Session().commit()
2229 2229
2230 2230 @classmethod
2231 2231 def unlock(cls, repo):
2232 2232 repo.locked = None
2233 2233 Session().add(repo)
2234 2234 Session().commit()
2235 2235
2236 2236 @classmethod
2237 2237 def getlock(cls, repo):
2238 2238 return repo.locked
2239 2239
2240 2240 def is_user_lock(self, user_id):
2241 2241 if self.locked[0]:
2242 2242 lock_user_id = safe_int(self.locked[0])
2243 2243 user_id = safe_int(user_id)
2244 2244 # both are ints, and they are equal
2245 2245 return all([lock_user_id, user_id]) and lock_user_id == user_id
2246 2246
2247 2247 return False
2248 2248
2249 2249 def get_locking_state(self, action, user_id, only_when_enabled=True):
2250 2250 """
2251 2251 Checks locking on this repository. If locking is enabled and a lock is
2252 2252 present, returns a tuple of make_lock, locked, locked_by.
2253 2253 make_lock has 3 states: None (do nothing), True (make a lock) and
2254 2254 False (release a lock). This value is later propagated to hooks, which
2255 2255 do the actual locking. Think of it as a signal telling hooks what to do.
2256 2256
2257 2257 """
2258 2258 # TODO: johbo: This is part of the business logic and should be moved
2259 2259 # into the RepositoryModel.
2260 2260
2261 2261 if action not in ('push', 'pull'):
2262 2262 raise ValueError("Invalid action value: %s" % repr(action))
2263 2263
2264 2264 # defines if locked error should be thrown to user
2265 2265 currently_locked = False
2266 2266 # defines if new lock should be made, tri-state
2267 2267 make_lock = None
2268 2268 repo = self
2269 2269 user = User.get(user_id)
2270 2270
2271 2271 lock_info = repo.locked
2272 2272
2273 2273 if repo and (repo.enable_locking or not only_when_enabled):
2274 2274 if action == 'push':
2275 2275 # check if it's already locked; if it is, compare users
2276 2276 locked_by_user_id = lock_info[0]
2277 2277 if user.user_id == locked_by_user_id:
2278 2278 log.debug(
2279 2279 'Got `push` action from user %s, now unlocking', user)
2280 2280 # unlock if we have push from user who locked
2281 2281 make_lock = False
2282 2282 else:
2283 2283 # we're not the user who locked it, so deny with the status
2284 2284 # code defined in settings (default is HTTP 423 Locked)
2285 2285 log.debug('Repo %s is currently locked by user id %s', repo, locked_by_user_id)
2286 2286 currently_locked = True
2287 2287 elif action == 'pull':
2288 2288 # [0] user [1] date
2289 2289 if lock_info[0] and lock_info[1]:
2290 2290 log.debug('Repo %s is currently locked by user id %s', repo, lock_info[0])
2291 2291 currently_locked = True
2292 2292 else:
2293 2293 log.debug('Setting lock on repo %s by %s', repo, user)
2294 2294 make_lock = True
2295 2295
2296 2296 else:
2297 2297 log.debug('Repository %s does not have locking enabled', repo)
2298 2298
2299 2299 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2300 2300 make_lock, currently_locked, lock_info)
2301 2301
2302 2302 from rhodecode.lib.auth import HasRepoPermissionAny
2303 2303 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2304 2304 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2305 2305 # if we don't have at least write permission we cannot make a lock
2306 2306 log.debug('lock state reset back to FALSE due to lack '
2307 2307 'of at least write permission')
2308 2308 make_lock = False
2309 2309
2310 2310 return make_lock, currently_locked, lock_info
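
# Interpreting the tri-state result above (illustrative hook-side usage;
# `user_id` is hypothetical):
#   make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
#   if make_lock is True:      # acquire the lock for this user
#       Repository.lock(repo, user_id)
#   elif make_lock is False:   # push from the lock holder releases it
#       Repository.unlock(repo)
#   # make_lock is None -> leave lock state unchanged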
2311 2311
2312 2312 @property
2313 2313 def last_commit_cache_update_diff(self):
2314 2314 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2315 2315
2316 2316 @classmethod
2317 2317 def _load_commit_change(cls, last_commit_cache):
2318 2318 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2319 2319 empty_date = datetime.datetime.fromtimestamp(0)
2320 2320 date_latest = last_commit_cache.get('date', empty_date)
2321 2321 try:
2322 2322 return parse_datetime(date_latest)
2323 2323 except Exception:
2324 2324 return empty_date
2325 2325
2326 2326 @property
2327 2327 def last_commit_change(self):
2328 2328 return self._load_commit_change(self.changeset_cache)
2329 2329
2330 2330 @property
2331 2331 def last_db_change(self):
2332 2332 return self.updated_on
2333 2333
2334 2334 @property
2335 2335 def clone_uri_hidden(self):
2336 2336 clone_uri = self.clone_uri
2337 2337 if clone_uri:
2338 2338 import urlobject
2339 2339 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2340 2340 if url_obj.password:
2341 2341 clone_uri = url_obj.with_password('*****')
2342 2342 return clone_uri
2343 2343
2344 2344 @property
2345 2345 def push_uri_hidden(self):
2346 2346 push_uri = self.push_uri
2347 2347 if push_uri:
2348 2348 import urlobject
2349 2349 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2350 2350 if url_obj.password:
2351 2351 push_uri = url_obj.with_password('*****')
2352 2352 return push_uri
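
# Illustrative masking performed by the two *_hidden properties above
# (URL is hypothetical):
#   'https://user:secret@example.com/repo'
#   # -> 'https://user:*****@example.com/repo'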
2353 2353
2354 2354 def clone_url(self, **override):
2355 2355 from rhodecode.model.settings import SettingsModel
2356 2356
2357 2357 uri_tmpl = None
2358 2358 if 'with_id' in override:
2359 2359 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2360 2360 del override['with_id']
2361 2361
2362 2362 if 'uri_tmpl' in override:
2363 2363 uri_tmpl = override['uri_tmpl']
2364 2364 del override['uri_tmpl']
2365 2365
2366 2366 ssh = False
2367 2367 if 'ssh' in override:
2368 2368 ssh = True
2369 2369 del override['ssh']
2370 2370
2371 2371 # we didn't override our tmpl from **overrides
2372 2372 request = get_current_request()
2373 2373 if not uri_tmpl:
2374 2374 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2375 2375 rc_config = request.call_context.rc_config
2376 2376 else:
2377 2377 rc_config = SettingsModel().get_all_settings(cache=True)
2378 2378
2379 2379 if ssh:
2380 2380 uri_tmpl = rc_config.get(
2381 2381 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2382 2382
2383 2383 else:
2384 2384 uri_tmpl = rc_config.get(
2385 2385 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2386 2386
2387 2387 return get_clone_url(request=request,
2388 2388 uri_tmpl=uri_tmpl,
2389 2389 repo_name=self.repo_name,
2390 2390 repo_id=self.repo_id,
2391 2391 repo_type=self.repo_type,
2392 2392 **override)
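# Minimal usage sketch for ``clone_url`` (illustrative; assumes an existing
# ``Repository`` row ``repo``):
#
#     url = repo.clone_url()                  # rendered from the configured template
#     url = repo.clone_url(with_id=True)      # force the DEFAULT_CLONE_URI_ID template
#     url = repo.clone_url(ssh=True)          # use the SSH clone template instead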
2393 2393
2394 2394 def set_state(self, state):
2395 2395 self.repo_state = state
2396 2396 Session().add(self)
2397 2397 # ==========================================================================
2398 2398 # SCM PROPERTIES
2399 2399 # ==========================================================================
2400 2400
2401 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
2401 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2402 2402 return get_commit_safe(
2403 2403 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2404 maybe_unreachable=maybe_unreachable)
2404 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
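# Usage sketch for the extended ``get_commit`` signature (illustrative;
# ``some_ref`` stands for a vcs Reference object and is an assumption):
#
#     commit = repo.get_commit(commit_id='abcdef12')   # by hash
#     commit = repo.get_commit(commit_idx=42)          # by numeric index
#     # passing reference_obj lets the backend resolve ambiguous names,
#     # e.g. an all-numeric GIT branch, as a reference instead of an index
#     commit = repo.get_commit(commit_id='1234', reference_obj=some_ref)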
2405 2405
2406 2406 def get_changeset(self, rev=None, pre_load=None):
2407 2407 warnings.warn("Use get_commit", DeprecationWarning)
2408 2408 commit_id = None
2409 2409 commit_idx = None
2410 2410 if isinstance(rev, compat.string_types):
2411 2411 commit_id = rev
2412 2412 else:
2413 2413 commit_idx = rev
2414 2414 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2415 2415 pre_load=pre_load)
2416 2416
2417 2417 def get_landing_commit(self):
2418 2418 """
2419 2419 Returns the landing commit or, if that doesn't exist, the tip
2420 2420 """
2421 2421 _rev_type, _rev = self.landing_rev
2422 2422 commit = self.get_commit(_rev)
2423 2423 if isinstance(commit, EmptyCommit):
2424 2424 return self.get_commit()
2425 2425 return commit
2426 2426
2427 2427 def flush_commit_cache(self):
2428 2428 self.update_commit_cache(cs_cache={'raw_id':'0'})
2429 2429 self.update_commit_cache()
2430 2430
2431 2431 def update_commit_cache(self, cs_cache=None, config=None):
2432 2432 """
2433 2433 Update cache of last commit for repository
2434 2434 cache_keys should be::
2435 2435
2436 2436 source_repo_id
2437 2437 short_id
2438 2438 raw_id
2439 2439 revision
2440 2440 parents
2441 2441 message
2442 2442 date
2443 2443 author
2444 2444 updated_on
2445 2445
2446 2446 """
2447 2447 from rhodecode.lib.vcs.backends.base import BaseChangeset
2448 2448 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2449 2449 empty_date = datetime.datetime.fromtimestamp(0)
2450 2450
2451 2451 if cs_cache is None:
2452 2452 # use no-cache version here
2453 2453 try:
2454 2454 scm_repo = self.scm_instance(cache=False, config=config)
2455 2455 except VCSError:
2456 2456 scm_repo = None
2457 2457 empty = scm_repo is None or scm_repo.is_empty()
2458 2458
2459 2459 if not empty:
2460 2460 cs_cache = scm_repo.get_commit(
2461 2461 pre_load=["author", "date", "message", "parents", "branch"])
2462 2462 else:
2463 2463 cs_cache = EmptyCommit()
2464 2464
2465 2465 if isinstance(cs_cache, BaseChangeset):
2466 2466 cs_cache = cs_cache.__json__()
2467 2467
2468 2468 def is_outdated(new_cs_cache):
2469 2469 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2470 2470 new_cs_cache['revision'] != self.changeset_cache['revision']):
2471 2471 return True
2472 2472 return False
2473 2473
2474 2474 # check if we maybe already have the latest cached revision
2475 2475 if is_outdated(cs_cache) or not self.changeset_cache:
2476 2476 _current_datetime = datetime.datetime.utcnow()
2477 2477 last_change = cs_cache.get('date') or _current_datetime
2478 2478 # guard against a commit date that is newer than "now": if it is,
2479 2479 # we use the current timestamp instead, so a commit carrying a bogus
2480 2480 # future date cannot push the repo's last update into the future.
2481 2481 last_change_timestamp = datetime_to_time(last_change)
2482 2482 current_timestamp = datetime_to_time(_current_datetime)
2483 2483 if last_change_timestamp > current_timestamp and not empty:
2484 2484 cs_cache['date'] = _current_datetime
2485 2485
2486 2486 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2487 2487 cs_cache['updated_on'] = time.time()
2488 2488 self.changeset_cache = cs_cache
2489 2489 self.updated_on = last_change
2490 2490 Session().add(self)
2491 2491 Session().commit()
2492 2492
2493 2493 else:
2494 2494 if empty:
2495 2495 cs_cache = EmptyCommit().__json__()
2496 2496 else:
2497 2497 cs_cache = self.changeset_cache
2498 2498
2499 2499 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2500 2500
2501 2501 cs_cache['updated_on'] = time.time()
2502 2502 self.changeset_cache = cs_cache
2503 2503 self.updated_on = _date_latest
2504 2504 Session().add(self)
2505 2505 Session().commit()
2506 2506
2507 2507 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2508 2508 self.repo_name, cs_cache, _date_latest)
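# For orientation, a cached entry produced above has roughly this shape
# (values are made up; keys follow the docstring of update_commit_cache):
#
#     {'source_repo_id': 2, 'short_id': 'abcdef123456',
#      'raw_id': 'abcdef123456...', 'revision': 7, 'parents': [...],
#      'message': 'fix bug', 'date': '2020-01-01T10:00:00',
#      'author': 'Jane <jane@example.com>', 'updated_on': 1577872800.0}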
2509 2509
2510 2510 @property
2511 2511 def tip(self):
2512 2512 return self.get_commit('tip')
2513 2513
2514 2514 @property
2515 2515 def author(self):
2516 2516 return self.tip.author
2517 2517
2518 2518 @property
2519 2519 def last_change(self):
2520 2520 return self.scm_instance().last_change
2521 2521
2522 2522 def get_comments(self, revisions=None):
2523 2523 """
2524 2524 Returns comments for this repository grouped by revisions
2525 2525
2526 2526 :param revisions: filter query by revisions only
2527 2527 """
2528 2528 cmts = ChangesetComment.query()\
2529 2529 .filter(ChangesetComment.repo == self)
2530 2530 if revisions:
2531 2531 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2532 2532 grouped = collections.defaultdict(list)
2533 2533 for cmt in cmts.all():
2534 2534 grouped[cmt.revision].append(cmt)
2535 2535 return grouped
2536 2536
2537 2537 def statuses(self, revisions=None):
2538 2538 """
2539 2539 Returns statuses for this repository
2540 2540
2541 2541 :param revisions: list of revisions to get statuses for
2542 2542 """
2543 2543 statuses = ChangesetStatus.query()\
2544 2544 .filter(ChangesetStatus.repo == self)\
2545 2545 .filter(ChangesetStatus.version == 0)
2546 2546
2547 2547 if revisions:
2548 2548 # Try doing the filtering in chunks to avoid hitting limits
2549 2549 size = 500
2550 2550 status_results = []
2551 2551 for chunk in xrange(0, len(revisions), size):
2552 2552 status_results += statuses.filter(
2553 2553 ChangesetStatus.revision.in_(
2554 2554 revisions[chunk: chunk+size])
2555 2555 ).all()
2556 2556 else:
2557 2557 status_results = statuses.all()
2558 2558
2559 2559 grouped = {}
2560 2560
2561 2561 # maybe we have an open pull request without a status yet?
2562 2562 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2563 2563 status_lbl = ChangesetStatus.get_status_lbl(stat)
2564 2564 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2565 2565 for rev in pr.revisions:
2566 2566 pr_id = pr.pull_request_id
2567 2567 pr_repo = pr.target_repo.repo_name
2568 2568 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2569 2569
2570 2570 for stat in status_results:
2571 2571 pr_id = pr_repo = None
2572 2572 if stat.pull_request:
2573 2573 pr_id = stat.pull_request.pull_request_id
2574 2574 pr_repo = stat.pull_request.target_repo.repo_name
2575 2575 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2576 2576 pr_id, pr_repo]
2577 2577 return grouped
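# The returned mapping carries one status entry per revision, in the form
# [status, status label, pull request id, target repo] (illustrative values):
#
#     {'<40-char sha>': ['approved', 'Approved', 12, 'group/target-repo'],
#      '<other sha>': ['under_review', 'Under Review', 12, 'group/target-repo']}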
2578 2578
2579 2579 # ==========================================================================
2580 2580 # SCM CACHE INSTANCE
2581 2581 # ==========================================================================
2582 2582
2583 2583 def scm_instance(self, **kwargs):
2584 2584 import rhodecode
2585 2585
2586 2586 # Passing a config will not hit the cache; currently this is only used
2587 2587 # for repo2dbmapper
2588 2588 config = kwargs.pop('config', None)
2589 2589 cache = kwargs.pop('cache', None)
2590 2590 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2591 2591 if vcs_full_cache is not None:
2592 2592 # allows overriding the global config
2593 2593 full_cache = vcs_full_cache
2594 2594 else:
2595 2595 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2596 2596 # if cache is NOT defined use the global default, else we have full
2597 2597 # control over the cache behaviour
2598 2598 if cache is None and full_cache and not config:
2599 2599 log.debug('Initializing pure cached instance for %s', self.repo_path)
2600 2600 return self._get_instance_cached()
2601 2601
2602 2602 # cache here is sent to the "vcs server"
2603 2603 return self._get_instance(cache=bool(cache), config=config)
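# Usage sketch for ``scm_instance`` (illustrative):
#
#     scm = repo.scm_instance()                    # pure cached instance path
#     scm = repo.scm_instance(cache=False)         # bypass the vcs.remote cache
#     scm = repo.scm_instance(config=custom_cfg)   # config given -> never cached
#
# ``custom_cfg`` is assumed to be the config object repo2dbmapper builds.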
2604 2604
2605 2605 def _get_instance_cached(self):
2606 2606 from rhodecode.lib import rc_cache
2607 2607
2608 2608 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2609 2609 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2610 2610 repo_id=self.repo_id)
2611 2611 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2612 2612
2613 2613 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2614 2614 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2615 2615 return self._get_instance(repo_state_uid=_cache_state_uid)
2616 2616
2617 2617 # we must use a thread-scoped cache here, because each gevent thread
2618 2618 # needs its own non-shared connection and cache.
2619 2619 # we also alter `args` so the cache key is individual for every green thread.
2620 2620 inv_context_manager = rc_cache.InvalidationContext(
2621 2621 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2622 2622 thread_scoped=True)
2623 2623 with inv_context_manager as invalidation_context:
2624 2624 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2625 2625 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2626 2626
2627 2627 # re-compute and store the cache if we get an invalidation signal
2628 2628 if invalidation_context.should_invalidate():
2629 2629 instance = get_instance_cached.refresh(*args)
2630 2630 else:
2631 2631 instance = get_instance_cached(*args)
2632 2632
2633 2633 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2634 2634 return instance
2635 2635
2636 2636 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2637 2637 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2638 2638 self.repo_type, self.repo_path, cache)
2639 2639 config = config or self._config
2640 2640 custom_wire = {
2641 2641 'cache': cache, # controls the vcs.remote cache
2642 2642 'repo_state_uid': repo_state_uid
2643 2643 }
2644 2644 repo = get_vcs_instance(
2645 2645 repo_path=safe_str(self.repo_full_path),
2646 2646 config=config,
2647 2647 with_wire=custom_wire,
2648 2648 create=False,
2649 2649 _vcs_alias=self.repo_type)
2650 2650 if repo is not None:
2651 2651 repo.count() # cache rebuild
2652 2652 return repo
2653 2653
2654 2654 def get_shadow_repository_path(self, workspace_id):
2655 2655 from rhodecode.lib.vcs.backends.base import BaseRepository
2656 2656 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2657 2657 self.repo_full_path, self.repo_id, workspace_id)
2658 2658 return shadow_repo_path
2659 2659
2660 2660 def __json__(self):
2661 2661 return {'landing_rev': self.landing_rev}
2662 2662
2663 2663 def get_dict(self):
2664 2664
2665 2665 # Since we transformed `repo_name` to a hybrid property, we need to
2666 2666 # keep compatibility with the code which uses `repo_name` field.
2667 2667
2668 2668 result = super(Repository, self).get_dict()
2669 2669 result['repo_name'] = result.pop('_repo_name', None)
2670 2670 return result
2671 2671
2672 2672
2673 2673 class RepoGroup(Base, BaseModel):
2674 2674 __tablename__ = 'groups'
2675 2675 __table_args__ = (
2676 2676 UniqueConstraint('group_name', 'group_parent_id'),
2677 2677 base_table_args,
2678 2678 )
2679 2679 __mapper_args__ = {'order_by': 'group_name'}
2680 2680
2681 2681 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2682 2682
2683 2683 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2684 2684 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2685 2685 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2686 2686 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2687 2687 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2688 2688 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2689 2689 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2690 2690 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2691 2691 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2692 2692 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2693 2693 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2694 2694
2695 2695 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2696 2696 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2697 2697 parent_group = relationship('RepoGroup', remote_side=group_id)
2698 2698 user = relationship('User')
2699 2699 integrations = relationship('Integration', cascade="all, delete-orphan")
2700 2700
2701 2701 # no cascade, set NULL
2702 2702 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2703 2703
2704 2704 def __init__(self, group_name='', parent_group=None):
2705 2705 self.group_name = group_name
2706 2706 self.parent_group = parent_group
2707 2707
2708 2708 def __unicode__(self):
2709 2709 return u"<%s('id:%s:%s')>" % (
2710 2710 self.__class__.__name__, self.group_id, self.group_name)
2711 2711
2712 2712 @hybrid_property
2713 2713 def group_name(self):
2714 2714 return self._group_name
2715 2715
2716 2716 @group_name.setter
2717 2717 def group_name(self, value):
2718 2718 self._group_name = value
2719 2719 self.group_name_hash = self.hash_repo_group_name(value)
2720 2720
2721 2721 @classmethod
2722 2722 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2723 2723 from rhodecode.lib.vcs.backends.base import EmptyCommit
2724 2724 dummy = EmptyCommit().__json__()
2725 2725 if not changeset_cache_raw:
2726 2726 dummy['source_repo_id'] = repo_id
2727 2727 return json.loads(json.dumps(dummy))
2728 2728
2729 2729 try:
2730 2730 return json.loads(changeset_cache_raw)
2731 2731 except TypeError:
2732 2732 return dummy
2733 2733 except Exception:
2734 2734 log.error(traceback.format_exc())
2735 2735 return dummy
2736 2736
2737 2737 @hybrid_property
2738 2738 def changeset_cache(self):
2739 2739 return self._load_changeset_cache('', self._changeset_cache)
2740 2740
2741 2741 @changeset_cache.setter
2742 2742 def changeset_cache(self, val):
2743 2743 try:
2744 2744 self._changeset_cache = json.dumps(val)
2745 2745 except Exception:
2746 2746 log.error(traceback.format_exc())
2747 2747
2748 2748 @validates('group_parent_id')
2749 2749 def validate_group_parent_id(self, key, val):
2750 2750 """
2751 2751 Check for cyclic references from a parent group back to self
2752 2752 """
2753 2753 if self.group_id and val:
2754 2754 assert val != self.group_id
2755 2755
2756 2756 return val
2757 2757
2758 2758 @hybrid_property
2759 2759 def description_safe(self):
2760 2760 from rhodecode.lib import helpers as h
2761 2761 return h.escape(self.group_description)
2762 2762
2763 2763 @classmethod
2764 2764 def hash_repo_group_name(cls, repo_group_name):
2765 2765 val = remove_formatting(repo_group_name)
2766 2766 val = safe_str(val).lower()
2767 2767 chars = []
2768 2768 for c in val:
2769 2769 if c not in string.ascii_letters:
2770 2770 c = str(ord(c))
2771 2771 chars.append(c)
2772 2772
2773 2773 return ''.join(chars)
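# Worked example for ``hash_repo_group_name`` (assuming ``remove_formatting``
# leaves plain ASCII untouched): every non-letter becomes its ordinal, so
#
#     hash_repo_group_name(u'My Repos!') == 'my32repos33'
#
# because ord(' ') == 32 and ord('!') == 33.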
2774 2774
2775 2775 @classmethod
2776 2776 def _generate_choice(cls, repo_group):
2777 2777 from webhelpers2.html import literal as _literal
2778 2778 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2779 2779 return repo_group.group_id, _name(repo_group.full_path_splitted)
2780 2780
2781 2781 @classmethod
2782 2782 def groups_choices(cls, groups=None, show_empty_group=True):
2783 2783 if not groups:
2784 2784 groups = cls.query().all()
2785 2785
2786 2786 repo_groups = []
2787 2787 if show_empty_group:
2788 2788 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2789 2789
2790 2790 repo_groups.extend([cls._generate_choice(x) for x in groups])
2791 2791
2792 2792 repo_groups = sorted(
2793 2793 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2794 2794 return repo_groups
2795 2795
2796 2796 @classmethod
2797 2797 def url_sep(cls):
2798 2798 return URL_SEP
2799 2799
2800 2800 @classmethod
2801 2801 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2802 2802 if case_insensitive:
2803 2803 gr = cls.query().filter(func.lower(cls.group_name)
2804 2804 == func.lower(group_name))
2805 2805 else:
2806 2806 gr = cls.query().filter(cls.group_name == group_name)
2807 2807 if cache:
2808 2808 name_key = _hash_key(group_name)
2809 2809 gr = gr.options(
2810 2810 FromCache("sql_cache_short", "get_group_%s" % name_key))
2811 2811 return gr.scalar()
2812 2812
2813 2813 @classmethod
2814 2814 def get_user_personal_repo_group(cls, user_id):
2815 2815 user = User.get(user_id)
2816 2816 if user.username == User.DEFAULT_USER:
2817 2817 return None
2818 2818
2819 2819 return cls.query()\
2820 2820 .filter(cls.personal == true()) \
2821 2821 .filter(cls.user == user) \
2822 2822 .order_by(cls.group_id.asc()) \
2823 2823 .first()
2824 2824
2825 2825 @classmethod
2826 2826 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2827 2827 case_insensitive=True):
2828 2828 q = RepoGroup.query()
2829 2829
2830 2830 if not isinstance(user_id, Optional):
2831 2831 q = q.filter(RepoGroup.user_id == user_id)
2832 2832
2833 2833 if not isinstance(group_id, Optional):
2834 2834 q = q.filter(RepoGroup.group_parent_id == group_id)
2835 2835
2836 2836 if case_insensitive:
2837 2837 q = q.order_by(func.lower(RepoGroup.group_name))
2838 2838 else:
2839 2839 q = q.order_by(RepoGroup.group_name)
2840 2840 return q.all()
2841 2841
2842 2842 @property
2843 2843 def parents(self, parents_recursion_limit=10):
2844 2844 groups = []
2845 2845 if self.parent_group is None:
2846 2846 return groups
2847 2847 cur_gr = self.parent_group
2848 2848 groups.insert(0, cur_gr)
2849 2849 cnt = 0
2850 2850 while 1:
2851 2851 cnt += 1
2852 2852 gr = getattr(cur_gr, 'parent_group', None)
2853 2853 cur_gr = cur_gr.parent_group
2854 2854 if gr is None:
2855 2855 break
2856 2856 if cnt == parents_recursion_limit:
2857 2857 # this will prevent accidental infinite loops
2858 2858 log.error('more than %s parents found for group %s, stopping '
2859 2859 'recursive parent fetching', parents_recursion_limit, self)
2860 2860 break
2861 2861
2862 2862 groups.insert(0, gr)
2863 2863 return groups
2864 2864
2865 2865 @property
2866 2866 def last_commit_cache_update_diff(self):
2867 2867 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2868 2868
2869 2869 @classmethod
2870 2870 def _load_commit_change(cls, last_commit_cache):
2871 2871 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2872 2872 empty_date = datetime.datetime.fromtimestamp(0)
2873 2873 date_latest = last_commit_cache.get('date', empty_date)
2874 2874 try:
2875 2875 return parse_datetime(date_latest)
2876 2876 except Exception:
2877 2877 return empty_date
2878 2878
2879 2879 @property
2880 2880 def last_commit_change(self):
2881 2881 return self._load_commit_change(self.changeset_cache)
2882 2882
2883 2883 @property
2884 2884 def last_db_change(self):
2885 2885 return self.updated_on
2886 2886
2887 2887 @property
2888 2888 def children(self):
2889 2889 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2890 2890
2891 2891 @property
2892 2892 def name(self):
2893 2893 return self.group_name.split(RepoGroup.url_sep())[-1]
2894 2894
2895 2895 @property
2896 2896 def full_path(self):
2897 2897 return self.group_name
2898 2898
2899 2899 @property
2900 2900 def full_path_splitted(self):
2901 2901 return self.group_name.split(RepoGroup.url_sep())
2902 2902
2903 2903 @property
2904 2904 def repositories(self):
2905 2905 return Repository.query()\
2906 2906 .filter(Repository.group == self)\
2907 2907 .order_by(Repository.repo_name)
2908 2908
2909 2909 @property
2910 2910 def repositories_recursive_count(self):
2911 2911 cnt = self.repositories.count()
2912 2912
2913 2913 def children_count(group):
2914 2914 cnt = 0
2915 2915 for child in group.children:
2916 2916 cnt += child.repositories.count()
2917 2917 cnt += children_count(child)
2918 2918 return cnt
2919 2919
2920 2920 return cnt + children_count(self)
2921 2921
2922 2922 def _recursive_objects(self, include_repos=True, include_groups=True):
2923 2923 all_ = []
2924 2924
2925 2925 def _get_members(root_gr):
2926 2926 if include_repos:
2927 2927 for r in root_gr.repositories:
2928 2928 all_.append(r)
2929 2929 childs = root_gr.children.all()
2930 2930 if childs:
2931 2931 for gr in childs:
2932 2932 if include_groups:
2933 2933 all_.append(gr)
2934 2934 _get_members(gr)
2935 2935
2936 2936 root_group = []
2937 2937 if include_groups:
2938 2938 root_group = [self]
2939 2939
2940 2940 _get_members(self)
2941 2941 return root_group + all_
2942 2942
2943 2943 def recursive_groups_and_repos(self):
2944 2944 """
2945 2945 Recursively return all groups, with the repositories in those groups
2946 2946 """
2947 2947 return self._recursive_objects()
2948 2948
2949 2949 def recursive_groups(self):
2950 2950 """
2951 2951 Returns all child groups of this group, including children of children
2952 2952 """
2953 2953 return self._recursive_objects(include_repos=False)
2954 2954
2955 2955 def recursive_repos(self):
2956 2956 """
2957 2957 Returns all child repositories of this group
2958 2958 """
2959 2959 return self._recursive_objects(include_groups=False)
2960 2960
2961 2961 def get_new_name(self, group_name):
2962 2962 """
2963 2963 returns new full group name based on parent and new name
2964 2964
2965 2965 :param group_name:
2966 2966 """
2967 2967 path_prefix = (self.parent_group.full_path_splitted if
2968 2968 self.parent_group else [])
2969 2969 return RepoGroup.url_sep().join(path_prefix + [group_name])
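# Worked example for ``get_new_name``: the new name is joined onto the
# *parent's* path, so for a group whose parent is ``web`` (illustrative):
#
#     group.get_new_name('services') == 'web/services'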
2970 2970
2971 2971 def update_commit_cache(self, config=None):
2972 2972 """
2973 2973 Update cache of last commit for newest repository inside this repository group.
2974 2974 cache_keys should be::
2975 2975
2976 2976 source_repo_id
2977 2977 short_id
2978 2978 raw_id
2979 2979 revision
2980 2980 parents
2981 2981 message
2982 2982 date
2983 2983 author
2984 2984
2985 2985 """
2986 2986 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2987 2987 empty_date = datetime.datetime.fromtimestamp(0)
2988 2988
2989 2989 def repo_groups_and_repos(root_gr):
2990 2990 for _repo in root_gr.repositories:
2991 2991 yield _repo
2992 2992 for child_group in root_gr.children.all():
2993 2993 yield child_group
2994 2994
2995 2995 latest_repo_cs_cache = {}
2996 2996 for obj in repo_groups_and_repos(self):
2997 2997 repo_cs_cache = obj.changeset_cache
2998 2998 date_latest = latest_repo_cs_cache.get('date', empty_date)
2999 2999 date_current = repo_cs_cache.get('date', empty_date)
3000 3000 current_timestamp = datetime_to_time(parse_datetime(date_latest))
3001 3001 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
3002 3002 latest_repo_cs_cache = repo_cs_cache
3003 3003 if hasattr(obj, 'repo_id'):
3004 3004 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
3005 3005 else:
3006 3006 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
3007 3007
3008 3008 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
3009 3009
3010 3010 latest_repo_cs_cache['updated_on'] = time.time()
3011 3011 self.changeset_cache = latest_repo_cs_cache
3012 3012 self.updated_on = _date_latest
3013 3013 Session().add(self)
3014 3014 Session().commit()
3015 3015
3016 3016 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
3017 3017 self.group_name, latest_repo_cs_cache, _date_latest)
3018 3018
3019 3019 def permissions(self, with_admins=True, with_owner=True,
3020 3020 expand_from_user_groups=False):
3021 3021 """
3022 3022 Permissions for repository groups
3023 3023 """
3024 3024 _admin_perm = 'group.admin'
3025 3025
3026 3026 owner_row = []
3027 3027 if with_owner:
3028 3028 usr = AttributeDict(self.user.get_dict())
3029 3029 usr.owner_row = True
3030 3030 usr.permission = _admin_perm
3031 3031 owner_row.append(usr)
3032 3032
3033 3033 super_admin_ids = []
3034 3034 super_admin_rows = []
3035 3035 if with_admins:
3036 3036 for usr in User.get_all_super_admins():
3037 3037 super_admin_ids.append(usr.user_id)
3038 3038 # if this admin is also owner, don't double the record
3039 3039 if usr.user_id == owner_row[0].user_id:
3040 3040 owner_row[0].admin_row = True
3041 3041 else:
3042 3042 usr = AttributeDict(usr.get_dict())
3043 3043 usr.admin_row = True
3044 3044 usr.permission = _admin_perm
3045 3045 super_admin_rows.append(usr)
3046 3046
3047 3047 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3048 3048 q = q.options(joinedload(UserRepoGroupToPerm.group),
3049 3049 joinedload(UserRepoGroupToPerm.user),
3050 3050 joinedload(UserRepoGroupToPerm.permission),)
3051 3051
3052 3052 # get owners, admins and their permissions. We copy the sqlalchemy
3053 3053 # objects into plain AttributeDict objects because the sqlalchemy
3054 3054 # session holds a global reference, so changing one object propagates
3055 3055 # to all others. This means that if an admin is also an owner, an
3056 3056 # admin_row change would propagate to both objects.
3057 3057 perm_rows = []
3058 3058 for _usr in q.all():
3059 3059 usr = AttributeDict(_usr.user.get_dict())
3060 3060 # if this user is also owner/admin, mark as duplicate record
3061 3061 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3062 3062 usr.duplicate_perm = True
3063 3063 usr.permission = _usr.permission.permission_name
3064 3064 perm_rows.append(usr)
3065 3065
3066 3066 # filter the perm rows by 'default' first and then sort them by
3067 3067 # admin,write,read,none permissions sorted again alphabetically in
3068 3068 # each group
3069 3069 perm_rows = sorted(perm_rows, key=display_user_sort)
3070 3070
3071 3071 user_groups_rows = []
3072 3072 if expand_from_user_groups:
3073 3073 for ug in self.permission_user_groups(with_members=True):
3074 3074 for user_data in ug.members:
3075 3075 user_groups_rows.append(user_data)
3076 3076
3077 3077 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3078 3078
3079 3079 def permission_user_groups(self, with_members=False):
3080 3080 q = UserGroupRepoGroupToPerm.query()\
3081 3081 .filter(UserGroupRepoGroupToPerm.group == self)
3082 3082 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3083 3083 joinedload(UserGroupRepoGroupToPerm.users_group),
3084 3084 joinedload(UserGroupRepoGroupToPerm.permission),)
3085 3085
3086 3086 perm_rows = []
3087 3087 for _user_group in q.all():
3088 3088 entry = AttributeDict(_user_group.users_group.get_dict())
3089 3089 entry.permission = _user_group.permission.permission_name
3090 3090 if with_members:
3091 3091 entry.members = [x.user.get_dict()
3092 3092 for x in _user_group.users_group.members]
3093 3093 perm_rows.append(entry)
3094 3094
3095 3095 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3096 3096 return perm_rows
3097 3097
3098 3098 def get_api_data(self):
3099 3099 """
3100 3100 Common function for generating api data
3101 3101
3102 3102 """
3103 3103 group = self
3104 3104 data = {
3105 3105 'group_id': group.group_id,
3106 3106 'group_name': group.group_name,
3107 3107 'group_description': group.description_safe,
3108 3108 'parent_group': group.parent_group.group_name if group.parent_group else None,
3109 3109 'repositories': [x.repo_name for x in group.repositories],
3110 3110 'owner': group.user.username,
3111 3111 }
3112 3112 return data
3113 3113
3114 3114 def get_dict(self):
3115 3115 # Since we transformed `group_name` to a hybrid property, we need to
3116 3116 # keep compatibility with the code which uses `group_name` field.
3117 3117 result = super(RepoGroup, self).get_dict()
3118 3118 result['group_name'] = result.pop('_group_name', None)
3119 3119 return result
3120 3120
3121 3121
3122 3122 class Permission(Base, BaseModel):
3123 3123 __tablename__ = 'permissions'
3124 3124 __table_args__ = (
3125 3125 Index('p_perm_name_idx', 'permission_name'),
3126 3126 base_table_args,
3127 3127 )
3128 3128
3129 3129 PERMS = [
3130 3130 ('hg.admin', _('RhodeCode Super Administrator')),
3131 3131
3132 3132 ('repository.none', _('Repository no access')),
3133 3133 ('repository.read', _('Repository read access')),
3134 3134 ('repository.write', _('Repository write access')),
3135 3135 ('repository.admin', _('Repository admin access')),
3136 3136
3137 3137 ('group.none', _('Repository group no access')),
3138 3138 ('group.read', _('Repository group read access')),
3139 3139 ('group.write', _('Repository group write access')),
3140 3140 ('group.admin', _('Repository group admin access')),
3141 3141
3142 3142 ('usergroup.none', _('User group no access')),
3143 3143 ('usergroup.read', _('User group read access')),
3144 3144 ('usergroup.write', _('User group write access')),
3145 3145 ('usergroup.admin', _('User group admin access')),
3146 3146
3147 3147 ('branch.none', _('Branch no permissions')),
3148 3148 ('branch.merge', _('Branch access by web merge')),
3149 3149 ('branch.push', _('Branch access by push')),
3150 3150 ('branch.push_force', _('Branch access by push with force')),
3151 3151
3152 3152 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3153 3153 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3154 3154
3155 3155 ('hg.usergroup.create.false', _('User Group creation disabled')),
3156 3156 ('hg.usergroup.create.true', _('User Group creation enabled')),
3157 3157
3158 3158 ('hg.create.none', _('Repository creation disabled')),
3159 3159 ('hg.create.repository', _('Repository creation enabled')),
3160 3160 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3161 3161 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3162 3162
3163 3163 ('hg.fork.none', _('Repository forking disabled')),
3164 3164 ('hg.fork.repository', _('Repository forking enabled')),
3165 3165
3166 3166 ('hg.register.none', _('Registration disabled')),
3167 3167 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3168 3168 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3169 3169
3170 3170 ('hg.password_reset.enabled', _('Password reset enabled')),
3171 3171 ('hg.password_reset.hidden', _('Password reset hidden')),
3172 3172 ('hg.password_reset.disabled', _('Password reset disabled')),
3173 3173
3174 3174 ('hg.extern_activate.manual', _('Manual activation of external account')),
3175 3175 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3176 3176
3177 3177 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3178 3178 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3179 3179 ]
3180 3180
3181 3181 # definition of system default permissions for DEFAULT user, created on
3182 3182 # system setup
3183 3183 DEFAULT_USER_PERMISSIONS = [
3184 3184 # object perms
3185 3185 'repository.read',
3186 3186 'group.read',
3187 3187 'usergroup.read',
3188 3188 # branch; for backward compat we need the same value as before, so forced push
3189 3189 'branch.push_force',
3190 3190 # global
3191 3191 'hg.create.repository',
3192 3192 'hg.repogroup.create.false',
3193 3193 'hg.usergroup.create.false',
3194 3194 'hg.create.write_on_repogroup.true',
3195 3195 'hg.fork.repository',
3196 3196 'hg.register.manual_activate',
3197 3197 'hg.password_reset.enabled',
3198 3198 'hg.extern_activate.auto',
3199 3199 'hg.inherit_default_perms.true',
3200 3200 ]
3201 3201
3202 3202 # Weight defines which permissions are more important;
3203 3203 # the higher the number, the more important.
3205 3205 PERM_WEIGHTS = {
3206 3206 'repository.none': 0,
3207 3207 'repository.read': 1,
3208 3208 'repository.write': 3,
3209 3209 'repository.admin': 4,
3210 3210
3211 3211 'group.none': 0,
3212 3212 'group.read': 1,
3213 3213 'group.write': 3,
3214 3214 'group.admin': 4,
3215 3215
3216 3216 'usergroup.none': 0,
3217 3217 'usergroup.read': 1,
3218 3218 'usergroup.write': 3,
3219 3219 'usergroup.admin': 4,
3220 3220
3221 3221 'branch.none': 0,
3222 3222 'branch.merge': 1,
3223 3223 'branch.push': 3,
3224 3224 'branch.push_force': 4,
3225 3225
3226 3226 'hg.repogroup.create.false': 0,
3227 3227 'hg.repogroup.create.true': 1,
3228 3228
3229 3229 'hg.usergroup.create.false': 0,
3230 3230 'hg.usergroup.create.true': 1,
3231 3231
3232 3232 'hg.fork.none': 0,
3233 3233 'hg.fork.repository': 1,
3234 3234 'hg.create.none': 0,
3235 3235 'hg.create.repository': 1
3236 3236 }
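# A minimal sketch of how these weights can resolve the strongest of several
# granted permissions (illustrative; not a helper defined in this module):
#
#     perms = ['repository.read', 'repository.write']
#     strongest = max(perms, key=Permission.PERM_WEIGHTS.get)
#     # -> 'repository.write', since weight 3 beats weight 1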
3237 3237
3238 3238 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3239 3239 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3240 3240 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3241 3241
3242 3242 def __unicode__(self):
3243 3243 return u"<%s('%s:%s')>" % (
3244 3244 self.__class__.__name__, self.permission_id, self.permission_name
3245 3245 )
3246 3246
3247 3247 @classmethod
3248 3248 def get_by_key(cls, key):
3249 3249 return cls.query().filter(cls.permission_name == key).scalar()
3250 3250
3251 3251 @classmethod
3252 3252 def get_default_repo_perms(cls, user_id, repo_id=None):
3253 3253 q = Session().query(UserRepoToPerm, Repository, Permission)\
3254 3254 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3255 3255 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3256 3256 .filter(UserRepoToPerm.user_id == user_id)
3257 3257 if repo_id:
3258 3258 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3259 3259 return q.all()
3260 3260
3261 3261 @classmethod
3262 3262 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3263 3263 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3264 3264 .join(
3265 3265 Permission,
3266 3266 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3267 3267 .join(
3268 3268 UserRepoToPerm,
3269 3269 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3270 3270 .filter(UserRepoToPerm.user_id == user_id)
3271 3271
3272 3272 if repo_id:
3273 3273 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3274 3274 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3275 3275
3276 3276 @classmethod
3277 3277 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3278 3278 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3279 3279 .join(
3280 3280 Permission,
3281 3281 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3282 3282 .join(
3283 3283 Repository,
3284 3284 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3285 3285 .join(
3286 3286 UserGroup,
3287 3287 UserGroupRepoToPerm.users_group_id ==
3288 3288 UserGroup.users_group_id)\
3289 3289 .join(
3290 3290 UserGroupMember,
3291 3291 UserGroupRepoToPerm.users_group_id ==
3292 3292 UserGroupMember.users_group_id)\
3293 3293 .filter(
3294 3294 UserGroupMember.user_id == user_id,
3295 3295 UserGroup.users_group_active == true())
3296 3296 if repo_id:
3297 3297 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3298 3298 return q.all()
3299 3299
3300 3300 @classmethod
3301 3301 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3302 3302 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3303 3303 .join(
3304 3304 Permission,
3305 3305 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3306 3306 .join(
3307 3307 UserGroupRepoToPerm,
3308 3308 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3309 3309 .join(
3310 3310 UserGroup,
3311 3311 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3312 3312 .join(
3313 3313 UserGroupMember,
3314 3314 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3315 3315 .filter(
3316 3316 UserGroupMember.user_id == user_id,
3317 3317 UserGroup.users_group_active == true())
3318 3318
3319 3319 if repo_id:
3320 3320 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3321 3321 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3322 3322
3323 3323 @classmethod
3324 3324 def get_default_group_perms(cls, user_id, repo_group_id=None):
3325 3325 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3326 3326 .join(
3327 3327 Permission,
3328 3328 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3329 3329 .join(
3330 3330 RepoGroup,
3331 3331 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3332 3332 .filter(UserRepoGroupToPerm.user_id == user_id)
3333 3333 if repo_group_id:
3334 3334 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3335 3335 return q.all()
3336 3336
3337 3337 @classmethod
3338 3338 def get_default_group_perms_from_user_group(
3339 3339 cls, user_id, repo_group_id=None):
3340 3340 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3341 3341 .join(
3342 3342 Permission,
3343 3343 UserGroupRepoGroupToPerm.permission_id ==
3344 3344 Permission.permission_id)\
3345 3345 .join(
3346 3346 RepoGroup,
3347 3347 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3348 3348 .join(
3349 3349 UserGroup,
3350 3350 UserGroupRepoGroupToPerm.users_group_id ==
3351 3351 UserGroup.users_group_id)\
3352 3352 .join(
3353 3353 UserGroupMember,
3354 3354 UserGroupRepoGroupToPerm.users_group_id ==
3355 3355 UserGroupMember.users_group_id)\
3356 3356 .filter(
3357 3357 UserGroupMember.user_id == user_id,
3358 3358 UserGroup.users_group_active == true())
3359 3359 if repo_group_id:
3360 3360 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3361 3361 return q.all()
3362 3362
3363 3363 @classmethod
3364 3364 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3365 3365 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3366 3366 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3367 3367 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3368 3368 .filter(UserUserGroupToPerm.user_id == user_id)
3369 3369 if user_group_id:
3370 3370 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3371 3371 return q.all()
3372 3372
3373 3373 @classmethod
3374 3374 def get_default_user_group_perms_from_user_group(
3375 3375 cls, user_id, user_group_id=None):
3376 3376 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3377 3377 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3378 3378 .join(
3379 3379 Permission,
3380 3380 UserGroupUserGroupToPerm.permission_id ==
3381 3381 Permission.permission_id)\
3382 3382 .join(
3383 3383 TargetUserGroup,
3384 3384 UserGroupUserGroupToPerm.target_user_group_id ==
3385 3385 TargetUserGroup.users_group_id)\
3386 3386 .join(
3387 3387 UserGroup,
3388 3388 UserGroupUserGroupToPerm.user_group_id ==
3389 3389 UserGroup.users_group_id)\
3390 3390 .join(
3391 3391 UserGroupMember,
3392 3392 UserGroupUserGroupToPerm.user_group_id ==
3393 3393 UserGroupMember.users_group_id)\
3394 3394 .filter(
3395 3395 UserGroupMember.user_id == user_id,
3396 3396 UserGroup.users_group_active == true())
3397 3397 if user_group_id:
3398 3398 q = q.filter(
3399 3399 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3400 3400
3401 3401 return q.all()
3402 3402
3403 3403
3404 3404 class UserRepoToPerm(Base, BaseModel):
3405 3405 __tablename__ = 'repo_to_perm'
3406 3406 __table_args__ = (
3407 3407 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3408 3408 base_table_args
3409 3409 )
3410 3410
3411 3411 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3412 3412 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3413 3413 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3414 3414 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3415 3415
3416 3416 user = relationship('User')
3417 3417 repository = relationship('Repository')
3418 3418 permission = relationship('Permission')
3419 3419
3420 3420 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')
3421 3421
3422 3422 @classmethod
3423 3423 def create(cls, user, repository, permission):
3424 3424 n = cls()
3425 3425 n.user = user
3426 3426 n.repository = repository
3427 3427 n.permission = permission
3428 3428 Session().add(n)
3429 3429 return n
3430 3430
3431 3431 def __unicode__(self):
3432 3432 return u'<%s => %s >' % (self.user, self.repository)
3433 3433
3434 3434
3435 3435 class UserUserGroupToPerm(Base, BaseModel):
3436 3436 __tablename__ = 'user_user_group_to_perm'
3437 3437 __table_args__ = (
3438 3438 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3439 3439 base_table_args
3440 3440 )
3441 3441
3442 3442 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3443 3443 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3444 3444 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3445 3445 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3446 3446
3447 3447 user = relationship('User')
3448 3448 user_group = relationship('UserGroup')
3449 3449 permission = relationship('Permission')
3450 3450
3451 3451 @classmethod
3452 3452 def create(cls, user, user_group, permission):
3453 3453 n = cls()
3454 3454 n.user = user
3455 3455 n.user_group = user_group
3456 3456 n.permission = permission
3457 3457 Session().add(n)
3458 3458 return n
3459 3459
3460 3460 def __unicode__(self):
3461 3461 return u'<%s => %s >' % (self.user, self.user_group)
3462 3462
3463 3463
3464 3464 class UserToPerm(Base, BaseModel):
3465 3465 __tablename__ = 'user_to_perm'
3466 3466 __table_args__ = (
3467 3467 UniqueConstraint('user_id', 'permission_id'),
3468 3468 base_table_args
3469 3469 )
3470 3470
3471 3471 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3472 3472 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3473 3473 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3474 3474
3475 3475 user = relationship('User')
3476 3476 permission = relationship('Permission', lazy='joined')
3477 3477
3478 3478 def __unicode__(self):
3479 3479 return u'<%s => %s >' % (self.user, self.permission)
3480 3480
3481 3481
3482 3482 class UserGroupRepoToPerm(Base, BaseModel):
3483 3483 __tablename__ = 'users_group_repo_to_perm'
3484 3484 __table_args__ = (
3485 3485 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3486 3486 base_table_args
3487 3487 )
3488 3488
3489 3489 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3490 3490 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3491 3491 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3492 3492 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3493 3493
3494 3494 users_group = relationship('UserGroup')
3495 3495 permission = relationship('Permission')
3496 3496 repository = relationship('Repository')
3497 3497 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3498 3498
3499 3499 @classmethod
3500 3500 def create(cls, users_group, repository, permission):
3501 3501 n = cls()
3502 3502 n.users_group = users_group
3503 3503 n.repository = repository
3504 3504 n.permission = permission
3505 3505 Session().add(n)
3506 3506 return n
3507 3507
3508 3508 def __unicode__(self):
3509 3509 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3510 3510
3511 3511
3512 3512 class UserGroupUserGroupToPerm(Base, BaseModel):
3513 3513 __tablename__ = 'user_group_user_group_to_perm'
3514 3514 __table_args__ = (
3515 3515 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3516 3516 CheckConstraint('target_user_group_id != user_group_id'),
3517 3517 base_table_args
3518 3518 )
3519 3519
3520 3520 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3521 3521 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3522 3522 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3523 3523 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3524 3524
3525 3525 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3526 3526 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3527 3527 permission = relationship('Permission')
3528 3528
3529 3529 @classmethod
3530 3530 def create(cls, target_user_group, user_group, permission):
3531 3531 n = cls()
3532 3532 n.target_user_group = target_user_group
3533 3533 n.user_group = user_group
3534 3534 n.permission = permission
3535 3535 Session().add(n)
3536 3536 return n
3537 3537
3538 3538 def __unicode__(self):
3539 3539 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3540 3540
3541 3541
3542 3542 class UserGroupToPerm(Base, BaseModel):
3543 3543 __tablename__ = 'users_group_to_perm'
3544 3544 __table_args__ = (
3545 3545 UniqueConstraint('users_group_id', 'permission_id',),
3546 3546 base_table_args
3547 3547 )
3548 3548
3549 3549 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3550 3550 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3551 3551 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3552 3552
3553 3553 users_group = relationship('UserGroup')
3554 3554 permission = relationship('Permission')
3555 3555
3556 3556
3557 3557 class UserRepoGroupToPerm(Base, BaseModel):
3558 3558 __tablename__ = 'user_repo_group_to_perm'
3559 3559 __table_args__ = (
3560 3560 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3561 3561 base_table_args
3562 3562 )
3563 3563
3564 3564 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3565 3565 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3566 3566 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3567 3567 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3568 3568
3569 3569 user = relationship('User')
3570 3570 group = relationship('RepoGroup')
3571 3571 permission = relationship('Permission')
3572 3572
3573 3573 @classmethod
3574 3574 def create(cls, user, repository_group, permission):
3575 3575 n = cls()
3576 3576 n.user = user
3577 3577 n.group = repository_group
3578 3578 n.permission = permission
3579 3579 Session().add(n)
3580 3580 return n
3581 3581
3582 3582
3583 3583 class UserGroupRepoGroupToPerm(Base, BaseModel):
3584 3584 __tablename__ = 'users_group_repo_group_to_perm'
3585 3585 __table_args__ = (
3586 3586 UniqueConstraint('users_group_id', 'group_id'),
3587 3587 base_table_args
3588 3588 )
3589 3589
3590 3590 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3591 3591 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3592 3592 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3593 3593 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3594 3594
3595 3595 users_group = relationship('UserGroup')
3596 3596 permission = relationship('Permission')
3597 3597 group = relationship('RepoGroup')
3598 3598
3599 3599 @classmethod
3600 3600 def create(cls, user_group, repository_group, permission):
3601 3601 n = cls()
3602 3602 n.users_group = user_group
3603 3603 n.group = repository_group
3604 3604 n.permission = permission
3605 3605 Session().add(n)
3606 3606 return n
3607 3607
3608 3608 def __unicode__(self):
3609 3609 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3610 3610
3611 3611
3612 3612 class Statistics(Base, BaseModel):
3613 3613 __tablename__ = 'statistics'
3614 3614 __table_args__ = (
3615 3615 base_table_args
3616 3616 )
3617 3617
3618 3618 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3619 3619 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3620 3620 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3621 3621 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3622 3622 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3623 3623 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3624 3624
3625 3625 repository = relationship('Repository', single_parent=True)
3626 3626
3627 3627
3628 3628 class UserFollowing(Base, BaseModel):
3629 3629 __tablename__ = 'user_followings'
3630 3630 __table_args__ = (
3631 3631 UniqueConstraint('user_id', 'follows_repository_id'),
3632 3632 UniqueConstraint('user_id', 'follows_user_id'),
3633 3633 base_table_args
3634 3634 )
3635 3635
3636 3636 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3637 3637 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3638 3638 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3639 3639 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3640 3640 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3641 3641
3642 3642 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3643 3643
3644 3644 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3645 3645 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3646 3646
3647 3647 @classmethod
3648 3648 def get_repo_followers(cls, repo_id):
3649 3649 return cls.query().filter(cls.follows_repo_id == repo_id)
3650 3650
3651 3651
3652 3652 class CacheKey(Base, BaseModel):
3653 3653 __tablename__ = 'cache_invalidation'
3654 3654 __table_args__ = (
3655 3655 UniqueConstraint('cache_key'),
3656 3656 Index('key_idx', 'cache_key'),
3657 3657 base_table_args,
3658 3658 )
3659 3659
3660 3660 CACHE_TYPE_FEED = 'FEED'
3661 3661
3662 3662 # namespaces used to register process/thread aware caches
3663 3663 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3664 3664 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3665 3665
3666 3666 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3667 3667 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3668 3668 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3669 3669 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3670 3670 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3671 3671
3672 3672 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3673 3673 self.cache_key = cache_key
3674 3674 self.cache_args = cache_args
3675 3675 self.cache_active = False
3676 3676 # first key should be same for all entries, since all workers should share it
3677 3677 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3678 3678
3679 3679 def __unicode__(self):
3680 3680 return u"<%s('%s:%s[%s]')>" % (
3681 3681 self.__class__.__name__,
3682 3682 self.cache_id, self.cache_key, self.cache_active)
3683 3683
3684 3684 def _cache_key_partition(self):
3685 3685 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3686 3686 return prefix, repo_name, suffix
3687 3687
3688 3688 def get_prefix(self):
3689 3689 """
3690 3690 Try to extract prefix from existing cache key. The key could consist
3691 3691 of prefix, repo_name, suffix
3692 3692 """
3693 3693 # this returns prefix, repo_name, suffix
3694 3694 return self._cache_key_partition()[0]
3695 3695
3696 3696 def get_suffix(self):
3697 3697 """
3698 3698 get suffix that might have been used in _get_cache_key to
3699 3699 generate self.cache_key. Only used for informational purposes
3700 3700 in repo_edit.mako.
3701 3701 """
3702 3702 # prefix, repo_name, suffix
3703 3703 return self._cache_key_partition()[2]
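# Illustrative behaviour of ``_cache_key_partition`` (values are made up):
# with cache_key = 'short_term:some/repo:extra' and cache_args = 'some/repo',
# str.partition yields
#
#     ('short_term:', 'some/repo', ':extra')
#
# so get_prefix() -> 'short_term:' and get_suffix() -> ':extra'.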
3704 3704
3705 3705 @classmethod
3706 3706 def generate_new_state_uid(cls, based_on=None):
3707 3707 if based_on:
3708 3708 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3709 3709 else:
3710 3710 return str(uuid.uuid4())
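# Note on the two branches above: uuid5 is deterministic for a given
# ``based_on`` value, while uuid4 is random, e.g. (illustrative):
#
#     generate_new_state_uid(based_on='repo-1')   # same uid on every call
#     generate_new_state_uid()                    # fresh uid on every call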
3711 3711
3712 3712 @classmethod
3713 3713 def delete_all_cache(cls):
3714 3714 """
3715 3715 Delete all cache keys from database.
3716 3716 Should only be run when all instances are down and all entries
3717 3717 thus stale.
3718 3718 """
3719 3719 cls.query().delete()
3720 3720 Session().commit()
3721 3721
3722 3722 @classmethod
3723 3723 def set_invalidate(cls, cache_uid, delete=False):
3724 3724 """
3725 3725 Mark all caches of a repo as invalid in the database.
3726 3726 """
3727 3727
3728 3728 try:
3729 3729 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3730 3730 if delete:
3731 3731 qry.delete()
3732 3732 log.debug('cache objects deleted for cache args %s',
3733 3733 safe_str(cache_uid))
3734 3734 else:
3735 3735 qry.update({"cache_active": False,
3736 3736 "cache_state_uid": cls.generate_new_state_uid()})
3737 3737 log.debug('cache objects marked as invalid for cache args %s',
3738 3738 safe_str(cache_uid))
3739 3739
3740 3740 Session().commit()
3741 3741 except Exception:
3742 3742 log.exception(
3743 3743 'Cache key invalidation failed for cache args %s',
3744 3744 safe_str(cache_uid))
3745 3745 Session().rollback()
3746 3746
3747 3747 @classmethod
3748 3748 def get_active_cache(cls, cache_key):
3749 3749 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3750 3750 if inv_obj:
3751 3751 return inv_obj
3752 3752 return None
3753 3753
3754 3754 @classmethod
3755 3755 def get_namespace_map(cls, namespace):
3756 3756 return {
3757 3757 x.cache_key: x
3758 3758 for x in cls.query().filter(cls.cache_args == namespace)}
3759 3759
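# A minimal usage sketch (not called anywhere in this module) tying the
# CacheKey helpers together. The repo_id and example key values are
# placeholders; get_prefix()/get_suffix() simply rely on str.partition()
# splitting the key around cache_args.
def _example_cache_key_usage(repo_id=1):
    key = CacheKey('repo_cache:1_readme', cache_args='repo_cache:1')
    prefix, suffix = key.get_prefix(), key.get_suffix()  # -> '', '_readme'

    # invalidate everything registered under this repo's namespace: either
    # mark the entries inactive and rotate their state uid, or drop them
    namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    CacheKey.set_invalidate(namespace)
    CacheKey.set_invalidate(namespace, delete=True)
    return prefix, suffix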
3760 3760
3761 3761 class ChangesetComment(Base, BaseModel):
3762 3762 __tablename__ = 'changeset_comments'
3763 3763 __table_args__ = (
3764 3764 Index('cc_revision_idx', 'revision'),
3765 3765 base_table_args,
3766 3766 )
3767 3767
3768 3768 COMMENT_OUTDATED = u'comment_outdated'
3769 3769 COMMENT_TYPE_NOTE = u'note'
3770 3770 COMMENT_TYPE_TODO = u'todo'
3771 3771 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3772 3772
3773 3773 OP_IMMUTABLE = u'immutable'
3774 3774 OP_CHANGEABLE = u'changeable'
3775 3775
3776 3776 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3777 3777 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3778 3778 revision = Column('revision', String(40), nullable=True)
3779 3779 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3780 3780 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3781 3781 line_no = Column('line_no', Unicode(10), nullable=True)
3782 3782 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3783 3783 f_path = Column('f_path', Unicode(1000), nullable=True)
3784 3784 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3785 3785 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3786 3786 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3787 3787 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3788 3788 renderer = Column('renderer', Unicode(64), nullable=True)
3789 3789 display_state = Column('display_state', Unicode(128), nullable=True)
3790 3790 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3791 3791 draft = Column('draft', Boolean(), nullable=True, default=False)
3792 3792
3793 3793 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3794 3794 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3795 3795
3796 3796 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3797 3797 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3798 3798
3799 3799 author = relationship('User', lazy='select')
3800 3800 repo = relationship('Repository')
3801 3801 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select')
3802 3802 pull_request = relationship('PullRequest', lazy='select')
3803 3803 pull_request_version = relationship('PullRequestVersion', lazy='select')
3804 3804 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version')
3805 3805
3806 3806 @classmethod
3807 3807 def get_users(cls, revision=None, pull_request_id=None):
3808 3808 """
3809 3809 Returns users associated with this ChangesetComment, i.e. those
3810 3810 who actually commented
3811 3811
3812 3812 :param cls:
3813 3813 :param revision:
3814 3814 """
3815 3815 q = Session().query(User)\
3816 3816 .join(ChangesetComment.author)
3817 3817 if revision:
3818 3818 q = q.filter(cls.revision == revision)
3819 3819 elif pull_request_id:
3820 3820 q = q.filter(cls.pull_request_id == pull_request_id)
3821 3821 return q.all()
3822 3822
3823 3823 @classmethod
3824 3824 def get_index_from_version(cls, pr_version, versions=None, num_versions=None):
3825 3825
3826 3826 if versions is not None:
3827 3827 num_versions = [x.pull_request_version_id for x in versions]
3828 3828
3829 3829 num_versions = num_versions or []
3830 3830 try:
3831 3831 return num_versions.index(pr_version) + 1
3832 3832 except (IndexError, ValueError):
3833 3833 return
3834 3834
3835 3835 @property
3836 3836 def outdated(self):
3837 3837 return self.display_state == self.COMMENT_OUTDATED
3838 3838
3839 3839 @property
3840 3840 def outdated_js(self):
3841 3841 return json.dumps(self.display_state == self.COMMENT_OUTDATED)
3842 3842
3843 3843 @property
3844 3844 def immutable(self):
3845 3845 return self.immutable_state == self.OP_IMMUTABLE
3846 3846
3847 3847 def outdated_at_version(self, version):
3848 3848 """
3849 3849 Checks if comment is outdated for given pull request version
3850 3850 """
3851 3851 def version_check():
3852 3852 return self.pull_request_version_id and self.pull_request_version_id != version
3853 3853
3854 3854 if self.is_inline:
3855 3855 return self.outdated and version_check()
3856 3856 else:
3857 3857 # general comments don't have .outdated set; also the latest version doesn't have a version id
3858 3858 return version_check()
3859 3859
3860 3860 def outdated_at_version_js(self, version):
3861 3861 """
3862 3862 Checks if comment is outdated for given pull request version
3863 3863 """
3864 3864 return json.dumps(self.outdated_at_version(version))
3865 3865
3866 3866 def older_than_version(self, version):
3867 3867 """
3868 3868 Checks if the comment was made on an earlier version than the given one
3869 3869 """
3870 3870 if version is None:
3871 3871 return self.pull_request_version != version
3872 3872
3873 3873 return self.pull_request_version < version
3874 3874
3875 3875 def older_than_version_js(self, version):
3876 3876 """
3877 3877 Checks if the comment was made on an earlier version than the given one
3878 3878 """
3879 3879 return json.dumps(self.older_than_version(version))
3880 3880
3881 3881 @property
3882 3882 def commit_id(self):
3883 3883 """New style naming to stop using .revision"""
3884 3884 return self.revision
3885 3885
3886 3886 @property
3887 3887 def resolved(self):
3888 3888 return self.resolved_by[0] if self.resolved_by else None
3889 3889
3890 3890 @property
3891 3891 def is_todo(self):
3892 3892 return self.comment_type == self.COMMENT_TYPE_TODO
3893 3893
3894 3894 @property
3895 3895 def is_inline(self):
3896 3896 if self.line_no and self.f_path:
3897 3897 return True
3898 3898 return False
3899 3899
3900 3900 @property
3901 3901 def last_version(self):
3902 3902 version = 0
3903 3903 if self.history:
3904 3904 version = self.history[-1].version
3905 3905 return version
3906 3906
3907 3907 def get_index_version(self, versions):
3908 3908 return self.get_index_from_version(
3909 3909 self.pull_request_version_id, versions)
3910 3910
3911 3911 @property
3912 3912 def review_status(self):
3913 3913 if self.status_change:
3914 3914 return self.status_change[0].status
3915 3915
3916 3916 @property
3917 3917 def review_status_lbl(self):
3918 3918 if self.status_change:
3919 3919 return self.status_change[0].status_lbl
3920 3920
3921 3921 def __repr__(self):
3922 3922 if self.comment_id:
3923 3923 return '<DB:Comment #%s>' % self.comment_id
3924 3924 else:
3925 3925 return '<DB:Comment at %#x>' % id(self)
3926 3926
3927 3927 def get_api_data(self):
3928 3928 comment = self
3929 3929
3930 3930 data = {
3931 3931 'comment_id': comment.comment_id,
3932 3932 'comment_type': comment.comment_type,
3933 3933 'comment_text': comment.text,
3934 3934 'comment_status': comment.status_change,
3935 3935 'comment_f_path': comment.f_path,
3936 3936 'comment_lineno': comment.line_no,
3937 3937 'comment_author': comment.author,
3938 3938 'comment_created_on': comment.created_on,
3939 3939 'comment_resolved_by': self.resolved,
3940 3940 'comment_commit_id': comment.revision,
3941 3941 'comment_pull_request_id': comment.pull_request_id,
3942 3942 'comment_last_version': self.last_version
3943 3943 }
3944 3944 return data
3945 3945
3946 3946 def __json__(self):
3947 3947 data = dict()
3948 3948 data.update(self.get_api_data())
3949 3949 return data
3950 3950
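# A small self-contained sketch of get_index_from_version above; the version
# ids are made up. Given the ordered ids of a pull request's versions, it
# returns the 1-based position of the given version, or None when unknown.
def _example_comment_version_index():
    ids = [10, 14, 20]  # hypothetical pull_request_version_id values
    assert ChangesetComment.get_index_from_version(14, num_versions=ids) == 2
    assert ChangesetComment.get_index_from_version(99, num_versions=ids) is None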
3951 3951
3952 3952 class ChangesetCommentHistory(Base, BaseModel):
3953 3953 __tablename__ = 'changeset_comments_history'
3954 3954 __table_args__ = (
3955 3955 Index('cch_comment_id_idx', 'comment_id'),
3956 3956 base_table_args,
3957 3957 )
3958 3958
3959 3959 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
3960 3960 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
3961 3961 version = Column("version", Integer(), nullable=False, default=0)
3962 3962 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3963 3963 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3964 3964 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3965 3965 deleted = Column('deleted', Boolean(), default=False)
3966 3966
3967 3967 author = relationship('User', lazy='joined')
3968 3968 comment = relationship('ChangesetComment', cascade="all, delete")
3969 3969
3970 3970 @classmethod
3971 3971 def get_version(cls, comment_id):
3972 3972 q = Session().query(ChangesetCommentHistory).filter(
3973 3973 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
3974 3974 if q.count() == 0:
3975 3975 return 1
3976 3976 elif q.count() >= q[0].version:
3977 3977 return q.count() + 1
3978 3978 else:
3979 3979 return q[0].version + 1
3980 3980
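# A short walk-through (hypothetical data) of get_version above, which
# computes the next version number for a comment's edit history:
#
#   no history rows                              -> 1
#   stored versions [1, 2, 3] (count 3 >= max 3) -> count + 1 = 4
#   sparse versions [5] (count 1 < max 5)        -> max + 1 = 6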
3981 3981
3982 3982 class ChangesetStatus(Base, BaseModel):
3983 3983 __tablename__ = 'changeset_statuses'
3984 3984 __table_args__ = (
3985 3985 Index('cs_revision_idx', 'revision'),
3986 3986 Index('cs_version_idx', 'version'),
3987 3987 UniqueConstraint('repo_id', 'revision', 'version'),
3988 3988 base_table_args
3989 3989 )
3990 3990
3991 3991 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3992 3992 STATUS_APPROVED = 'approved'
3993 3993 STATUS_REJECTED = 'rejected'
3994 3994 STATUS_UNDER_REVIEW = 'under_review'
3996 3996 STATUSES = [
3997 3997 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3998 3998 (STATUS_APPROVED, _("Approved")),
3999 3999 (STATUS_REJECTED, _("Rejected")),
4000 4000 (STATUS_UNDER_REVIEW, _("Under Review")),
4001 4001 ]
4002 4002
4003 4003 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
4004 4004 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
4005 4005 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
4006 4006 revision = Column('revision', String(40), nullable=False)
4007 4007 status = Column('status', String(128), nullable=False, default=DEFAULT)
4008 4008 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
4009 4009 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
4010 4010 version = Column('version', Integer(), nullable=False, default=0)
4011 4011 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
4012 4012
4013 4013 author = relationship('User', lazy='select')
4014 4014 repo = relationship('Repository', lazy='select')
4015 4015 comment = relationship('ChangesetComment', lazy='select')
4016 4016 pull_request = relationship('PullRequest', lazy='select')
4017 4017
4018 4018 def __unicode__(self):
4019 4019 return u"<%s('%s[v%s]:%s')>" % (
4020 4020 self.__class__.__name__,
4021 4021 self.status, self.version, self.author
4022 4022 )
4023 4023
4024 4024 @classmethod
4025 4025 def get_status_lbl(cls, value):
4026 4026 return dict(cls.STATUSES).get(value)
4027 4027
4028 4028 @property
4029 4029 def status_lbl(self):
4030 4030 return ChangesetStatus.get_status_lbl(self.status)
4031 4031
4032 4032 def get_api_data(self):
4033 4033 status = self
4034 4034 data = {
4035 4035 'status_id': status.changeset_status_id,
4036 4036 'status': status.status,
4037 4037 }
4038 4038 return data
4039 4039
4040 4040 def __json__(self):
4041 4041 data = dict()
4042 4042 data.update(self.get_api_data())
4043 4043 return data
4044 4044
4045 4045
4046 4046 class _SetState(object):
4047 4047 """
4048 4048 Context manager allowing a temporary state change around sensitive operations
4049 4049 such as a pull request update or merge
4050 4050 """
4051 4051
4052 4052 def __init__(self, pull_request, pr_state, back_state=None):
4053 4053 self._pr = pull_request
4054 4054 self._org_state = back_state or pull_request.pull_request_state
4055 4055 self._pr_state = pr_state
4056 4056 self._current_state = None
4057 4057
4058 4058 def __enter__(self):
4059 4059 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4060 4060 self._pr, self._pr_state)
4061 4061 self.set_pr_state(self._pr_state)
4062 4062 return self
4063 4063
4064 4064 def __exit__(self, exc_type, exc_val, exc_tb):
4065 4065 if exc_val is not None:
4066 4066 log.error(''.join(traceback.format_exception(exc_type, exc_val, exc_tb)))
4067 4067 return None
4068 4068
4069 4069 self.set_pr_state(self._org_state)
4070 4070 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4071 4071 self._pr, self._org_state)
4072 4072
4073 4073 @property
4074 4074 def state(self):
4075 4075 return self._current_state
4076 4076
4077 4077 def set_pr_state(self, pr_state):
4078 4078 try:
4079 4079 self._pr.pull_request_state = pr_state
4080 4080 Session().add(self._pr)
4081 4081 Session().commit()
4082 4082 self._current_state = pr_state
4083 4083 except Exception:
4084 4084 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4085 4085 raise
4086 4086
4087 4087
4088 4088 class _PullRequestBase(BaseModel):
4089 4089 """
4090 4090 Common attributes of pull request and version entries.
4091 4091 """
4092 4092
4093 4093 # .status values
4094 4094 STATUS_NEW = u'new'
4095 4095 STATUS_OPEN = u'open'
4096 4096 STATUS_CLOSED = u'closed'
4097 4097
4098 4098 # available states
4099 4099 STATE_CREATING = u'creating'
4100 4100 STATE_UPDATING = u'updating'
4101 4101 STATE_MERGING = u'merging'
4102 4102 STATE_CREATED = u'created'
4103 4103
4104 4104 title = Column('title', Unicode(255), nullable=True)
4105 4105 description = Column(
4106 4106 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4107 4107 nullable=True)
4108 4108 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4109 4109
4110 4110 # new/open/closed status of pull request (not approve/reject/etc)
4111 4111 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4112 4112 created_on = Column(
4113 4113 'created_on', DateTime(timezone=False), nullable=False,
4114 4114 default=datetime.datetime.now)
4115 4115 updated_on = Column(
4116 4116 'updated_on', DateTime(timezone=False), nullable=False,
4117 4117 default=datetime.datetime.now)
4118 4118
4119 4119 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4120 4120
4121 4121 @declared_attr
4122 4122 def user_id(cls):
4123 4123 return Column(
4124 4124 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4125 4125 unique=None)
4126 4126
4127 4127 # 500 revisions max
4128 4128 _revisions = Column(
4129 4129 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4130 4130
4131 4131 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4132 4132
4133 4133 @declared_attr
4134 4134 def source_repo_id(cls):
4135 4135 # TODO: dan: rename column to source_repo_id
4136 4136 return Column(
4137 4137 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4138 4138 nullable=False)
4139 4139
4140 4140 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4141 4141
4142 4142 @hybrid_property
4143 4143 def source_ref(self):
4144 4144 return self._source_ref
4145 4145
4146 4146 @source_ref.setter
4147 4147 def source_ref(self, val):
4148 4148 parts = (val or '').split(':')
4149 4149 if len(parts) != 3:
4150 4150 raise ValueError(
4151 4151 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4152 4152 self._source_ref = safe_unicode(val)
4153 4153
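# Reference strings are stored as 'type:name:commit_id' (the X:Y:Z format the
# setters validate). A hypothetical example:
#
#   pr.source_ref = 'branch:feature-1:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
#   pr.source_ref_parts  # -> Reference('branch', 'feature-1', 'aaaa...')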
4154 4154 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4155 4155
4156 4156 @hybrid_property
4157 4157 def target_ref(self):
4158 4158 return self._target_ref
4159 4159
4160 4160 @target_ref.setter
4161 4161 def target_ref(self, val):
4162 4162 parts = (val or '').split(':')
4163 4163 if len(parts) != 3:
4164 4164 raise ValueError(
4165 4165 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4166 4166 self._target_ref = safe_unicode(val)
4167 4167
4168 4168 @declared_attr
4169 4169 def target_repo_id(cls):
4170 4170 # TODO: dan: rename column to target_repo_id
4171 4171 return Column(
4172 4172 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4173 4173 nullable=False)
4174 4174
4175 4175 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4176 4176
4177 4177 # TODO: dan: rename column to last_merge_source_rev
4178 4178 _last_merge_source_rev = Column(
4179 4179 'last_merge_org_rev', String(40), nullable=True)
4180 4180 # TODO: dan: rename column to last_merge_target_rev
4181 4181 _last_merge_target_rev = Column(
4182 4182 'last_merge_other_rev', String(40), nullable=True)
4183 4183 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4184 4184 last_merge_metadata = Column(
4185 4185 'last_merge_metadata', MutationObj.as_mutable(
4186 4186 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4187 4187
4188 4188 merge_rev = Column('merge_rev', String(40), nullable=True)
4189 4189
4190 4190 reviewer_data = Column(
4191 4191 'reviewer_data_json', MutationObj.as_mutable(
4192 4192 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4193 4193
4194 4194 @property
4195 4195 def reviewer_data_json(self):
4196 4196 return json.dumps(self.reviewer_data)
4197 4197
4198 4198 @property
4199 4199 def last_merge_metadata_parsed(self):
4200 4200 metadata = {}
4201 4201 if not self.last_merge_metadata:
4202 4202 return metadata
4203 4203
4204 4204 if hasattr(self.last_merge_metadata, 'de_coerce'):
4205 4205 for k, v in self.last_merge_metadata.de_coerce().items():
4206 4206 if k in ['target_ref', 'source_ref']:
4207 4207 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4208 4208 else:
4209 4209 if hasattr(v, 'de_coerce'):
4210 4210 metadata[k] = v.de_coerce()
4211 4211 else:
4212 4212 metadata[k] = v
4213 4213 return metadata
4214 4214
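# Illustration (made-up values) of what last_merge_metadata_parsed
# reconstructs: a stored entry such as
#   {'target_ref': {'type': 'branch', 'name': 'master', 'commit_id': '<sha>'}}
# comes back de-coerced as
#   {'target_ref': Reference('branch', 'master', '<sha>')}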
4215 4215 @property
4216 4216 def work_in_progress(self):
4217 4217 """checks if pull request is work in progress by checking the title"""
4218 4218 title = self.title.upper()
4219 4219 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4220 4220 return True
4221 4221 return False
4222 4222
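# Sample titles and what the WIP regex above yields (the title is upper-cased
# first, so matching is effectively case-insensitive):
#
#   '[WIP] Add feature'  -> True
#   'wip: fix tests'     -> True
#   'WIP experiment'     -> True
#   'Work in progress'   -> False (no WIP marker at the very start)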
4223 4223 @property
4224 4224 def title_safe(self):
4225 4225 return self.title\
4226 4226 .replace('{', '{{')\
4227 4227 .replace('}', '}}')
4228 4228
4229 4229 @hybrid_property
4230 4230 def description_safe(self):
4231 4231 from rhodecode.lib import helpers as h
4232 4232 return h.escape(self.description)
4233 4233
4234 4234 @hybrid_property
4235 4235 def revisions(self):
4236 4236 return self._revisions.split(':') if self._revisions else []
4237 4237
4238 4238 @revisions.setter
4239 4239 def revisions(self, val):
4240 4240 self._revisions = u':'.join(val)
4241 4241
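# The revisions list is persisted as one colon-joined string; hypothetical
# commit ids:
#
#   pr.revisions = ['abc1', 'def2']  # stored as u'abc1:def2'
#   pr.revisions                     # -> ['abc1', 'def2']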
4242 4242 @hybrid_property
4243 4243 def last_merge_status(self):
4244 4244 return safe_int(self._last_merge_status)
4245 4245
4246 4246 @last_merge_status.setter
4247 4247 def last_merge_status(self, val):
4248 4248 self._last_merge_status = val
4249 4249
4250 4250 @declared_attr
4251 4251 def author(cls):
4252 4252 return relationship('User', lazy='joined')
4253 4253
4254 4254 @declared_attr
4255 4255 def source_repo(cls):
4256 4256 return relationship(
4257 4257 'Repository',
4258 4258 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4259 4259
4260 4260 @property
4261 4261 def source_ref_parts(self):
4262 4262 return self.unicode_to_reference(self.source_ref)
4263 4263
4264 4264 @declared_attr
4265 4265 def target_repo(cls):
4266 4266 return relationship(
4267 4267 'Repository',
4268 4268 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4269 4269
4270 4270 @property
4271 4271 def target_ref_parts(self):
4272 4272 return self.unicode_to_reference(self.target_ref)
4273 4273
4274 4274 @property
4275 4275 def shadow_merge_ref(self):
4276 4276 return self.unicode_to_reference(self._shadow_merge_ref)
4277 4277
4278 4278 @shadow_merge_ref.setter
4279 4279 def shadow_merge_ref(self, ref):
4280 4280 self._shadow_merge_ref = self.reference_to_unicode(ref)
4281 4281
4282 4282 @staticmethod
4283 4283 def unicode_to_reference(raw):
4284 4284 return unicode_to_reference(raw)
4285 4285
4286 4286 @staticmethod
4287 4287 def reference_to_unicode(ref):
4288 4288 return reference_to_unicode(ref)
4289 4289
4290 4290 def get_api_data(self, with_merge_state=True):
4291 4291 from rhodecode.model.pull_request import PullRequestModel
4292 4292
4293 4293 pull_request = self
4294 4294 if with_merge_state:
4295 4295 merge_response, merge_status, msg = \
4296 4296 PullRequestModel().merge_status(pull_request)
4297 4297 merge_state = {
4298 4298 'status': merge_status,
4299 4299 'message': safe_unicode(msg),
4300 4300 }
4301 4301 else:
4302 4302 merge_state = {'status': 'not_available',
4303 4303 'message': 'not_available'}
4304 4304
4305 4305 merge_data = {
4306 4306 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4307 4307 'reference': (
4308 4308 pull_request.shadow_merge_ref._asdict()
4309 4309 if pull_request.shadow_merge_ref else None),
4310 4310 }
4311 4311
4312 4312 data = {
4313 4313 'pull_request_id': pull_request.pull_request_id,
4314 4314 'url': PullRequestModel().get_url(pull_request),
4315 4315 'title': pull_request.title,
4316 4316 'description': pull_request.description,
4317 4317 'status': pull_request.status,
4318 4318 'state': pull_request.pull_request_state,
4319 4319 'created_on': pull_request.created_on,
4320 4320 'updated_on': pull_request.updated_on,
4321 4321 'commit_ids': pull_request.revisions,
4322 4322 'review_status': pull_request.calculated_review_status(),
4323 4323 'mergeable': merge_state,
4324 4324 'source': {
4325 4325 'clone_url': pull_request.source_repo.clone_url(),
4326 4326 'repository': pull_request.source_repo.repo_name,
4327 4327 'reference': {
4328 4328 'name': pull_request.source_ref_parts.name,
4329 4329 'type': pull_request.source_ref_parts.type,
4330 4330 'commit_id': pull_request.source_ref_parts.commit_id,
4331 4331 },
4332 4332 },
4333 4333 'target': {
4334 4334 'clone_url': pull_request.target_repo.clone_url(),
4335 4335 'repository': pull_request.target_repo.repo_name,
4336 4336 'reference': {
4337 4337 'name': pull_request.target_ref_parts.name,
4338 4338 'type': pull_request.target_ref_parts.type,
4339 4339 'commit_id': pull_request.target_ref_parts.commit_id,
4340 4340 },
4341 4341 },
4342 4342 'merge': merge_data,
4343 4343 'author': pull_request.author.get_api_data(include_secrets=False,
4344 4344 details='basic'),
4345 4345 'reviewers': [
4346 4346 {
4347 4347 'user': reviewer.get_api_data(include_secrets=False,
4348 4348 details='basic'),
4349 4349 'reasons': reasons,
4350 4350 'review_status': st[0][1].status if st else 'not_reviewed',
4351 4351 }
4352 4352 for obj, reviewer, reasons, mandatory, st in
4353 4353 pull_request.reviewers_statuses()
4354 4354 ]
4355 4355 }
4356 4356
4357 4357 return data
4358 4358
4359 4359 def set_state(self, pull_request_state, final_state=None):
4360 4360 """
4361 4361 # goes from the initial state to the given state, then back to the
4362 4362 # initial state on exit; the state restored on exit can be overridden via final_state
4363 4363 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4364 4364 pull_request.merge()
4365 4365 
4366 4366 :param pull_request_state: state to hold while inside the context
4367 4367 :param final_state: optional state to restore on exit instead of the initial one
4368 4368
4369 4369 """
4370 4370
4371 4371 return _SetState(self, pull_request_state, back_state=final_state)
4372 4372
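# A usage sketch of set_state above (assumes a loaded PullRequest and an
# active DB session): hold the 'updating' state for the duration of the
# operation, then force the 'created' state on exit via final_state.
def _example_pull_request_state_change(pull_request):
    with pull_request.set_state(PullRequest.STATE_UPDATING,
                                final_state=PullRequest.STATE_CREATED):
        pass  # perform the sensitive update/merge here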
4373 4373
4374 4374 class PullRequest(Base, _PullRequestBase):
4375 4375 __tablename__ = 'pull_requests'
4376 4376 __table_args__ = (
4377 4377 base_table_args,
4378 4378 )
4379 4379 LATEST_VER = 'latest'
4380 4380
4381 4381 pull_request_id = Column(
4382 4382 'pull_request_id', Integer(), nullable=False, primary_key=True)
4383 4383
4384 4384 def __repr__(self):
4385 4385 if self.pull_request_id:
4386 4386 return '<DB:PullRequest #%s>' % self.pull_request_id
4387 4387 else:
4388 4388 return '<DB:PullRequest at %#x>' % id(self)
4389 4389
4390 4390 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
4391 4391 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
4392 4392 comments = relationship('ChangesetComment', cascade="all, delete-orphan")
4393 4393 versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
4394 4394 lazy='dynamic')
4395 4395
4396 4396 @classmethod
4397 4397 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4398 4398 internal_methods=None):
4399 4399
4400 4400 class PullRequestDisplay(object):
4401 4401 """
4402 4402 Special object wrapper for showing PullRequest data via Versions.
4403 4403 It mimics the PR object as closely as possible. This is a read-only
4404 4404 object, for display only
4405 4405 """
4406 4406
4407 4407 def __init__(self, attrs, internal=None):
4408 4408 self.attrs = attrs
4409 4409 # internal attributes have priority over the ones given via attrs
4410 4410 self.internal = internal or ['versions']
4411 4411
4412 4412 def __getattr__(self, item):
4413 4413 if item in self.internal:
4414 4414 return getattr(self, item)
4415 4415 try:
4416 4416 return self.attrs[item]
4417 4417 except KeyError:
4418 4418 raise AttributeError(
4419 4419 '%s object has no attribute %s' % (self, item))
4420 4420
4421 4421 def __repr__(self):
4422 4422 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
4423 4423
4424 4424 def versions(self):
4425 4425 return pull_request_obj.versions.order_by(
4426 4426 PullRequestVersion.pull_request_version_id).all()
4427 4427
4428 4428 def is_closed(self):
4429 4429 return pull_request_obj.is_closed()
4430 4430
4431 4431 def is_state_changing(self):
4432 4432 return pull_request_obj.is_state_changing()
4433 4433
4434 4434 @property
4435 4435 def pull_request_version_id(self):
4436 4436 return getattr(pull_request_obj, 'pull_request_version_id', None)
4437 4437
4438 4438 @property
4439 4439 def pull_request_last_version(self):
4440 4440 return pull_request_obj.pull_request_last_version
4441 4441
4442 4442 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4443 4443
4444 4444 attrs.author = StrictAttributeDict(
4445 4445 pull_request_obj.author.get_api_data())
4446 4446 if pull_request_obj.target_repo:
4447 4447 attrs.target_repo = StrictAttributeDict(
4448 4448 pull_request_obj.target_repo.get_api_data())
4449 4449 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4450 4450
4451 4451 if pull_request_obj.source_repo:
4452 4452 attrs.source_repo = StrictAttributeDict(
4453 4453 pull_request_obj.source_repo.get_api_data())
4454 4454 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4455 4455
4456 4456 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4457 4457 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4458 4458 attrs.revisions = pull_request_obj.revisions
4459 4459 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4460 4460 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4461 4461 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4462 4462 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4463 4463
4464 4464 return PullRequestDisplay(attrs, internal=internal_methods)
4465 4465
4466 4466 def is_closed(self):
4467 4467 return self.status == self.STATUS_CLOSED
4468 4468
4469 4469 def is_state_changing(self):
4470 4470 return self.pull_request_state != PullRequest.STATE_CREATED
4471 4471
4472 4472 def __json__(self):
4473 4473 return {
4474 4474 'revisions': self.revisions,
4475 4475 'versions': self.versions_count
4476 4476 }
4477 4477
4478 4478 def calculated_review_status(self):
4479 4479 from rhodecode.model.changeset_status import ChangesetStatusModel
4480 4480 return ChangesetStatusModel().calculated_review_status(self)
4481 4481
4482 4482 def reviewers_statuses(self):
4483 4483 from rhodecode.model.changeset_status import ChangesetStatusModel
4484 4484 return ChangesetStatusModel().reviewers_statuses(self)
4485 4485
4486 4486 def get_pull_request_reviewers(self, role=None):
4487 4487 qry = PullRequestReviewers.query()\
4488 4488 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4489 4489 if role:
4490 4490 qry = qry.filter(PullRequestReviewers.role == role)
4491 4491
4492 4492 return qry.all()
4493 4493
4494 4494 @property
4495 4495 def reviewers_count(self):
4496 4496 qry = PullRequestReviewers.query()\
4497 4497 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4498 4498 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4499 4499 return qry.count()
4500 4500
4501 4501 @property
4502 4502 def observers_count(self):
4503 4503 qry = PullRequestReviewers.query()\
4504 4504 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4505 4505 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4506 4506 return qry.count()
4507 4507
4508 4508 def observers(self):
4509 4509 qry = PullRequestReviewers.query()\
4510 4510 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4511 4511 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4512 4512 .all()
4513 4513
4514 4514 for entry in qry:
4515 4515 yield entry, entry.user
4516 4516
4517 4517 @property
4518 4518 def workspace_id(self):
4519 4519 from rhodecode.model.pull_request import PullRequestModel
4520 4520 return PullRequestModel()._workspace_id(self)
4521 4521
4522 4522 def get_shadow_repo(self):
4523 4523 workspace_id = self.workspace_id
4524 4524 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4525 4525 if os.path.isdir(shadow_repository_path):
4526 4526 vcs_obj = self.target_repo.scm_instance()
4527 4527 return vcs_obj.get_shadow_instance(shadow_repository_path)
4528 4528
4529 4529 @property
4530 4530 def versions_count(self):
4531 4531 """
4532 4532 return the number of versions this PR has, e.g. a PR that has been
4533 4533 updated once will have 2 versions
4534 4534 """
4535 4535 return self.versions.count() + 1
4536 4536
4537 4537 @property
4538 4538 def pull_request_last_version(self):
4539 4539 return self.versions_count
4540 4540
4541 4541
4542 4542 class PullRequestVersion(Base, _PullRequestBase):
4543 4543 __tablename__ = 'pull_request_versions'
4544 4544 __table_args__ = (
4545 4545 base_table_args,
4546 4546 )
4547 4547
4548 4548 pull_request_version_id = Column(
4549 4549 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4550 4550 pull_request_id = Column(
4551 4551 'pull_request_id', Integer(),
4552 4552 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4553 4553 pull_request = relationship('PullRequest')
4554 4554
4555 4555 def __repr__(self):
4556 4556 if self.pull_request_version_id:
4557 4557 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
4558 4558 else:
4559 4559 return '<DB:PullRequestVersion at %#x>' % id(self)
4560 4560
4561 4561 @property
4562 4562 def reviewers(self):
4563 4563 return self.pull_request.reviewers
4567 4567
4568 4568 @property
4569 4569 def versions(self):
4570 4570 return self.pull_request.versions
4571 4571
4572 4572 def is_closed(self):
4573 4573 # calculate from original
4574 4574 return self.pull_request.status == self.STATUS_CLOSED
4575 4575
4576 4576 def is_state_changing(self):
4577 4577 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4578 4578
4579 4579 def calculated_review_status(self):
4580 4580 return self.pull_request.calculated_review_status()
4581 4581
4582 4582 def reviewers_statuses(self):
4583 4583 return self.pull_request.reviewers_statuses()
4584 4584
4585 4585 def observers(self):
4586 4586 return self.pull_request.observers()
4587 4587
4588 4588
4589 4589 class PullRequestReviewers(Base, BaseModel):
4590 4590 __tablename__ = 'pull_request_reviewers'
4591 4591 __table_args__ = (
4592 4592 base_table_args,
4593 4593 )
4594 4594 ROLE_REVIEWER = u'reviewer'
4595 4595 ROLE_OBSERVER = u'observer'
4596 4596 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4597 4597
4598 4598 @hybrid_property
4599 4599 def reasons(self):
4600 4600 if not self._reasons:
4601 4601 return []
4602 4602 return self._reasons
4603 4603
4604 4604 @reasons.setter
4605 4605 def reasons(self, val):
4606 4606 val = val or []
4607 4607 if any(not isinstance(x, compat.string_types) for x in val):
4608 4608 raise Exception('invalid reasons type, must be list of strings')
4609 4609 self._reasons = val
4610 4610
4611 4611 pull_requests_reviewers_id = Column(
4612 4612 'pull_requests_reviewers_id', Integer(), nullable=False,
4613 4613 primary_key=True)
4614 4614 pull_request_id = Column(
4615 4615 "pull_request_id", Integer(),
4616 4616 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4617 4617 user_id = Column(
4618 4618 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4619 4619 _reasons = Column(
4620 4620 'reason', MutationList.as_mutable(
4621 4621 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4622 4622
4623 4623 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4624 4624 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4625 4625
4626 4626 user = relationship('User')
4627 4627 pull_request = relationship('PullRequest')
4628 4628
4629 4629 rule_data = Column(
4630 4630 'rule_data_json',
4631 4631 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4632 4632
4633 4633 def rule_user_group_data(self):
4634 4634 """
4635 4635 Returns the voting user group rule data for this reviewer
4636 4636 """
4637 4637
4638 4638 if self.rule_data and 'vote_rule' in self.rule_data:
4639 4639 user_group_data = {}
4640 4640 if 'rule_user_group_entry_id' in self.rule_data:
4641 4641 # means a group with voting rules !
4642 4642 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4643 4643 user_group_data['name'] = self.rule_data['rule_name']
4644 4644 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4645 4645
4646 4646 return user_group_data
4647 4647
4648 4648 @classmethod
4649 4649 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4650 4650 qry = PullRequestReviewers.query()\
4651 4651 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4652 4652 if role:
4653 4653 qry = qry.filter(PullRequestReviewers.role == role)
4654 4654
4655 4655 return qry.all()
4656 4656
4657 4657 def __unicode__(self):
4658 4658 return u"<%s('id:%s')>" % (self.__class__.__name__,
4659 4659 self.pull_requests_reviewers_id)
4660 4660
4661 4661
4662 4662 class Notification(Base, BaseModel):
4663 4663 __tablename__ = 'notifications'
4664 4664 __table_args__ = (
4665 4665 Index('notification_type_idx', 'type'),
4666 4666 base_table_args,
4667 4667 )
4668 4668
4669 4669 TYPE_CHANGESET_COMMENT = u'cs_comment'
4670 4670 TYPE_MESSAGE = u'message'
4671 4671 TYPE_MENTION = u'mention'
4672 4672 TYPE_REGISTRATION = u'registration'
4673 4673 TYPE_PULL_REQUEST = u'pull_request'
4674 4674 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4675 4675 TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'
4676 4676
4677 4677 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4678 4678 subject = Column('subject', Unicode(512), nullable=True)
4679 4679 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4680 4680 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4681 4681 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4682 4682 type_ = Column('type', Unicode(255))
4683 4683
4684 4684 created_by_user = relationship('User')
4685 4685 notifications_to_users = relationship('UserNotification', lazy='joined',
4686 4686 cascade="all, delete-orphan")
4687 4687
4688 4688 @property
4689 4689 def recipients(self):
4690 4690 return [x.user for x in UserNotification.query()\
4691 4691 .filter(UserNotification.notification == self)\
4692 4692 .order_by(UserNotification.user_id.asc()).all()]
4693 4693
4694 4694 @classmethod
4695 4695 def create(cls, created_by, subject, body, recipients, type_=None):
4696 4696 if type_ is None:
4697 4697 type_ = Notification.TYPE_MESSAGE
4698 4698
4699 4699 notification = cls()
4700 4700 notification.created_by_user = created_by
4701 4701 notification.subject = subject
4702 4702 notification.body = body
4703 4703 notification.type_ = type_
4704 4704 notification.created_on = datetime.datetime.now()
4705 4705
4706 4706 # For each recipient, link the created notification to their account
4707 4707 for u in recipients:
4708 4708 assoc = UserNotification()
4709 4709 assoc.user_id = u.user_id
4710 4710 assoc.notification = notification
4711 4711
4712 4712 # if created_by is among the recipients, mark their notification
4713 4713 # as read
4714 4714 if u.user_id == created_by.user_id:
4715 4715 assoc.read = True
4716 4716 Session().add(assoc)
4717 4717
4718 4718 Session().add(notification)
4719 4719
4720 4720 return notification
4721 4721
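# A minimal sketch (assumed User objects and an active DB session) of
# Notification.create above; the author's own copy is marked read
# automatically when they appear among the recipients.
def _example_send_notification(author, recipients):
    notification = Notification.create(
        created_by=author, subject='subject', body='body',
        recipients=recipients, type_=Notification.TYPE_MESSAGE)
    Session().commit()
    return notification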
4722 4722
4723 4723 class UserNotification(Base, BaseModel):
4724 4724 __tablename__ = 'user_to_notification'
4725 4725 __table_args__ = (
4726 4726 UniqueConstraint('user_id', 'notification_id'),
4727 4727 base_table_args
4728 4728 )
4729 4729
4730 4730 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4731 4731 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4732 4732 read = Column('read', Boolean, default=False)
4733 4733 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4734 4734
4735 4735 user = relationship('User', lazy="joined")
4736 4736 notification = relationship('Notification', lazy="joined",
4737 4737 order_by=lambda: Notification.created_on.desc(),)
4738 4738
4739 4739 def mark_as_read(self):
4740 4740 self.read = True
4741 4741 Session().add(self)
4742 4742
4743 4743
4744 4744 class UserNotice(Base, BaseModel):
4745 4745 __tablename__ = 'user_notices'
4746 4746 __table_args__ = (
4747 4747 base_table_args
4748 4748 )
4749 4749
4750 4750 NOTIFICATION_TYPE_MESSAGE = 'message'
4751 4751 NOTIFICATION_TYPE_NOTICE = 'notice'
4752 4752
4753 4753 NOTIFICATION_LEVEL_INFO = 'info'
4754 4754 NOTIFICATION_LEVEL_WARNING = 'warning'
4755 4755 NOTIFICATION_LEVEL_ERROR = 'error'
4756 4756
4757 4757 user_notice_id = Column('gist_id', Integer(), primary_key=True)  # NOTE: underlying DB column is (confusingly) named 'gist_id'; renaming it would require a schema migration
4758 4758
4759 4759 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4760 4760 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4761 4761
4762 4762 notice_read = Column('notice_read', Boolean, default=False)
4763 4763
4764 4764 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4765 4765 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4766 4766
4767 4767 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4768 4768 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4769 4769
4770 4770 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4771 4771 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4772 4772
4773 4773 @classmethod
4774 4774 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4775 4775
4776 4776 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4777 4777 cls.NOTIFICATION_LEVEL_WARNING,
4778 4778 cls.NOTIFICATION_LEVEL_INFO]:
4779 4779 return
4780 4780
4781 4781 from rhodecode.model.user import UserModel
4782 4782 user = UserModel().get_user(user)
4783 4783
4784 4784 new_notice = UserNotice()
4785 4785 if not allow_duplicate:
4786 4786 existing_msg = UserNotice().query() \
4787 4787 .filter(UserNotice.user == user) \
4788 4788 .filter(UserNotice.notice_body == body) \
4789 4789 .filter(UserNotice.notice_read == false()) \
4790 4790 .scalar()
4791 4791 if existing_msg:
4792 4792 log.warning('Ignoring duplicate notice for user %s', user)
4793 4793 return
4794 4794
4795 4795 new_notice.user = user
4796 4796 new_notice.notice_subject = subject
4797 4797 new_notice.notice_body = body
4798 4798 new_notice.notification_level = notice_level
4799 4799 Session().add(new_notice)
4800 4800 Session().commit()
4801 4801
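# A short sketch (assumed user object or id) of UserNotice.create_for_user
# above; unread duplicates with the same body are skipped unless
# allow_duplicate=True is passed.
def _example_user_notice(user):
    UserNotice.create_for_user(
        user, subject='maintenance',
        body='scheduled downtime at 22:00 UTC',
        notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)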
4802 4802
4803 4803 class Gist(Base, BaseModel):
4804 4804 __tablename__ = 'gists'
4805 4805 __table_args__ = (
4806 4806 Index('g_gist_access_id_idx', 'gist_access_id'),
4807 4807 Index('g_created_on_idx', 'created_on'),
4808 4808 base_table_args
4809 4809 )
4810 4810
4811 4811 GIST_PUBLIC = u'public'
4812 4812 GIST_PRIVATE = u'private'
4813 4813 DEFAULT_FILENAME = u'gistfile1.txt'
4814 4814
4815 4815 ACL_LEVEL_PUBLIC = u'acl_public'
4816 4816 ACL_LEVEL_PRIVATE = u'acl_private'
4817 4817
4818 4818 gist_id = Column('gist_id', Integer(), primary_key=True)
4819 4819 gist_access_id = Column('gist_access_id', Unicode(250))
4820 4820 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4821 4821 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4822 4822 gist_expires = Column('gist_expires', Float(53), nullable=False)
4823 4823 gist_type = Column('gist_type', Unicode(128), nullable=False)
4824 4824 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4825 4825 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4826 4826 acl_level = Column('acl_level', Unicode(128), nullable=True)
4827 4827
4828 4828 owner = relationship('User')
4829 4829
4830 4830 def __repr__(self):
4831 4831 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4832 4832
4833 4833 @hybrid_property
4834 4834 def description_safe(self):
4835 4835 from rhodecode.lib import helpers as h
4836 4836 return h.escape(self.gist_description)
4837 4837
4838 4838 @classmethod
4839 4839 def get_or_404(cls, id_):
4840 4840 from pyramid.httpexceptions import HTTPNotFound
4841 4841
4842 4842 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4843 4843 if not res:
4844 4844 log.debug('WARN: No DB entry with id %s', id_)
4845 4845 raise HTTPNotFound()
4846 4846 return res
4847 4847
4848 4848 @classmethod
4849 4849 def get_by_access_id(cls, gist_access_id):
4850 4850 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4851 4851
4852 4852 def gist_url(self):
4853 4853 from rhodecode.model.gist import GistModel
4854 4854 return GistModel().get_url(self)
4855 4855
4856 4856 @classmethod
4857 4857 def base_path(cls):
4858 4858 """
4859 4859 Returns the base path where all gists are stored
4860 4860
4861 4861 :param cls:
4862 4862 """
4863 4863 from rhodecode.model.gist import GIST_STORE_LOC
4864 4864 q = Session().query(RhodeCodeUi)\
4865 4865 .filter(RhodeCodeUi.ui_key == URL_SEP)
4866 4866 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4867 4867 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4868 4868
4869 4869 def get_api_data(self):
4870 4870 """
4871 4871 Common function for generating gist related data for API
4872 4872 """
4873 4873 gist = self
4874 4874 data = {
4875 4875 'gist_id': gist.gist_id,
4876 4876 'type': gist.gist_type,
4877 4877 'access_id': gist.gist_access_id,
4878 4878 'description': gist.gist_description,
4879 4879 'url': gist.gist_url(),
4880 4880 'expires': gist.gist_expires,
4881 4881 'created_on': gist.created_on,
4882 4882 'modified_at': gist.modified_at,
4883 4883 'content': None,
4884 4884 'acl_level': gist.acl_level,
4885 4885 }
4886 4886 return data
4887 4887
4888 4888 def __json__(self):
4889 4889 data = dict()
4891 4891 data.update(self.get_api_data())
4892 4892 return data
4893 4893 # SCM functions
4894 4894
4895 4895 def scm_instance(self, **kwargs):
4896 4896 """
4897 4897 Get an instance of VCS Repository
4898 4898
4899 4899 :param kwargs:
4900 4900 """
4901 4901 from rhodecode.model.gist import GistModel
4902 4902 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4903 4903 return get_vcs_instance(
4904 4904 repo_path=safe_str(full_repo_path), create=False,
4905 4905 _vcs_alias=GistModel.vcs_backend)
4906 4906
4907 4907
4908 4908 class ExternalIdentity(Base, BaseModel):
4909 4909 __tablename__ = 'external_identities'
4910 4910 __table_args__ = (
4911 4911 Index('local_user_id_idx', 'local_user_id'),
4912 4912 Index('external_id_idx', 'external_id'),
4913 4913 base_table_args
4914 4914 )
4915 4915
4916 4916 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4917 4917 external_username = Column('external_username', Unicode(1024), default=u'')
4918 4918 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4919 4919 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4920 4920 access_token = Column('access_token', String(1024), default=u'')
4921 4921 alt_token = Column('alt_token', String(1024), default=u'')
4922 4922 token_secret = Column('token_secret', String(1024), default=u'')
4923 4923
4924 4924 @classmethod
4925 4925 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4926 4926 """
4927 4927 Returns ExternalIdentity instance based on search params
4928 4928
4929 4929 :param external_id:
4930 4930 :param provider_name:
4931 4931 :return: ExternalIdentity
4932 4932 """
4933 4933 query = cls.query()
4934 4934 query = query.filter(cls.external_id == external_id)
4935 4935 query = query.filter(cls.provider_name == provider_name)
4936 4936 if local_user_id:
4937 4937 query = query.filter(cls.local_user_id == local_user_id)
4938 4938 return query.first()
4939 4939
4940 4940 @classmethod
4941 4941 def user_by_external_id_and_provider(cls, external_id, provider_name):
4942 4942 """
4943 4943 Returns User instance based on search params
4944 4944
4945 4945 :param external_id:
4946 4946 :param provider_name:
4947 4947 :return: User
4948 4948 """
4949 4949 query = User.query()
4950 4950 query = query.filter(cls.external_id == external_id)
4951 4951 query = query.filter(cls.provider_name == provider_name)
4952 4952 query = query.filter(User.user_id == cls.local_user_id)
4953 4953 return query.first()
4954 4954
4955 4955 @classmethod
4956 4956 def by_local_user_id(cls, local_user_id):
4957 4957 """
4958 4958 Returns all tokens for user
4959 4959
4960 4960 :param local_user_id:
4961 4961 :return: ExternalIdentity
4962 4962 """
4963 4963 query = cls.query()
4964 4964 query = query.filter(cls.local_user_id == local_user_id)
4965 4965 return query
4966 4966
4967 4967 @classmethod
4968 4968 def load_provider_plugin(cls, plugin_id):
4969 4969 from rhodecode.authentication.base import loadplugin
4970 4970 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4971 4971 auth_plugin = loadplugin(_plugin_id)
4972 4972 return auth_plugin
4973 4973
4974 4974
4975 4975 class Integration(Base, BaseModel):
4976 4976 __tablename__ = 'integrations'
4977 4977 __table_args__ = (
4978 4978 base_table_args
4979 4979 )
4980 4980
4981 4981 integration_id = Column('integration_id', Integer(), primary_key=True)
4982 4982 integration_type = Column('integration_type', String(255))
4983 4983 enabled = Column('enabled', Boolean(), nullable=False)
4984 4984 name = Column('name', String(255), nullable=False)
4985 4985 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4986 4986 default=False)
4987 4987
4988 4988 settings = Column(
4989 4989 'settings_json', MutationObj.as_mutable(
4990 4990 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4991 4991 repo_id = Column(
4992 4992 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4993 4993 nullable=True, unique=None, default=None)
4994 4994 repo = relationship('Repository', lazy='joined')
4995 4995
4996 4996 repo_group_id = Column(
4997 4997 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4998 4998 nullable=True, unique=None, default=None)
4999 4999 repo_group = relationship('RepoGroup', lazy='joined')
5000 5000
5001 5001 @property
5002 5002 def scope(self):
5003 5003 if self.repo:
5004 5004 return repr(self.repo)
5005 5005 if self.repo_group:
5006 5006 if self.child_repos_only:
5007 5007 return repr(self.repo_group) + ' (child repos only)'
5008 5008 else:
5009 5009 return repr(self.repo_group) + ' (recursive)'
5010 5010 if self.child_repos_only:
5011 5011 return 'root_repos'
5012 5012 return 'global'
5013 5013
5014 5014 def __repr__(self):
5015 5015 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5016 5016
5017 5017
5018 5018 class RepoReviewRuleUser(Base, BaseModel):
5019 5019 __tablename__ = 'repo_review_rules_users'
5020 5020 __table_args__ = (
5021 5021 base_table_args
5022 5022 )
5023 5023 ROLE_REVIEWER = u'reviewer'
5024 5024 ROLE_OBSERVER = u'observer'
5025 5025 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5026 5026
5027 5027 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5028 5028 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5029 5029 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5030 5030 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5031 5031 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5032 5032 user = relationship('User')
5033 5033
5034 5034 def rule_data(self):
5035 5035 return {
5036 5036 'mandatory': self.mandatory,
5037 5037 'role': self.role,
5038 5038 }
5039 5039
5040 5040
5041 5041 class RepoReviewRuleUserGroup(Base, BaseModel):
5042 5042 __tablename__ = 'repo_review_rules_users_groups'
5043 5043 __table_args__ = (
5044 5044 base_table_args
5045 5045 )
5046 5046
5047 5047 VOTE_RULE_ALL = -1
5048 5048 ROLE_REVIEWER = u'reviewer'
5049 5049 ROLE_OBSERVER = u'observer'
5050 5050 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5051 5051
5052 5052 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5053 5053 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5054 5054 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5055 5055 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5056 5056 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5057 5057 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5058 5058 users_group = relationship('UserGroup')
5059 5059
5060 5060 def rule_data(self):
5061 5061 return {
5062 5062 'mandatory': self.mandatory,
5063 5063 'role': self.role,
5064 5064 'vote_rule': self.vote_rule
5065 5065 }
5066 5066
5067 5067 @property
5068 5068 def vote_rule_label(self):
5069 5069 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5070 5070 return 'all must vote'
5071 5071 else:
5072 5072 return 'min. vote {}'.format(self.vote_rule)
5073 5073
5074 5074
5075 5075 class RepoReviewRule(Base, BaseModel):
5076 5076 __tablename__ = 'repo_review_rules'
5077 5077 __table_args__ = (
5078 5078 base_table_args
5079 5079 )
5080 5080
5081 5081 repo_review_rule_id = Column(
5082 5082 'repo_review_rule_id', Integer(), primary_key=True)
5083 5083 repo_id = Column(
5084 5084 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5085 5085 repo = relationship('Repository', backref='review_rules')
5086 5086
5087 5087 review_rule_name = Column('review_rule_name', String(255))
5088 5088 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5089 5089 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5090 5090 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5091 5091
5092 5092 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5093 5093
5094 5094 # Legacy fields, just for backward compat
5095 5095 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5096 5096 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5097 5097
5098 5098 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5099 5099 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5100 5100
5101 5101 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5102 5102
5103 5103 rule_users = relationship('RepoReviewRuleUser')
5104 5104 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5105 5105
5106 5106 def _validate_pattern(self, value):
5107 5107 re.compile('^' + glob2re(value) + '$')
5108 5108
5109 5109 @hybrid_property
5110 5110 def source_branch_pattern(self):
5111 5111 return self._branch_pattern or '*'
5112 5112
5113 5113 @source_branch_pattern.setter
5114 5114 def source_branch_pattern(self, value):
5115 5115 self._validate_pattern(value)
5116 5116 self._branch_pattern = value or '*'
5117 5117
5118 5118 @hybrid_property
5119 5119 def target_branch_pattern(self):
5120 5120 return self._target_branch_pattern or '*'
5121 5121
5122 5122 @target_branch_pattern.setter
5123 5123 def target_branch_pattern(self, value):
5124 5124 self._validate_pattern(value)
5125 5125 self._target_branch_pattern = value or '*'
5126 5126
5127 5127 @hybrid_property
5128 5128 def file_pattern(self):
5129 5129 return self._file_pattern or '*'
5130 5130
5131 5131 @file_pattern.setter
5132 5132 def file_pattern(self, value):
5133 5133 self._validate_pattern(value)
5134 5134 self._file_pattern = value or '*'
5135 5135
5136 5136 @hybrid_property
5137 5137 def forbid_pr_author_to_review(self):
5138 5138 return self.pr_author == 'forbid_pr_author'
5139 5139
5140 5140 @hybrid_property
5141 5141 def include_pr_author_to_review(self):
5142 5142 return self.pr_author == 'include_pr_author'
5143 5143
5144 5144 @hybrid_property
5145 5145 def forbid_commit_author_to_review(self):
5146 5146 return self.commit_author == 'forbid_commit_author'
5147 5147
5148 5148 @hybrid_property
5149 5149 def include_commit_author_to_review(self):
5150 5150 return self.commit_author == 'include_commit_author'
5151 5151
5152 5152 def matches(self, source_branch, target_branch, files_changed):
5153 5153 """
5154 5154 Check if this review rule matches a branch/files in a pull request
5155 5155
5156 5156 :param source_branch: source branch name for the commit
5157 5157 :param target_branch: target branch name for the commit
5158 5158 :param files_changed: list of file paths changed in the pull request
5159 5159 """
5160 5160
5161 5161 source_branch = source_branch or ''
5162 5162 target_branch = target_branch or ''
5163 5163 files_changed = files_changed or []
5164 5164
5165 5165 branch_matches = True
5166 5166 if source_branch or target_branch:
5167 5167 if self.source_branch_pattern == '*':
5168 5168 source_branch_match = True
5169 5169 else:
5170 5170 if self.source_branch_pattern.startswith('re:'):
5171 5171 source_pattern = self.source_branch_pattern[3:]
5172 5172 else:
5173 5173 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5174 5174 source_branch_regex = re.compile(source_pattern)
5175 5175 source_branch_match = bool(source_branch_regex.search(source_branch))
5176 5176 if self.target_branch_pattern == '*':
5177 5177 target_branch_match = True
5178 5178 else:
5179 5179 if self.target_branch_pattern.startswith('re:'):
5180 5180 target_pattern = self.target_branch_pattern[3:]
5181 5181 else:
5182 5182 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5183 5183 target_branch_regex = re.compile(target_pattern)
5184 5184 target_branch_match = bool(target_branch_regex.search(target_branch))
5185 5185
5186 5186 branch_matches = source_branch_match and target_branch_match
5187 5187
5188 5188 files_matches = True
5189 5189 if self.file_pattern != '*':
5190 5190 files_matches = False
5191 5191 if self.file_pattern.startswith('re:'):
5192 5192 file_pattern = self.file_pattern[3:]
5193 5193 else:
5194 5194 file_pattern = glob2re(self.file_pattern)
5195 5195 file_regex = re.compile(file_pattern)
5196 5196 for file_data in files_changed:
5197 5197 filename = file_data.get('filename')
5198 5198
5199 5199 if file_regex.search(filename):
5200 5200 files_matches = True
5201 5201 break
5202 5202
5203 5203 return branch_matches and files_matches
5204 5204
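To make the three-way pattern convention above concrete ('*' matches everything, 're:' introduces a raw regex, anything else is a glob), here is a small self-contained sketch of the branch-matching half of `matches()`; `fnmatch.translate` again stands in for `glob2re`:

```python
import fnmatch
import re


def branch_matches(pattern, branch):
    # Mirror of the rule above: '*' always matches, 're:' introduces
    # a raw regex, anything else is treated as a glob.
    if pattern == '*':
        return True
    if pattern.startswith('re:'):
        regex = re.compile(pattern[3:])
    else:
        regex = re.compile('^' + fnmatch.translate(pattern) + '$')
    return bool(regex.search(branch))


assert branch_matches('*', 'anything')
assert branch_matches('feature/*', 'feature/login')
assert branch_matches('re:^(default|stable)$', 'stable')
assert not branch_matches('feature/*', 'hotfix/crash')
```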
5205 5205 @property
5206 5206 def review_users(self):
5207 5207 """ Returns the users which this rule applies to """
5208 5208
5209 5209 users = collections.OrderedDict()
5210 5210
5211 5211 for rule_user in self.rule_users:
5212 5212 if rule_user.user.active:
5213 5213 if rule_user.user.username not in users:
5214 5214 users[rule_user.user.username] = {
5215 5215 'user': rule_user.user,
5216 5216 'source': 'user',
5217 5217 'source_data': {},
5218 5218 'data': rule_user.rule_data()
5219 5219 }
5220 5220
5221 5221 for rule_user_group in self.rule_user_groups:
5222 5222 source_data = {
5223 5223 'user_group_id': rule_user_group.users_group.users_group_id,
5224 5224 'name': rule_user_group.users_group.users_group_name,
5225 5225 'members': len(rule_user_group.users_group.members)
5226 5226 }
5227 5227 for member in rule_user_group.users_group.members:
5228 5228 if member.user.active:
5229 5229 key = member.user.username
5230 5230 if key in users:
5231 5231 # skip this member as we already have them;
5232 5232 # this prevents duplicates in multiple groups
5233 5233 # from overriding the "first" matched user
5234 5234 continue
5235 5235
5236 5236 users[key] = {
5237 5237 'user': member.user,
5238 5238 'source': 'user_group',
5239 5239 'source_data': source_data,
5240 5240 'data': rule_user_group.rule_data()
5241 5241 }
5242 5242
5243 5243 return users
5244 5244
5245 5245 def user_group_vote_rule(self, user_id):
5246 5246
5247 5247 rules = []
5248 5248 if not self.rule_user_groups:
5249 5249 return rules
5250 5250
5251 5251 for user_group in self.rule_user_groups:
5252 5252 user_group_members = [x.user_id for x in user_group.users_group.members]
5253 5253 if user_id in user_group_members:
5254 5254 rules.append(user_group)
5255 5255 return rules
5256 5256
5257 5257 def __repr__(self):
5258 5258 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
5259 5259 self.repo_review_rule_id, self.repo)
5260 5260
5261 5261
5262 5262 class ScheduleEntry(Base, BaseModel):
5263 5263 __tablename__ = 'schedule_entries'
5264 5264 __table_args__ = (
5265 5265 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5266 5266 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5267 5267 base_table_args,
5268 5268 )
5269 5269
5270 5270 schedule_types = ['crontab', 'timedelta', 'integer']
5271 5271 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5272 5272
5273 5273 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5274 5274 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5275 5275 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5276 5276
5277 5277 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5278 5278 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5279 5279
5280 5280 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5281 5281 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5282 5282
5283 5283 # task
5284 5284 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5285 5285 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5286 5286 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5287 5287 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5288 5288
5289 5289 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5290 5290 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5291 5291
5292 5292 @hybrid_property
5293 5293 def schedule_type(self):
5294 5294 return self._schedule_type
5295 5295
5296 5296 @schedule_type.setter
5297 5297 def schedule_type(self, val):
5298 5298 if val not in self.schedule_types:
5299 5299 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5300 5300 self.schedule_types, val))
5301 5301
5302 5302 self._schedule_type = val
5303 5303
5304 5304 @classmethod
5305 5305 def get_uid(cls, obj):
5306 5306 args = obj.task_args
5307 5307 kwargs = obj.task_kwargs
5308 5308 if isinstance(args, JsonRaw):
5309 5309 try:
5310 5310 args = json.loads(args)
5311 5311 except ValueError:
5312 5312 args = tuple()
5313 5313
5314 5314 if isinstance(kwargs, JsonRaw):
5315 5315 try:
5316 5316 kwargs = json.loads(kwargs)
5317 5317 except ValueError:
5318 5318 kwargs = dict()
5319 5319
5320 5320 dot_notation = obj.task_dot_notation
5321 5321 val = '.'.join(map(safe_str, [
5322 5322 sorted(dot_notation), args, sorted(kwargs.items())]))
5323 5323 return hashlib.sha1(val).hexdigest()
5324 5324
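`get_uid` derives a stable identifier from the task definition: the same dot-notation path, args, and kwargs always hash to the same sha1, which is what the unique `task_uid` constraint keys on. A simplified, runnable sketch of the derivation (it keeps the original's quirk of sorting the characters of the dot-notation string):

```python
import hashlib


def task_uid(dot_notation, args, kwargs):
    # Build a stable string from the task path plus its (sorted)
    # arguments and hash it, so the same task definition always
    # yields the same uid. sorted() over the dot-notation string
    # sorts its characters, matching the original implementation.
    val = '.'.join(map(str, [sorted(dot_notation), args, sorted(kwargs.items())]))
    return hashlib.sha1(val.encode('utf8')).hexdigest()


uid_a = task_uid('app.tasks.cleanup', (30,), {'dry_run': False})
uid_b = task_uid('app.tasks.cleanup', (30,), {'dry_run': False})
assert uid_a == uid_b  # identical definitions hash identically
```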
5325 5325 @classmethod
5326 5326 def get_by_schedule_name(cls, schedule_name):
5327 5327 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5328 5328
5329 5329 @classmethod
5330 5330 def get_by_schedule_id(cls, schedule_id):
5331 5331 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5332 5332
5333 5333 @property
5334 5334 def task(self):
5335 5335 return self.task_dot_notation
5336 5336
5337 5337 @property
5338 5338 def schedule(self):
5339 5339 from rhodecode.lib.celerylib.utils import raw_2_schedule
5340 5340 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5341 5341 return schedule
5342 5342
5343 5343 @property
5344 5344 def args(self):
5345 5345 try:
5346 5346 return list(self.task_args or [])
5347 5347 except ValueError:
5348 5348 return list()
5349 5349
5350 5350 @property
5351 5351 def kwargs(self):
5352 5352 try:
5353 5353 return dict(self.task_kwargs or {})
5354 5354 except ValueError:
5355 5355 return dict()
5356 5356
5357 5357 def _as_raw(self, val, indent=None):
5358 5358 if hasattr(val, 'de_coerce'):
5359 5359 val = val.de_coerce()
5360 5360 if val:
5361 5361 val = json.dumps(val, indent=indent, sort_keys=True)
5362 5362
5363 5363 return val
5364 5364
5365 5365 @property
5366 5366 def schedule_definition_raw(self):
5367 5367 return self._as_raw(self.schedule_definition)
5368 5368
5369 5369 def args_raw(self, indent=None):
5370 5370 return self._as_raw(self.task_args, indent)
5371 5371
5372 5372 def kwargs_raw(self, indent=None):
5373 5373 return self._as_raw(self.task_kwargs, indent)
5374 5374
5375 5375 def __repr__(self):
5376 5376 return '<DB:ScheduleEntry({}:{})>'.format(
5377 5377 self.schedule_entry_id, self.schedule_name)
5378 5378
5379 5379
5380 5380 @event.listens_for(ScheduleEntry, 'before_update')
5381 5381 def update_task_uid(mapper, connection, target):
5382 5382 target.task_uid = ScheduleEntry.get_uid(target)
5383 5383
5384 5384
5385 5385 @event.listens_for(ScheduleEntry, 'before_insert')
5386 5386 def set_task_uid(mapper, connection, target):
5387 5387 target.task_uid = ScheduleEntry.get_uid(target)
5388 5388
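The two listeners keep the derived `task_uid` column in sync on every insert and update, so callers never have to remember to recompute it. A minimal self-contained illustration of the same pattern, written against SQLAlchemy 1.4+ for brevity (the model and column names here are hypothetical, not the RhodeCode schema):

```python
import hashlib

import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Entry(Base):
    # illustration only, not the RhodeCode model
    __tablename__ = 'entries'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(255))
    uid = sa.Column(sa.String(40))


@event.listens_for(Entry, 'before_insert')
@event.listens_for(Entry, 'before_update')
def _sync_uid(mapper, connection, target):
    # recompute the derived column on every write, just as the
    # ScheduleEntry listeners above keep task_uid consistent
    target.uid = hashlib.sha1(target.name.encode('utf8')).hexdigest()


engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Entry(name='nightly'))
    session.commit()
    assert session.query(Entry).one().uid == hashlib.sha1(b'nightly').hexdigest()
```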
5389 5389
5390 5390 class _BaseBranchPerms(BaseModel):
5391 5391 @classmethod
5392 5392 def compute_hash(cls, value):
5393 5393 return sha1_safe(value)
5394 5394
5395 5395 @hybrid_property
5396 5396 def branch_pattern(self):
5397 5397 return self._branch_pattern or '*'
5398 5398
5399 5399 @hybrid_property
5400 5400 def branch_hash(self):
5401 5401 return self._branch_hash
5402 5402
5403 5403 def _validate_glob(self, value):
5404 5404 re.compile('^' + glob2re(value) + '$')
5405 5405
5406 5406 @branch_pattern.setter
5407 5407 def branch_pattern(self, value):
5408 5408 self._validate_glob(value)
5409 5409 self._branch_pattern = value or '*'
5410 5410 # set the Hash when setting the branch pattern
5411 5411 self._branch_hash = self.compute_hash(self._branch_pattern)
5412 5412
5413 5413 def matches(self, branch):
5414 5414 """
5415 5415 Check if the given branch matches this permission entry
5416 5416
5417 5417 :param branch: branch name for the commit
5418 5418 """
5419 5419
5420 5420 branch = branch or ''
5421 5421
5422 5422 branch_matches = True
5423 5423 if branch:
5424 5424 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5425 5425 branch_matches = bool(branch_regex.search(branch))
5426 5426
5427 5427 return branch_matches
5428 5428
5429 5429
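One subtlety of `_BaseBranchPerms.matches` worth calling out: an empty branch name short-circuits to a match, so operations that carry no branch context stay permissive by default. A small sketch of that behaviour, again with `fnmatch.translate` standing in for `glob2re`:

```python
import fnmatch
import re


def perm_matches(branch_pattern, branch):
    # When no branch name is given, the rule is treated as matching,
    # keeping non-branch operations permissive by default.
    branch = branch or ''
    if not branch:
        return True
    regex = re.compile('^' + fnmatch.translate(branch_pattern) + '$')
    return bool(regex.search(branch))


assert perm_matches('release/*', '')             # no branch: allowed
assert perm_matches('release/*', 'release/1.0')  # glob match
assert not perm_matches('release/*', 'default')
```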
5430 5430 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5431 5431 __tablename__ = 'user_to_repo_branch_permissions'
5432 5432 __table_args__ = (
5433 5433 base_table_args
5434 5434 )
5435 5435
5436 5436 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5437 5437
5438 5438 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5439 5439 repo = relationship('Repository', backref='user_branch_perms')
5440 5440
5441 5441 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5442 5442 permission = relationship('Permission')
5443 5443
5444 5444 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5445 5445 user_repo_to_perm = relationship('UserRepoToPerm')
5446 5446
5447 5447 rule_order = Column('rule_order', Integer(), nullable=False)
5448 5448 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5449 5449 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5450 5450
5451 5451 def __unicode__(self):
5452 5452 return u'<UserBranchPermission(%s => %r)>' % (
5453 5453 self.user_repo_to_perm, self.branch_pattern)
5454 5454
5455 5455
5456 5456 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5457 5457 __tablename__ = 'user_group_to_repo_branch_permissions'
5458 5458 __table_args__ = (
5459 5459 base_table_args
5460 5460 )
5461 5461
5462 5462 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5463 5463
5464 5464 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5465 5465 repo = relationship('Repository', backref='user_group_branch_perms')
5466 5466
5467 5467 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5468 5468 permission = relationship('Permission')
5469 5469
5470 5470 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5471 5471 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5472 5472
5473 5473 rule_order = Column('rule_order', Integer(), nullable=False)
5474 5474 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5475 5475 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5476 5476
5477 5477 def __unicode__(self):
5478 5478 return u'<UserGroupBranchPermission(%s => %r)>' % (
5479 5479 self.user_group_repo_to_perm, self.branch_pattern)
5480 5480
5481 5481
5482 5482 class UserBookmark(Base, BaseModel):
5483 5483 __tablename__ = 'user_bookmarks'
5484 5484 __table_args__ = (
5485 5485 UniqueConstraint('user_id', 'bookmark_repo_id'),
5486 5486 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5487 5487 UniqueConstraint('user_id', 'bookmark_position'),
5488 5488 base_table_args
5489 5489 )
5490 5490
5491 5491 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5492 5492 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5493 5493 position = Column("bookmark_position", Integer(), nullable=False)
5494 5494 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5495 5495 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5496 5496 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5497 5497
5498 5498 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5499 5499 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5500 5500
5501 5501 user = relationship("User")
5502 5502
5503 5503 repository = relationship("Repository")
5504 5504 repository_group = relationship("RepoGroup")
5505 5505
5506 5506 @classmethod
5507 5507 def get_by_position_for_user(cls, position, user_id):
5508 5508 return cls.query() \
5509 5509 .filter(UserBookmark.user_id == user_id) \
5510 5510 .filter(UserBookmark.position == position).scalar()
5511 5511
5512 5512 @classmethod
5513 5513 def get_bookmarks_for_user(cls, user_id, cache=True):
5514 5514 bookmarks = cls.query() \
5515 5515 .filter(UserBookmark.user_id == user_id) \
5516 5516 .options(joinedload(UserBookmark.repository)) \
5517 5517 .options(joinedload(UserBookmark.repository_group)) \
5518 5518 .order_by(UserBookmark.position.asc())
5519 5519
5520 5520 if cache:
5521 5521 bookmarks = bookmarks.options(
5522 5522 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5523 5523 )
5524 5524
5525 5525 return bookmarks.all()
5526 5526
5527 5527 def __unicode__(self):
5528 5528 return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5529 5529
5530 5530
5531 5531 class FileStore(Base, BaseModel):
5532 5532 __tablename__ = 'file_store'
5533 5533 __table_args__ = (
5534 5534 base_table_args
5535 5535 )
5536 5536
5537 5537 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5538 5538 file_uid = Column('file_uid', String(1024), nullable=False)
5539 5539 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5540 5540 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5541 5541 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5542 5542
5543 5543 # sha256 hash
5544 5544 file_hash = Column('file_hash', String(512), nullable=False)
5545 5545 file_size = Column('file_size', BigInteger(), nullable=False)
5546 5546
5547 5547 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5548 5548 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5549 5549 accessed_count = Column('accessed_count', Integer(), default=0)
5550 5550
5551 5551 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5552 5552
5553 5553 # if repo/repo_group reference is set, check for permissions
5554 5554 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5555 5555
5556 5556 # hidden marks an attachment that should not show up in the artifact listing
5557 5557 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5558 5558
5559 5559 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5560 5560 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5561 5561
5562 5562 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5563 5563
5564 5564 # scope limited to a user which the requester has access to
5565 5565 scope_user_id = Column(
5566 5566 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5567 5567 nullable=True, unique=None, default=None)
5568 5568 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5569 5569
5570 5570 # scope limited to a user group which the requester has access to
5571 5571 scope_user_group_id = Column(
5572 5572 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5573 5573 nullable=True, unique=None, default=None)
5574 5574 user_group = relationship('UserGroup', lazy='joined')
5575 5575
5576 5576 # scope limited to a repo which the requester has access to
5577 5577 scope_repo_id = Column(
5578 5578 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5579 5579 nullable=True, unique=None, default=None)
5580 5580 repo = relationship('Repository', lazy='joined')
5581 5581
5582 5582 # scope limited to a repo group which the requester has access to
5583 5583 scope_repo_group_id = Column(
5584 5584 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5585 5585 nullable=True, unique=None, default=None)
5586 5586 repo_group = relationship('RepoGroup', lazy='joined')
5587 5587
5588 5588 @classmethod
5589 5589 def get_by_store_uid(cls, file_store_uid, safe=False):
5590 5590 if safe:
5591 5591 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5592 5592 else:
5593 5593 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5594 5594
5595 5595 @classmethod
5596 5596 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5597 5597 file_description='', enabled=True, hidden=False, check_acl=True,
5598 5598 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5599 5599
5600 5600 store_entry = FileStore()
5601 5601 store_entry.file_uid = file_uid
5602 5602 store_entry.file_display_name = file_display_name
5603 5603 store_entry.file_org_name = filename
5604 5604 store_entry.file_size = file_size
5605 5605 store_entry.file_hash = file_hash
5606 5606 store_entry.file_description = file_description
5607 5607
5608 5608 store_entry.check_acl = check_acl
5609 5609 store_entry.enabled = enabled
5610 5610 store_entry.hidden = hidden
5611 5611
5612 5612 store_entry.user_id = user_id
5613 5613 store_entry.scope_user_id = scope_user_id
5614 5614 store_entry.scope_repo_id = scope_repo_id
5615 5615 store_entry.scope_repo_group_id = scope_repo_group_id
5616 5616
5617 5617 return store_entry
5618 5618
5619 5619 @classmethod
5620 5620 def store_metadata(cls, file_store_id, args, commit=True):
5621 5621 file_store = FileStore.get(file_store_id)
5622 5622 if file_store is None:
5623 5623 return
5624 5624
5625 5625 for section, key, value, value_type in args:
5626 5626 has_key = FileStoreMetadata().query() \
5627 5627 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5628 5628 .filter(FileStoreMetadata.file_store_meta_section == section) \
5629 5629 .filter(FileStoreMetadata.file_store_meta_key == key) \
5630 5630 .scalar()
5631 5631 if has_key:
5632 5632 msg = 'key `{}` already defined under section `{}` for this file.'\
5633 5633 .format(key, section)
5634 5634 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5635 5635
5636 5636 # NOTE(marcink): raises ArtifactMetadataBadValueType
5637 5637 FileStoreMetadata.valid_value_type(value_type)
5638 5638
5639 5639 meta_entry = FileStoreMetadata()
5640 5640 meta_entry.file_store = file_store
5641 5641 meta_entry.file_store_meta_section = section
5642 5642 meta_entry.file_store_meta_key = key
5643 5643 meta_entry.file_store_meta_value_type = value_type
5644 5644 meta_entry.file_store_meta_value = value
5645 5645
5646 5646 Session().add(meta_entry)
5647 5647
5648 5648 try:
5649 5649 if commit:
5650 5650 Session().commit()
5651 5651 except IntegrityError:
5652 5652 Session().rollback()
5653 5653 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5654 5654
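`store_metadata` expects an iterable of `(section, key, value, value_type)` tuples and rejects any `(section, key)` pair that already exists for the artifact. A toy, dict-backed sketch of that contract (the exception name mirrors `ArtifactMetadataDuplicate` but is local to the example):

```python
class DuplicateKey(Exception):
    pass


def store_metadata(store, entries):
    # Shape of FileStore.store_metadata's input: an iterable of
    # (section, key, value, value_type) tuples, rejected when the
    # (section, key) pair already exists for the artifact.
    for section, key, value, value_type in entries:
        if (section, key) in store:
            raise DuplicateKey(
                'key `%s` already defined under section `%s`' % (key, section))
        store[(section, key)] = (value, value_type)


meta = {}
store_metadata(meta, [('scan', 'status', 'clean', 'str'),
                      ('scan', 'engine', 'clamav', 'str')])
try:
    store_metadata(meta, [('scan', 'status', 'dirty', 'str')])
except DuplicateKey as exc:
    print(exc)  # key `status` already defined under section `scan`
```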
5655 5655 @classmethod
5656 5656 def bump_access_counter(cls, file_uid, commit=True):
5657 5657 FileStore().query()\
5658 5658 .filter(FileStore.file_uid == file_uid)\
5659 5659 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5660 5660 FileStore.accessed_on: datetime.datetime.now()})
5661 5661 if commit:
5662 5662 Session().commit()
5663 5663
5664 5664 def __json__(self):
5665 5665 data = {
5666 5666 'filename': self.file_display_name,
5667 5667 'filename_org': self.file_org_name,
5668 5668 'file_uid': self.file_uid,
5669 5669 'description': self.file_description,
5670 5670 'hidden': self.hidden,
5671 5671 'size': self.file_size,
5672 5672 'created_on': self.created_on,
5673 5673 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5674 5674 'downloaded_times': self.accessed_count,
5675 5675 'sha256': self.file_hash,
5676 5676 'metadata': self.file_metadata,
5677 5677 }
5678 5678
5679 5679 return data
5680 5680
5681 5681 def __repr__(self):
5682 5682 return '<FileStore({})>'.format(self.file_store_id)
5683 5683
5684 5684
5685 5685 class FileStoreMetadata(Base, BaseModel):
5686 5686 __tablename__ = 'file_store_metadata'
5687 5687 __table_args__ = (
5688 5688 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5689 5689 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5690 5690 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5691 5691 base_table_args
5692 5692 )
5693 5693 SETTINGS_TYPES = {
5694 5694 'str': safe_str,
5695 5695 'int': safe_int,
5696 5696 'unicode': safe_unicode,
5697 5697 'bool': str2bool,
5698 5698 'list': functools.partial(aslist, sep=',')
5699 5699 }
5700 5700
5701 5701 file_store_meta_id = Column(
5702 5702 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5703 5703 primary_key=True)
5704 5704 _file_store_meta_section = Column(
5705 5705 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5706 5706 nullable=True, unique=None, default=None)
5707 5707 _file_store_meta_section_hash = Column(
5708 5708 "file_store_meta_section_hash", String(255),
5709 5709 nullable=True, unique=None, default=None)
5710 5710 _file_store_meta_key = Column(
5711 5711 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5712 5712 nullable=True, unique=None, default=None)
5713 5713 _file_store_meta_key_hash = Column(
5714 5714 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5715 5715 _file_store_meta_value = Column(
5716 5716 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5717 5717 nullable=True, unique=None, default=None)
5718 5718 _file_store_meta_value_type = Column(
5719 5719 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5720 5720 default='unicode')
5721 5721
5722 5722 file_store_id = Column(
5723 5723 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5724 5724 nullable=True, unique=None, default=None)
5725 5725
5726 5726 file_store = relationship('FileStore', lazy='joined')
5727 5727
5728 5728 @classmethod
5729 5729 def valid_value_type(cls, value):
5730 5730 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5731 5731 raise ArtifactMetadataBadValueType(
5732 5732 'value_type must be one of %s, got %s' % (cls.SETTINGS_TYPES.keys(), value))
5733 5733
5734 5734 @hybrid_property
5735 5735 def file_store_meta_section(self):
5736 5736 return self._file_store_meta_section
5737 5737
5738 5738 @file_store_meta_section.setter
5739 5739 def file_store_meta_section(self, value):
5740 5740 self._file_store_meta_section = value
5741 5741 self._file_store_meta_section_hash = _hash_key(value)
5742 5742
5743 5743 @hybrid_property
5744 5744 def file_store_meta_key(self):
5745 5745 return self._file_store_meta_key
5746 5746
5747 5747 @file_store_meta_key.setter
5748 5748 def file_store_meta_key(self, value):
5749 5749 self._file_store_meta_key = value
5750 5750 self._file_store_meta_key_hash = _hash_key(value)
5751 5751
5752 5752 @hybrid_property
5753 5753 def file_store_meta_value(self):
5754 5754 val = self._file_store_meta_value
5755 5755
5756 5756 if self._file_store_meta_value_type:
5757 5757 # e.g. 'unicode.encrypted' has base type 'unicode'
5758 5758 _type = self._file_store_meta_value_type.split('.')[0]
5759 5759 # decrypt the value if it is an encrypted field type
5760 5760 if '.encrypted' in self._file_store_meta_value_type:
5761 5761 cipher = EncryptedTextValue()
5762 5762 val = safe_unicode(cipher.process_result_value(val, None))
5763 5763 # do final type conversion
5764 5764 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5765 5765 val = converter(val)
5766 5766
5767 5767 return val
5768 5768
5769 5769 @file_store_meta_value.setter
5770 5770 def file_store_meta_value(self, val):
5771 5771 val = safe_unicode(val)
5772 5772 # encrypt the value for encrypted field types
5773 5773 if '.encrypted' in self.file_store_meta_value_type:
5774 5774 cipher = EncryptedTextValue()
5775 5775 val = safe_unicode(cipher.process_bind_param(val, None))
5776 5776 self._file_store_meta_value = val
5777 5777
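The `.encrypted` suffix is orthogonal to the base type: it only changes how the raw column value is stored, while the part before the dot still drives the final type conversion. A self-contained sketch of that round trip, with base64 standing in for the real `EncryptedTextValue` cipher (an assumption made purely so the example runs):

```python
import base64

SETTINGS_TYPES = {
    'str': str,
    'int': int,
    'bool': lambda v: v.lower() in ('true', '1', 'yes'),
}


def encode(val, value_type):
    # The '.encrypted' suffix only changes how the raw value is
    # stored; base64 stands in for the real cipher here.
    raw = str(val)
    if '.encrypted' in value_type:
        raw = base64.b64encode(raw.encode('utf8')).decode('ascii')
    return raw


def decode(raw, value_type):
    if '.encrypted' in value_type:
        raw = base64.b64decode(raw).decode('utf8')
    base = value_type.split('.')[0]  # e.g. 'int.encrypted' -> 'int'
    return SETTINGS_TYPES.get(base, str)(raw)


stored = encode(42, 'int.encrypted')
assert decode(stored, 'int.encrypted') == 42
```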
5778 5778 @hybrid_property
5779 5779 def file_store_meta_value_type(self):
5780 5780 return self._file_store_meta_value_type
5781 5781
5782 5782 @file_store_meta_value_type.setter
5783 5783 def file_store_meta_value_type(self, val):
5784 5784 # e.g. unicode.encrypted
5785 5785 self.valid_value_type(val)
5786 5786 self._file_store_meta_value_type = val
5787 5787
5788 5788 def __json__(self):
5789 5789 data = {
5790 5790 'artifact': self.file_store.file_uid,
5791 5791 'section': self.file_store_meta_section,
5792 5792 'key': self.file_store_meta_key,
5793 5793 'value': self.file_store_meta_value,
5794 5794 }
5795 5795
5796 5796 return data
5797 5797
5798 5798 def __repr__(self):
5799 5799 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
5800 5800 self.file_store_meta_key, self.file_store_meta_value)
5801 5801
5802 5802
5803 5803 class DbMigrateVersion(Base, BaseModel):
5804 5804 __tablename__ = 'db_migrate_version'
5805 5805 __table_args__ = (
5806 5806 base_table_args,
5807 5807 )
5808 5808
5809 5809 repository_id = Column('repository_id', String(250), primary_key=True)
5810 5810 repository_path = Column('repository_path', Text)
5811 5811 version = Column('version', Integer)
5812 5812
5813 5813 @classmethod
5814 5814 def set_version(cls, version):
5815 5815 """
5816 5816 Helper for forcing a different version, usually for debugging purposes via ishell.
5817 5817 """
5818 5818 ver = DbMigrateVersion.query().first()
5819 5819 ver.version = version
5820 5820 Session().commit()
5821 5821
5822 5822
5823 5823 class DbSession(Base, BaseModel):
5824 5824 __tablename__ = 'db_session'
5825 5825 __table_args__ = (
5826 5826 base_table_args,
5827 5827 )
5828 5828
5829 5829 def __repr__(self):
5830 5830 return '<DB:DbSession({})>'.format(self.id)
5831 5831
5832 5832 id = Column('id', Integer())
5833 5833 namespace = Column('namespace', String(255), primary_key=True)
5834 5834 accessed = Column('accessed', DateTime, nullable=False)
5835 5835 created = Column('created', DateTime, nullable=False)
5836 5836 data = Column('data', PickleType, nullable=False)
@@ -1,2247 +1,2249 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use when previewing the creation of a pull request.
93 93 This is also used for default reviewers logic
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # the target commit becomes the source ref, as it is the last commit;
113 113 # for diff generation this logic gives the proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # the first chunk is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 log.debug('Obtaining commit authors from set of commits')
158 158 _compare_data = target_scm.compare(
159 159 target_ref, source_ref, source_scm, merge=True,
160 160 pre_load=["author", "date", "message"]
161 161 )
162 162
163 163 for commit in _compare_data:
164 164 # NOTE(marcink): we serialize here so we don't produce more vcsserver calls
165 165 # on the data returned from this function, which is later JSON-serialized
166 166 serialized_commit = dict(
167 167 author=commit.author,
168 168 date=commit.date,
169 169 message=commit.message,
170 170 commit_id=commit.raw_id,
171 171 raw_id=commit.raw_id
172 172 )
173 173 commits.append(serialized_commit)
174 174 user = User.get_from_cs_author(serialized_commit['author'])
175 175 if user and user not in commit_authors:
176 176 commit_authors.append(user)
177 177
178 178 # lines
179 179 if get_authors:
180 180 log.debug('Calculating authors of changed files')
181 181 target_commit = source_repo.get_commit(ancestor_id)
182 182
183 183 for fname, lines in changed_lines.items():
184 184
185 185 try:
186 186 node = target_commit.get_node(fname, pre_load=["is_binary"])
187 187 except Exception:
188 188 log.exception("Failed to load node with path %s", fname)
189 189 continue
190 190
191 191 if not isinstance(node, FileNode):
192 192 continue
193 193
194 194 # NOTE(marcink): for a binary node we don't do annotation, just use the last author
195 195 if node.is_binary:
196 196 author = node.last_commit.author
197 197 email = node.last_commit.author_email
198 198
199 199 user = User.get_from_cs_author(author)
200 200 if user:
201 201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
202 202 author_counts[author] = author_counts.get(author, 0) + 1
203 203 email_counts[email] = email_counts.get(email, 0) + 1
204 204
205 205 continue
206 206
207 207 for annotation in node.annotate:
208 208 line_no, commit_id, get_commit_func, line_text = annotation
209 209 if line_no in lines:
210 210 if commit_id not in _commit_cache:
211 211 _commit_cache[commit_id] = get_commit_func()
212 212 commit = _commit_cache[commit_id]
213 213 author = commit.author
214 214 email = commit.author_email
215 215 user = User.get_from_cs_author(author)
216 216 if user:
217 217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
218 218 author_counts[author] = author_counts.get(author, 0) + 1
219 219 email_counts[email] = email_counts.get(email, 0) + 1
220 220
221 221 log.debug('Default reviewers processing finished')
222 222
223 223 return {
224 224 'commits': commits,
225 225 'files': all_files_changes,
226 226 'stats': stats,
227 227 'ancestor': ancestor_id,
228 228 # original authors of modified files
229 229 'original_authors': {
230 230 'users': user_counts,
231 231 'authors': author_counts,
232 232 'emails': email_counts,
233 233 },
234 234 'commit_authors': commit_authors
235 235 }
236 236
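The per-file loop in `get_diff_info` produces one `{'filename', 'stats'}` entry per file plus running added/deleted totals. A reduced, runnable sketch of that aggregation with hypothetical parsed-diff input:

```python
def summarize(parsed_files):
    # Mirrors the aggregation loop in get_diff_info: one entry per
    # file plus running [added, deleted] totals across the diff.
    all_files_changes, stats = [], [0, 0]
    for f in parsed_files:
        all_files_changes.append({'filename': f['filename'], 'stats': f['stats']})
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']
    return all_files_changes, stats


files, totals = summarize([
    {'filename': 'setup.py', 'stats': {'added': 3, 'deleted': 1}},
    {'filename': 'README.rst', 'stats': {'added': 10, 'deleted': 0}},
])
assert totals == [13, 1]
```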
237 237
238 238 class PullRequestModel(BaseModel):
239 239
240 240 cls = PullRequest
241 241
242 242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
243 243
244 244 UPDATE_STATUS_MESSAGES = {
245 245 UpdateFailureReason.NONE: lazy_ugettext(
246 246 'Pull request update successful.'),
247 247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
248 248 'Pull request update failed because of an unknown error.'),
249 249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
250 250 'No update needed because the source and target have not changed.'),
251 251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
252 252 'Pull request cannot be updated because the reference type is '
253 253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
254 254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
255 255 'This pull request cannot be updated because the target '
256 256 'reference is missing.'),
257 257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
258 258 'This pull request cannot be updated because the source '
259 259 'reference is missing.'),
260 260 }
261 261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
262 262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
311 311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
312 312 statuses=None, opened_by=None, order_by=None,
313 313 order_dir='desc', only_created=False):
314 314 repo = None
315 315 if repo_name:
316 316 repo = self._get_repo(repo_name)
317 317
318 318 q = PullRequest.query()
319 319
320 320 if search_q:
321 321 like_expression = u'%{}%'.format(safe_unicode(search_q))
322 322 q = q.join(User)
323 323 q = q.filter(or_(
324 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
325 325 User.username.ilike(like_expression),
326 326 PullRequest.title.ilike(like_expression),
327 327 PullRequest.description.ilike(like_expression),
328 328 ))
329 329
330 330 # source or target
331 331 if repo and source:
332 332 q = q.filter(PullRequest.source_repo == repo)
333 333 elif repo:
334 334 q = q.filter(PullRequest.target_repo == repo)
335 335
336 336 # closed,opened
337 337 if statuses:
338 338 q = q.filter(PullRequest.status.in_(statuses))
339 339
340 340 # opened by filter
341 341 if opened_by:
342 342 q = q.filter(PullRequest.user_id.in_(opened_by))
343 343
344 344 # only get those that are in "created" state
345 345 if only_created:
346 346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'id': PullRequest.pull_request_id,
352 352 'title': PullRequest.title,
353 353 'updated_on_raw': PullRequest.updated_on,
354 354 'target_repo': PullRequest.target_repo_id
355 355 }
356 356 if order_dir == 'asc':
357 357 q = q.order_by(order_map[order_by].asc())
358 358 else:
359 359 q = q.order_by(order_map[order_by].desc())
360 360
361 361 return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
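`_prepare_get_all_query` only honours `order_by` keys present in its `order_map` whitelist, so a caller-supplied sort key can never name an arbitrary column. The same pattern reduced to plain lists:

```python
def apply_ordering(rows, order_by, order_dir, order_map):
    # Whitelist pattern from _prepare_get_all_query: only keys
    # present in order_map are honoured, so user input cannot
    # inject arbitrary sort expressions.
    key = order_map[order_by]  # KeyError for unknown sort keys
    return sorted(rows, key=lambda r: r[key], reverse=(order_dir == 'desc'))


rows = [{'id': 2, 'title': 'b'}, {'id': 1, 'title': 'a'}]
order_map = {'id': 'id', 'title': 'title'}
assert apply_ordering(rows, 'id', 'asc', order_map)[0]['id'] == 1
```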
408 408 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
409 409 opened_by=None):
410 410 """
411 411 Count the number of pull requests for a specific repository that are
412 412 awaiting review.
413 413
414 414 :param repo_name: target or source repo
415 415 :param search_q: filter by text
416 416 :param source: boolean flag to specify if repo_name refers to source
417 417 :param statuses: list of pull request statuses
418 418 :param opened_by: author user of the pull request
419 419 :returns: int number of pull requests
420 420 """
421 421 pull_requests = self.get_awaiting_review(
422 422 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
423 423
424 424 return len(pull_requests)
425 425
426 426 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
427 427 opened_by=None, offset=0, length=None,
428 428 order_by=None, order_dir='desc'):
429 429 """
430 430 Get all pull requests for a specific repository that are awaiting
431 431 review.
432 432
433 433 :param repo_name: target or source repo
434 434 :param search_q: filter by text
435 435 :param source: boolean flag to specify if repo_name refers to source
436 436 :param statuses: list of pull request statuses
437 437 :param opened_by: author user of the pull request
438 438 :param offset: pagination offset
439 439 :param length: length of returned list
440 440 :param order_by: order of the returned list
441 441 :param order_dir: 'asc' or 'desc' ordering direction
442 442 :returns: list of pull requests
443 443 """
444 444 pull_requests = self.get_all(
445 445 repo_name, search_q=search_q, source=source, statuses=statuses,
446 446 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
447 447
448 448 _filtered_pull_requests = []
449 449 for pr in pull_requests:
450 450 status = pr.calculated_review_status()
451 451 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
452 452 ChangesetStatus.STATUS_UNDER_REVIEW]:
453 453 _filtered_pull_requests.append(pr)
454 454 if length:
455 455 return _filtered_pull_requests[offset:offset+length]
456 456 else:
457 457 return _filtered_pull_requests
458 458
459 459 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
460 460 opened_by=None, user_id=None):
461 461 """
462 462 Count the number of pull requests for a specific repository that are
463 463 awaiting review from a specific user.
464 464
465 465 :param repo_name: target or source repo
466 466 :param search_q: filter by text
467 467 :param source: boolean flag to specify if repo_name refers to source
468 468 :param statuses: list of pull request statuses
469 469 :param opened_by: author user of the pull request
470 470 :param user_id: reviewer user of the pull request
471 471 :returns: int number of pull requests
472 472 """
473 473 pull_requests = self.get_awaiting_my_review(
474 474 repo_name, search_q=search_q, source=source, statuses=statuses,
475 475 opened_by=opened_by, user_id=user_id)
476 476
477 477 return len(pull_requests)
478 478
479 479 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
480 480 opened_by=None, user_id=None, offset=0,
481 481 length=None, order_by=None, order_dir='desc'):
482 482 """
483 483 Get all pull requests for a specific repository that are awaiting
484 484 review from a specific user.
485 485
486 486 :param repo_name: target or source repo
487 487 :param search_q: filter by text
488 488 :param source: boolean flag to specify if repo_name refers to source
489 489 :param statuses: list of pull request statuses
490 490 :param opened_by: author user of the pull request
491 491 :param user_id: reviewer user of the pull request
492 492 :param offset: pagination offset
493 493 :param length: length of returned list
494 494 :param order_by: order of the returned list
495 495 :param order_dir: 'asc' or 'desc' ordering direction
496 496 :returns: list of pull requests
497 497 """
498 498 pull_requests = self.get_all(
499 499 repo_name, search_q=search_q, source=source, statuses=statuses,
500 500 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
501 501
502 502 _my = PullRequestModel().get_not_reviewed(user_id)
503 503 my_participation = []
504 504 for pr in pull_requests:
505 505 if pr in _my:
506 506 my_participation.append(pr)
507 507 _filtered_pull_requests = my_participation
508 508 if length:
509 509 return _filtered_pull_requests[offset:offset+length]
510 510 else:
511 511 return _filtered_pull_requests
512 512
513 513 def get_not_reviewed(self, user_id):
514 514 return [
515 515 x.pull_request for x in PullRequestReviewers.query().filter(
516 516 PullRequestReviewers.user_id == user_id).all()
517 517 ]
518 518
519 519 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
520 520 order_by=None, order_dir='desc'):
521 521 q = PullRequest.query()
522 522 if user_id:
523 523 reviewers_subquery = Session().query(
524 524 PullRequestReviewers.pull_request_id).filter(
525 525 PullRequestReviewers.user_id == user_id).subquery()
526 526 user_filter = or_(
527 527 PullRequest.user_id == user_id,
528 528 PullRequest.pull_request_id.in_(reviewers_subquery)
529 529 )
530 530 q = PullRequest.query().filter(user_filter)
531 531
532 532 # closed,opened
533 533 if statuses:
534 534 q = q.filter(PullRequest.status.in_(statuses))
535 535
536 536 if query:
537 537 like_expression = u'%{}%'.format(safe_unicode(query))
538 538 q = q.join(User)
539 539 q = q.filter(or_(
540 540 cast(PullRequest.pull_request_id, String).ilike(like_expression),
541 541 User.username.ilike(like_expression),
542 542 PullRequest.title.ilike(like_expression),
543 543 PullRequest.description.ilike(like_expression),
544 544 ))
545 545 if order_by:
546 546 order_map = {
547 547 'name_raw': PullRequest.pull_request_id,
548 548 'title': PullRequest.title,
549 549 'updated_on_raw': PullRequest.updated_on,
550 550 'target_repo': PullRequest.target_repo_id
551 551 }
552 552 if order_dir == 'asc':
553 553 q = q.order_by(order_map[order_by].asc())
554 554 else:
555 555 q = q.order_by(order_map[order_by].desc())
556 556
557 557 return q
558 558
559 559 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
560 560 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
561 561 return q.count()
562 562
563 563 def get_im_participating_in(
564 564 self, user_id=None, statuses=None, query='', offset=0,
565 565 length=None, order_by=None, order_dir='desc'):
566 566 """
567 567 Get all pull requests that I'm participating in or have opened
568 568 """
569 569
570 570 q = self._prepare_participating_query(
571 571 user_id, statuses=statuses, query=query, order_by=order_by,
572 572 order_dir=order_dir)
573 573
574 574 if length:
575 575 pull_requests = q.limit(length).offset(offset).all()
576 576 else:
577 577 pull_requests = q.all()
578 578
579 579 return pull_requests
580 580
581 581 def get_versions(self, pull_request):
582 582 """
583 583 returns versions of the pull request sorted by version ID ascending
584 584 """
585 585 return PullRequestVersion.query()\
586 586 .filter(PullRequestVersion.pull_request == pull_request)\
587 587 .order_by(PullRequestVersion.pull_request_version_id.asc())\
588 588 .all()
589 589
590 590 def get_pr_version(self, pull_request_id, version=None):
591 591 at_version = None
592 592
593 593 if version and version == 'latest':
594 594 pull_request_ver = PullRequest.get(pull_request_id)
595 595 pull_request_obj = pull_request_ver
596 596 _org_pull_request_obj = pull_request_obj
597 597 at_version = 'latest'
598 598 elif version:
599 599 pull_request_ver = PullRequestVersion.get_or_404(version)
600 600 pull_request_obj = pull_request_ver
601 601 _org_pull_request_obj = pull_request_ver.pull_request
602 602 at_version = pull_request_ver.pull_request_version_id
603 603 else:
604 604 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
605 605 pull_request_id)
606 606
607 607 pull_request_display_obj = PullRequest.get_pr_display_object(
608 608 pull_request_obj, _org_pull_request_obj)
609 609
610 610 return _org_pull_request_obj, pull_request_obj, \
611 611 pull_request_display_obj, at_version
612 612
613 613 def pr_commits_versions(self, versions):
614 614 """
615 615 Maps the pull-request commits onto all known PR versions. This way we can
616 616 tell, for each commit, which PR versions it appeared in.
617 617 """
618 618 commit_versions = collections.defaultdict(list)
619 619 num_versions = [x.pull_request_version_id for x in versions]
620 620 for ver in versions:
621 621 for commit_id in ver.revisions:
622 622 ver_idx = ChangesetComment.get_index_from_version(
623 623 ver.pull_request_version_id, num_versions=num_versions)
624 624 commit_versions[commit_id].append(ver_idx)
625 625 return commit_versions
626 626
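`pr_commits_versions` builds a commit-to-versions map so the UI can show in which PR version a commit appeared. A simplified sketch of the same shape, using a plain 1-based index in place of `ChangesetComment.get_index_from_version`:

```python
import collections


def commits_to_versions(versions):
    # versions is an ordered list of (version_id, revisions) pairs;
    # the result maps each commit to the 1-based version indexes it
    # appeared in (the real code asks ChangesetComment for the index).
    version_ids = [ver_id for ver_id, _ in versions]
    commit_versions = collections.defaultdict(list)
    for ver_id, revisions in versions:
        for commit_id in revisions:
            commit_versions[commit_id].append(version_ids.index(ver_id) + 1)
    return commit_versions


mapping = commits_to_versions([
    (10, ['abc1']),
    (11, ['abc1', 'def2']),
])
assert mapping['abc1'] == [1, 2]  # present since version 1
assert mapping['def2'] == [2]     # introduced in version 2
```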
627 627 def create(self, created_by, source_repo, source_ref, target_repo,
628 628 target_ref, revisions, reviewers, observers, title, description=None,
629 629 common_ancestor_id=None,
630 630 description_renderer=None,
631 631 reviewer_data=None, translator=None, auth_user=None):
632 632 translator = translator or get_current_request().translate
633 633
634 634 created_by_user = self._get_user(created_by)
635 635 auth_user = auth_user or created_by_user.AuthUser()
636 636 source_repo = self._get_repo(source_repo)
637 637 target_repo = self._get_repo(target_repo)
638 638
639 639 pull_request = PullRequest()
640 640 pull_request.source_repo = source_repo
641 641 pull_request.source_ref = source_ref
642 642 pull_request.target_repo = target_repo
643 643 pull_request.target_ref = target_ref
644 644 pull_request.revisions = revisions
645 645 pull_request.title = title
646 646 pull_request.description = description
647 647 pull_request.description_renderer = description_renderer
648 648 pull_request.author = created_by_user
649 649 pull_request.reviewer_data = reviewer_data
650 650 pull_request.pull_request_state = pull_request.STATE_CREATING
651 651 pull_request.common_ancestor_id = common_ancestor_id
652 652
653 653 Session().add(pull_request)
654 654 Session().flush()
655 655
656 656 reviewer_ids = set()
657 657 # members / reviewers
658 658 for reviewer_object in reviewers:
659 659 user_id, reasons, mandatory, role, rules = reviewer_object
660 660 user = self._get_user(user_id)
661 661
662 662 # skip duplicates
663 663 if user.user_id in reviewer_ids:
664 664 continue
665 665
666 666 reviewer_ids.add(user.user_id)
667 667
668 668 reviewer = PullRequestReviewers()
669 669 reviewer.user = user
670 670 reviewer.pull_request = pull_request
671 671 reviewer.reasons = reasons
672 672 reviewer.mandatory = mandatory
673 673 reviewer.role = role
674 674
675 675 # NOTE(marcink): pick only first rule for now
676 676 rule_id = list(rules)[0] if rules else None
677 677 rule = RepoReviewRule.get(rule_id) if rule_id else None
678 678 if rule:
679 679 review_group = rule.user_group_vote_rule(user_id)
680 680 # we check if this particular reviewer is member of a voting group
681 681 if review_group:
682 682 # NOTE(marcink):
683 683 # the user can be a member of more groups, but we pick the first one,
684 684 # same as the default reviewers algorithm
685 685 review_group = review_group[0]
686 686
687 687 rule_data = {
688 688 'rule_name':
689 689 rule.review_rule_name,
690 690 'rule_user_group_entry_id':
691 691 review_group.repo_review_rule_users_group_id,
692 692 'rule_user_group_name':
693 693 review_group.users_group.users_group_name,
694 694 'rule_user_group_members':
695 695 [x.user.username for x in review_group.users_group.members],
696 696 'rule_user_group_members_id':
697 697 [x.user.user_id for x in review_group.users_group.members],
698 698 }
699 699 # e.g {'vote_rule': -1, 'mandatory': True}
700 700 rule_data.update(review_group.rule_data())
701 701
702 702 reviewer.rule_data = rule_data
703 703
704 704 Session().add(reviewer)
705 705 Session().flush()
706 706
707 707 for observer_object in observers:
708 708 user_id, reasons, mandatory, role, rules = observer_object
709 709 user = self._get_user(user_id)
710 710
711 711 # skip duplicates from reviewers
712 712 if user.user_id in reviewer_ids:
713 713 continue
714 714
715 715 #reviewer_ids.add(user.user_id)
716 716
717 717 observer = PullRequestReviewers()
718 718 observer.user = user
719 719 observer.pull_request = pull_request
720 720 observer.reasons = reasons
721 721 observer.mandatory = mandatory
722 722 observer.role = role
723 723
724 724 # NOTE(marcink): pick only first rule for now
725 725 rule_id = list(rules)[0] if rules else None
726 726 rule = RepoReviewRule.get(rule_id) if rule_id else None
727 727 if rule:
728 728 # TODO(marcink): do we need this for observers ??
729 729 pass
730 730
731 731 Session().add(observer)
732 732 Session().flush()
733 733
734 734 # Set approval status to "Under Review" for all commits which are
735 735 # part of this pull request.
736 736 ChangesetStatusModel().set_status(
737 737 repo=target_repo,
738 738 status=ChangesetStatus.STATUS_UNDER_REVIEW,
739 739 user=created_by_user,
740 740 pull_request=pull_request
741 741 )
742 742 # we commit early at this point. This has to do with the fact
743 743 # that the queries above do some row-locking. Because of that
744 744 # we need to commit and finish the transaction before the validate call
745 745 # below, which for large repos could take long, resulting in long row locks
746 746 Session().commit()
747 747
748 748 # prepare workspace, and run initial merge simulation. Set state during that
749 749 # operation
750 750 pull_request = PullRequest.get(pull_request.pull_request_id)
751 751
752 752 # set state to merging for the merge simulation, and once finished set it
753 753 # to created, marking that the simulation works fine
754 754 with pull_request.set_state(PullRequest.STATE_MERGING,
755 755 final_state=PullRequest.STATE_CREATED) as state_obj:
756 756 MergeCheck.validate(
757 757 pull_request, auth_user=auth_user, translator=translator)
758 758
759 759 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
760 760 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
761 761
762 762 creation_data = pull_request.get_api_data(with_merge_state=False)
763 763 self._log_audit_action(
764 764 'repo.pull_request.create', {'data': creation_data},
765 765 auth_user, pull_request)
766 766
767 767 return pull_request
768 768
769 769 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
770 770 pull_request = self.__get_pull_request(pull_request)
771 771 target_scm = pull_request.target_repo.scm_instance()
772 772 if action == 'create':
773 773 trigger_hook = hooks_utils.trigger_create_pull_request_hook
774 774 elif action == 'merge':
775 775 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
776 776 elif action == 'close':
777 777 trigger_hook = hooks_utils.trigger_close_pull_request_hook
778 778 elif action == 'review_status_change':
779 779 trigger_hook = hooks_utils.trigger_review_pull_request_hook
780 780 elif action == 'update':
781 781 trigger_hook = hooks_utils.trigger_update_pull_request_hook
782 782 elif action == 'comment':
783 783 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
784 784 elif action == 'comment_edit':
785 785 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
786 786 else:
787 787 return
788 788
789 789 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
790 790 pull_request, action, trigger_hook)
791 791 trigger_hook(
792 792 username=user.username,
793 793 repo_name=pull_request.target_repo.repo_name,
794 794 repo_type=target_scm.alias,
795 795 pull_request=pull_request,
796 796 data=data)
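# A hedged usage sketch (hypothetical objects): actions map to the
# hooks_utils triggers above, and unknown actions simply hit the early
# `return` and trigger nothing.
#
#   PullRequestModel().trigger_pull_request_hook(
#       pull_request, user, 'comment', data={'comment': comment})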
797 797
798 798 def _get_commit_ids(self, pull_request):
799 799 """
800 800 Return the commit ids of the merged pull request.
801 801
802 802 This method does not yet deal correctly with the lack of autoupdates
803 803 nor with implicit target updates.
804 804 For example: if a commit in the source repo is already in the target, it
805 805 will be reported anyway.
806 806 """
807 807 merge_rev = pull_request.merge_rev
808 808 if merge_rev is None:
809 809 raise ValueError('This pull request was not merged yet')
810 810
811 811 commit_ids = list(pull_request.revisions)
812 812 if merge_rev not in commit_ids:
813 813 commit_ids.append(merge_rev)
814 814
815 815 return commit_ids
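# A small sketch of the behaviour above (hypothetical ids):
#
#   pull_request.revisions = ['aaa111', 'bbb222']
#   pull_request.merge_rev = 'ccc333'
#   model._get_commit_ids(pull_request)  # -> ['aaa111', 'bbb222', 'ccc333']
#   # with merge_rev None the call raises ValueError instead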
816 816
817 817 def merge_repo(self, pull_request, user, extras):
818 818 log.debug("Merging pull request %s", pull_request.pull_request_id)
819 819 extras['user_agent'] = 'internal-merge'
820 820 merge_state = self._merge_pull_request(pull_request, user, extras)
821 821 if merge_state.executed:
822 822 log.debug("Merge was successful, updating the pull request comments.")
823 823 self._comment_and_close_pr(pull_request, user, merge_state)
824 824
825 825 self._log_audit_action(
826 826 'repo.pull_request.merge',
827 827 {'merge_state': merge_state.__dict__},
828 828 user, pull_request)
829 829
830 830 else:
831 831 log.warn("Merge failed, not updating the pull request.")
832 832 return merge_state
833 833
834 834 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
835 835 target_vcs = pull_request.target_repo.scm_instance()
836 836 source_vcs = pull_request.source_repo.scm_instance()
837 837
838 838 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
839 839 pr_id=pull_request.pull_request_id,
840 840 pr_title=pull_request.title,
841 841 source_repo=source_vcs.name,
842 842 source_ref_name=pull_request.source_ref_parts.name,
843 843 target_repo=target_vcs.name,
844 844 target_ref_name=pull_request.target_ref_parts.name,
845 845 )
846 846
847 847 workspace_id = self._workspace_id(pull_request)
848 848 repo_id = pull_request.target_repo.repo_id
849 849 use_rebase = self._use_rebase_for_merging(pull_request)
850 850 close_branch = self._close_branch_before_merging(pull_request)
851 851 user_name = self._user_name_for_merging(pull_request, user)
852 852
853 853 target_ref = self._refresh_reference(
854 854 pull_request.target_ref_parts, target_vcs)
855 855
856 856 callback_daemon, extras = prepare_callback_daemon(
857 857 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
858 858 host=vcs_settings.HOOKS_HOST,
859 859 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
860 860
861 861 with callback_daemon:
862 862 # TODO: johbo: Implement a clean way to run a config_override
863 863 # for a single call.
864 864 target_vcs.config.set(
865 865 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
866 866
867 867 merge_state = target_vcs.merge(
868 868 repo_id, workspace_id, target_ref, source_vcs,
869 869 pull_request.source_ref_parts,
870 870 user_name=user_name, user_email=user.email,
871 871 message=message, use_rebase=use_rebase,
872 872 close_branch=close_branch)
873 873 return merge_state
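# A sketch of the message templating above, assuming a hypothetical
# MERGE_MESSAGE_TMPL of u'Merge !{pr_id}: {pr_title}':
#
#   u'Merge !{pr_id}: {pr_title}'.format(
#       pr_id=7, pr_title=u'Fix login', source_repo=u'repo-fork',
#       source_ref_name=u'feature', target_repo=u'repo',
#       target_ref_name=u'master')
#   # -> u'Merge !7: Fix login'; unused keys are ignored by str.format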
874 874
875 875 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
876 876 pull_request.merge_rev = merge_state.merge_ref.commit_id
877 877 pull_request.updated_on = datetime.datetime.now()
878 878 close_msg = close_msg or 'Pull request merged and closed'
879 879
880 880 CommentsModel().create(
881 881 text=safe_unicode(close_msg),
882 882 repo=pull_request.target_repo.repo_id,
883 883 user=user.user_id,
884 884 pull_request=pull_request.pull_request_id,
885 885 f_path=None,
886 886 line_no=None,
887 887 closing_pr=True
888 888 )
889 889
890 890 Session().add(pull_request)
891 891 Session().flush()
892 892 # TODO: paris: replace invalidation with less radical solution
893 893 ScmModel().mark_for_invalidation(
894 894 pull_request.target_repo.repo_name)
895 895 self.trigger_pull_request_hook(pull_request, user, 'merge')
896 896
897 897 def has_valid_update_type(self, pull_request):
898 898 source_ref_type = pull_request.source_ref_parts.type
899 899 return source_ref_type in self.REF_TYPES
900 900
901 901 def get_flow_commits(self, pull_request):
902 902
903 903 # source repo
904 904 source_ref_name = pull_request.source_ref_parts.name
905 905 source_ref_type = pull_request.source_ref_parts.type
906 906 source_ref_id = pull_request.source_ref_parts.commit_id
907 907 source_repo = pull_request.source_repo.scm_instance()
908 908
909 909 try:
910 910 if source_ref_type in self.REF_TYPES:
911 source_commit = source_repo.get_commit(source_ref_name)
911 source_commit = source_repo.get_commit(
912 source_ref_name, reference_obj=pull_request.source_ref_parts)
912 913 else:
913 914 source_commit = source_repo.get_commit(source_ref_id)
914 915 except CommitDoesNotExistError:
915 916 raise SourceRefMissing()
916 917
917 918 # target repo
918 919 target_ref_name = pull_request.target_ref_parts.name
919 920 target_ref_type = pull_request.target_ref_parts.type
920 921 target_ref_id = pull_request.target_ref_parts.commit_id
921 922 target_repo = pull_request.target_repo.scm_instance()
922 923
923 924 try:
924 925 if target_ref_type in self.REF_TYPES:
925 target_commit = target_repo.get_commit(target_ref_name)
926 target_commit = target_repo.get_commit(
927 target_ref_name, reference_obj=pull_request.target_ref_parts)
926 928 else:
927 929 target_commit = target_repo.get_commit(target_ref_id)
928 930 except CommitDoesNotExistError:
929 931 raise TargetRefMissing()
930 932
931 933 return source_commit, target_commit
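# Why reference_obj matters above (a sketch; Reference fields are
# (type, name, commit_id) as used elsewhere in this module): for a branch
# whose name looks like a commit id, e.g. a numeric one, a bare lookup
# could be misread as an id prefix, while an explicit reference keeps the
# translation to a commit unambiguous:
#
#   ref = Reference('branch', '1234', None)
#   source_repo.get_commit('1234', reference_obj=ref)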
932 934
933 935 def update_commits(self, pull_request, updating_user):
934 936 """
935 937 Get the updated list of commits for the pull request
936 938 and return the new pull request version and the list
937 939 of commits processed by this update action
938 940
939 941 updating_user is the user_object who triggered the update
940 942 """
941 943 pull_request = self.__get_pull_request(pull_request)
942 944 source_ref_type = pull_request.source_ref_parts.type
943 945 source_ref_name = pull_request.source_ref_parts.name
944 946 source_ref_id = pull_request.source_ref_parts.commit_id
945 947
946 948 target_ref_type = pull_request.target_ref_parts.type
947 949 target_ref_name = pull_request.target_ref_parts.name
948 950 target_ref_id = pull_request.target_ref_parts.commit_id
949 951
950 952 if not self.has_valid_update_type(pull_request):
951 953 log.debug("Skipping update of pull request %s due to ref type: %s",
952 954 pull_request, source_ref_type)
953 955 return UpdateResponse(
954 956 executed=False,
955 957 reason=UpdateFailureReason.WRONG_REF_TYPE,
956 958 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
957 959 source_changed=False, target_changed=False)
958 960
959 961 try:
960 962 source_commit, target_commit = self.get_flow_commits(pull_request)
961 963 except SourceRefMissing:
962 964 return UpdateResponse(
963 965 executed=False,
964 966 reason=UpdateFailureReason.MISSING_SOURCE_REF,
965 967 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
966 968 source_changed=False, target_changed=False)
967 969 except TargetRefMissing:
968 970 return UpdateResponse(
969 971 executed=False,
970 972 reason=UpdateFailureReason.MISSING_TARGET_REF,
971 973 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
972 974 source_changed=False, target_changed=False)
973 975
974 976 source_changed = source_ref_id != source_commit.raw_id
975 977 target_changed = target_ref_id != target_commit.raw_id
976 978
977 979 if not (source_changed or target_changed):
978 980 log.debug("Nothing changed in pull request %s", pull_request)
979 981 return UpdateResponse(
980 982 executed=False,
981 983 reason=UpdateFailureReason.NO_CHANGE,
982 984 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
983 985 source_changed=source_changed, target_changed=target_changed)
984 986
985 987 change_in_found = 'target repo' if target_changed else 'source repo'
986 988 log.debug('Updating pull request because of change in %s detected',
987 989 change_in_found)
988 990
989 991 # Finally an update is needed; in case of a source change
990 992 # we create a new version, otherwise just an in-place update
991 993 if source_changed:
992 994 pull_request_version = self._create_version_from_snapshot(pull_request)
993 995 self._link_comments_to_version(pull_request_version)
994 996 else:
995 997 try:
996 998 ver = pull_request.versions[-1]
997 999 except IndexError:
998 1000 ver = None
999 1001
1000 1002 pull_request.pull_request_version_id = \
1001 1003 ver.pull_request_version_id if ver else None
1002 1004 pull_request_version = pull_request
1003 1005
1004 1006 source_repo = pull_request.source_repo.scm_instance()
1005 1007 target_repo = pull_request.target_repo.scm_instance()
1006 1008
1007 1009 # re-compute commit ids
1008 1010 old_commit_ids = pull_request.revisions
1009 1011 pre_load = ["author", "date", "message", "branch"]
1010 1012 commit_ranges = target_repo.compare(
1011 1013 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1012 1014 pre_load=pre_load)
1013 1015
1014 1016 target_ref = target_commit.raw_id
1015 1017 source_ref = source_commit.raw_id
1016 1018 ancestor_commit_id = target_repo.get_common_ancestor(
1017 1019 target_ref, source_ref, source_repo)
1018 1020
1019 1021 if not ancestor_commit_id:
1020 1022 raise ValueError(
1021 1023 'cannot calculate diff info without a common ancestor. '
1022 1024 'Make sure both repositories are related, and have a common forking commit.')
1023 1025
1024 1026 pull_request.common_ancestor_id = ancestor_commit_id
1025 1027
1026 1028 pull_request.source_ref = '%s:%s:%s' % (
1027 1029 source_ref_type, source_ref_name, source_commit.raw_id)
1028 1030 pull_request.target_ref = '%s:%s:%s' % (
1029 1031 target_ref_type, target_ref_name, ancestor_commit_id)
1030 1032
1031 1033 pull_request.revisions = [
1032 1034 commit.raw_id for commit in reversed(commit_ranges)]
1033 1035 pull_request.updated_on = datetime.datetime.now()
1034 1036 Session().add(pull_request)
1035 1037 new_commit_ids = pull_request.revisions
1036 1038
1037 1039 old_diff_data, new_diff_data = self._generate_update_diffs(
1038 1040 pull_request, pull_request_version)
1039 1041
1040 1042 # calculate commit and file changes
1041 1043 commit_changes = self._calculate_commit_id_changes(
1042 1044 old_commit_ids, new_commit_ids)
1043 1045 file_changes = self._calculate_file_changes(
1044 1046 old_diff_data, new_diff_data)
1045 1047
1046 1048 # set comments as outdated if DIFFS changed
1047 1049 CommentsModel().outdate_comments(
1048 1050 pull_request, old_diff_data=old_diff_data,
1049 1051 new_diff_data=new_diff_data)
1050 1052
1051 1053 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1052 1054 file_node_changes = (
1053 1055 file_changes.added or file_changes.modified or file_changes.removed)
1054 1056 pr_has_changes = valid_commit_changes or file_node_changes
1055 1057
1056 1058 # Add an automatic comment to the pull request if
1057 1059 # anything has changed
1058 1060 if pr_has_changes:
1059 1061 update_comment = CommentsModel().create(
1060 1062 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1061 1063 repo=pull_request.target_repo,
1062 1064 user=pull_request.author,
1063 1065 pull_request=pull_request,
1064 1066 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1065 1067
1066 1068 # Update status to "Under Review" for added commits
1067 1069 for commit_id in commit_changes.added:
1068 1070 ChangesetStatusModel().set_status(
1069 1071 repo=pull_request.source_repo,
1070 1072 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1071 1073 comment=update_comment,
1072 1074 user=pull_request.author,
1073 1075 pull_request=pull_request,
1074 1076 revision=commit_id)
1075 1077
1076 1078 # send update email to users
1077 1079 try:
1078 1080 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1079 1081 ancestor_commit_id=ancestor_commit_id,
1080 1082 commit_changes=commit_changes,
1081 1083 file_changes=file_changes)
1082 1084 except Exception:
1083 1085 log.exception('Failed to send email notification to users')
1084 1086
1085 1087 log.debug(
1086 1088 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1087 1089 'removed_ids: %s', pull_request.pull_request_id,
1088 1090 commit_changes.added, commit_changes.common, commit_changes.removed)
1089 1091 log.debug(
1090 1092 'Updated pull request with the following file changes: %s',
1091 1093 file_changes)
1092 1094
1093 1095 log.info(
1094 1096 "Updated pull request %s from commit %s to commit %s, "
1095 1097 "stored new version %s of this pull request.",
1096 1098 pull_request.pull_request_id, source_ref_id,
1097 1099 pull_request.source_ref_parts.commit_id,
1098 1100 pull_request_version.pull_request_version_id)
1099 1101 Session().commit()
1100 1102 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1101 1103
1102 1104 return UpdateResponse(
1103 1105 executed=True, reason=UpdateFailureReason.NONE,
1104 1106 old=pull_request, new=pull_request_version,
1105 1107 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1106 1108 source_changed=source_changed, target_changed=target_changed)
1107 1109
1108 1110 def _create_version_from_snapshot(self, pull_request):
1109 1111 version = PullRequestVersion()
1110 1112 version.title = pull_request.title
1111 1113 version.description = pull_request.description
1112 1114 version.status = pull_request.status
1113 1115 version.pull_request_state = pull_request.pull_request_state
1114 1116 version.created_on = datetime.datetime.now()
1115 1117 version.updated_on = pull_request.updated_on
1116 1118 version.user_id = pull_request.user_id
1117 1119 version.source_repo = pull_request.source_repo
1118 1120 version.source_ref = pull_request.source_ref
1119 1121 version.target_repo = pull_request.target_repo
1120 1122 version.target_ref = pull_request.target_ref
1121 1123
1122 1124 version._last_merge_source_rev = pull_request._last_merge_source_rev
1123 1125 version._last_merge_target_rev = pull_request._last_merge_target_rev
1124 1126 version.last_merge_status = pull_request.last_merge_status
1125 1127 version.last_merge_metadata = pull_request.last_merge_metadata
1126 1128 version.shadow_merge_ref = pull_request.shadow_merge_ref
1127 1129 version.merge_rev = pull_request.merge_rev
1128 1130 version.reviewer_data = pull_request.reviewer_data
1129 1131
1130 1132 version.revisions = pull_request.revisions
1131 1133 version.common_ancestor_id = pull_request.common_ancestor_id
1132 1134 version.pull_request = pull_request
1133 1135 Session().add(version)
1134 1136 Session().flush()
1135 1137
1136 1138 return version
1137 1139
1138 1140 def _generate_update_diffs(self, pull_request, pull_request_version):
1139 1141
1140 1142 diff_context = (
1141 1143 self.DIFF_CONTEXT +
1142 1144 CommentsModel.needed_extra_diff_context())
1143 1145 hide_whitespace_changes = False
1144 1146 source_repo = pull_request_version.source_repo
1145 1147 source_ref_id = pull_request_version.source_ref_parts.commit_id
1146 1148 target_ref_id = pull_request_version.target_ref_parts.commit_id
1147 1149 old_diff = self._get_diff_from_pr_or_version(
1148 1150 source_repo, source_ref_id, target_ref_id,
1149 1151 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1150 1152
1151 1153 source_repo = pull_request.source_repo
1152 1154 source_ref_id = pull_request.source_ref_parts.commit_id
1153 1155 target_ref_id = pull_request.target_ref_parts.commit_id
1154 1156
1155 1157 new_diff = self._get_diff_from_pr_or_version(
1156 1158 source_repo, source_ref_id, target_ref_id,
1157 1159 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1158 1160
1159 1161 old_diff_data = diffs.DiffProcessor(old_diff)
1160 1162 old_diff_data.prepare()
1161 1163 new_diff_data = diffs.DiffProcessor(new_diff)
1162 1164 new_diff_data.prepare()
1163 1165
1164 1166 return old_diff_data, new_diff_data
1165 1167
1166 1168 def _link_comments_to_version(self, pull_request_version):
1167 1169 """
1168 1170 Link all unlinked comments of this pull request to the given version.
1169 1171
1170 1172 :param pull_request_version: The `PullRequestVersion` to which
1171 1173 the comments shall be linked.
1172 1174
1173 1175 """
1174 1176 pull_request = pull_request_version.pull_request
1175 1177 comments = ChangesetComment.query()\
1176 1178 .filter(
1177 1179 # TODO: johbo: Should we query for the repo at all here?
1178 1180 # Pending decision on how comments of PRs are to be related
1179 1181 # to either the source repo, the target repo or no repo at all.
1180 1182 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1181 1183 ChangesetComment.pull_request == pull_request,
1182 1184 ChangesetComment.pull_request_version == None)\
1183 1185 .order_by(ChangesetComment.comment_id.asc())
1184 1186
1185 1187 # TODO: johbo: Find out why this breaks if it is done in a bulk
1186 1188 # operation.
1187 1189 for comment in comments:
1188 1190 comment.pull_request_version_id = (
1189 1191 pull_request_version.pull_request_version_id)
1190 1192 Session().add(comment)
1191 1193
1192 1194 def _calculate_commit_id_changes(self, old_ids, new_ids):
1193 1195 added = [x for x in new_ids if x not in old_ids]
1194 1196 common = [x for x in new_ids if x in old_ids]
1195 1197 removed = [x for x in old_ids if x not in new_ids]
1196 1198 total = new_ids
1197 1199 return ChangeTuple(added, common, removed, total)
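# Worked example (hypothetical short ids):
#
#   old_ids = ['a1', 'b2', 'c3']
#   new_ids = ['b2', 'c3', 'd4']
#   self._calculate_commit_id_changes(old_ids, new_ids)
#   # -> ChangeTuple(added=['d4'], common=['b2', 'c3'],
#   #                removed=['a1'], total=['b2', 'c3', 'd4'])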
1198 1200
1199 1201 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1200 1202
1201 1203 old_files = OrderedDict()
1202 1204 for diff_data in old_diff_data.parsed_diff:
1203 1205 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1204 1206
1205 1207 added_files = []
1206 1208 modified_files = []
1207 1209 removed_files = []
1208 1210 for diff_data in new_diff_data.parsed_diff:
1209 1211 new_filename = diff_data['filename']
1210 1212 new_hash = md5_safe(diff_data['raw_diff'])
1211 1213
1212 1214 old_hash = old_files.get(new_filename)
1213 1215 if not old_hash:
1214 1216 # file is not present in the old diff, so we have to figure out the
1215 1217 # ADD/REMOVE operation from the parsed diff
1216 1218 operations_dict = diff_data['stats']['ops']
1217 1219 if diffs.DEL_FILENODE in operations_dict:
1218 1220 removed_files.append(new_filename)
1219 1221 else:
1220 1222 added_files.append(new_filename)
1221 1223 else:
1222 1224 if new_hash != old_hash:
1223 1225 modified_files.append(new_filename)
1224 1226 # now remove the file from old_files, since we have seen it already
1225 1227 del old_files[new_filename]
1226 1228
1227 1229 # removed files are those present in the old diff, but not in the NEW one;
1228 1230 # since we remove old files that are present in the new diff, the left-overs,
1229 1231 # if any, are the removed files
1230 1232 removed_files.extend(old_files.keys())
1231 1233
1232 1234 return FileChangeTuple(added_files, modified_files, removed_files)
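# Worked example (hypothetical filenames): if the old diff touched
# 'a.py' and 'b.py', and the new diff touches 'b.py' (different raw-diff
# hash) plus a brand new 'c.py', the result is
#
#   FileChangeTuple(added=['c.py'], modified=['b.py'], removed=['a.py'])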
1233 1235
1234 1236 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1235 1237 """
1236 1238 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1237 1239 so it always looks the same regardless of which default
1238 1240 renderer the system is using.
1239 1241
1240 1242 :param ancestor_commit_id: ancestor raw_id
1241 1243 :param changes: changes named tuple
1242 1244 :param file_changes: file changes named tuple
1243 1245
1244 1246 """
1245 1247 new_status = ChangesetStatus.get_status_lbl(
1246 1248 ChangesetStatus.STATUS_UNDER_REVIEW)
1247 1249
1248 1250 changed_files = (
1249 1251 file_changes.added + file_changes.modified + file_changes.removed)
1250 1252
1251 1253 params = {
1252 1254 'under_review_label': new_status,
1253 1255 'added_commits': changes.added,
1254 1256 'removed_commits': changes.removed,
1255 1257 'changed_files': changed_files,
1256 1258 'added_files': file_changes.added,
1257 1259 'modified_files': file_changes.modified,
1258 1260 'removed_files': file_changes.removed,
1259 1261 'ancestor_commit_id': ancestor_commit_id
1260 1262 }
1261 1263 renderer = RstTemplateRenderer()
1262 1264 return renderer.render('pull_request_update.mako', **params)
1263 1265
1264 1266 def edit(self, pull_request, title, description, description_renderer, user):
1265 1267 pull_request = self.__get_pull_request(pull_request)
1266 1268 old_data = pull_request.get_api_data(with_merge_state=False)
1267 1269 if pull_request.is_closed():
1268 1270 raise ValueError('This pull request is closed')
1269 1271 if title:
1270 1272 pull_request.title = title
1271 1273 pull_request.description = description
1272 1274 pull_request.updated_on = datetime.datetime.now()
1273 1275 pull_request.description_renderer = description_renderer
1274 1276 Session().add(pull_request)
1275 1277 self._log_audit_action(
1276 1278 'repo.pull_request.edit', {'old_data': old_data},
1277 1279 user, pull_request)
1278 1280
1279 1281 def update_reviewers(self, pull_request, reviewer_data, user):
1280 1282 """
1281 1283 Update the reviewers in the pull request
1282 1284
1283 1285 :param pull_request: the pr to update
1284 1286 :param reviewer_data: list of tuples
1285 1287 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1287 1289 :param user: current user who triggers this action
1287 1289 """
1288 1290
1289 1291 pull_request = self.__get_pull_request(pull_request)
1290 1292 if pull_request.is_closed():
1291 1293 raise ValueError('This pull request is closed')
1292 1294
1293 1295 reviewers = {}
1294 1296 for user_id, reasons, mandatory, role, rules in reviewer_data:
1295 1297 if isinstance(user_id, (int, compat.string_types)):
1296 1298 user_id = self._get_user(user_id).user_id
1297 1299 reviewers[user_id] = {
1298 1300 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1299 1301
1300 1302 reviewers_ids = set(reviewers.keys())
1301 1303 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1302 1304 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1303 1305
1304 1306 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1305 1307
1306 1308 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1307 1309 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1308 1310
1309 1311 log.debug("Adding %s reviewers", ids_to_add)
1310 1312 log.debug("Removing %s reviewers", ids_to_remove)
1311 1313 changed = False
1312 1314 added_audit_reviewers = []
1313 1315 removed_audit_reviewers = []
1314 1316
1315 1317 for uid in ids_to_add:
1316 1318 changed = True
1317 1319 _usr = self._get_user(uid)
1318 1320 reviewer = PullRequestReviewers()
1319 1321 reviewer.user = _usr
1320 1322 reviewer.pull_request = pull_request
1321 1323 reviewer.reasons = reviewers[uid]['reasons']
1322 1324 # NOTE(marcink): mandatory shouldn't be changed now
1323 1325 # reviewer.mandatory = reviewers[uid]['reasons']
1324 1326 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1325 1327 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1326 1328 Session().add(reviewer)
1327 1329 added_audit_reviewers.append(reviewer.get_dict())
1328 1330
1329 1331 for uid in ids_to_remove:
1330 1332 changed = True
1331 1333 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1332 1334 # This is an edge case that handles a previous state of having the same reviewer twice.
1333 1335 # this CAN happen due to the lack of DB checks
1334 1336 reviewers = PullRequestReviewers.query()\
1335 1337 .filter(PullRequestReviewers.user_id == uid,
1336 1338 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1337 1339 PullRequestReviewers.pull_request == pull_request)\
1338 1340 .all()
1339 1341
1340 1342 for obj in reviewers:
1341 1343 removed_audit_reviewers.append(obj.get_dict())
1342 1344 Session().delete(obj)
1343 1345
1344 1346 if changed:
1345 1347 Session().expire_all()
1346 1348 pull_request.updated_on = datetime.datetime.now()
1347 1349 Session().add(pull_request)
1348 1350
1349 1351 # finally store audit logs
1350 1352 for user_data in added_audit_reviewers:
1351 1353 self._log_audit_action(
1352 1354 'repo.pull_request.reviewer.add', {'data': user_data},
1353 1355 user, pull_request)
1354 1356 for user_data in removed_audit_reviewers:
1355 1357 self._log_audit_action(
1356 1358 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1357 1359 user, pull_request)
1358 1360
1359 1361 self.notify_reviewers(pull_request, ids_to_add, user)
1360 1362 return ids_to_add, ids_to_remove
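# A sketch of the expected reviewer_data shape (hypothetical values);
# user entries may be ids or usernames, per the isinstance check above:
#
#   reviewer_data = [
#       (2, ['repo owner'], True, PullRequestReviewers.ROLE_REVIEWER, []),
#       ('jdoe', ['added manually'], False, PullRequestReviewers.ROLE_REVIEWER, []),
#   ]
#   ids_added, ids_removed = model.update_reviewers(
#       pull_request, reviewer_data, cur_user)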
1361 1363
1362 1364 def update_observers(self, pull_request, observer_data, user):
1363 1365 """
1364 1366 Update the observers in the pull request
1365 1367
1366 1368 :param pull_request: the pr to update
1367 1369 :param observer_data: list of tuples
1368 1370 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1369 1371 :param user: current user who triggers this action
1370 1372 """
1371 1373 pull_request = self.__get_pull_request(pull_request)
1372 1374 if pull_request.is_closed():
1373 1375 raise ValueError('This pull request is closed')
1374 1376
1375 1377 observers = {}
1376 1378 for user_id, reasons, mandatory, role, rules in observer_data:
1377 1379 if isinstance(user_id, (int, compat.string_types)):
1378 1380 user_id = self._get_user(user_id).user_id
1379 1381 observers[user_id] = {
1380 1382 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1381 1383
1382 1384 observers_ids = set(observers.keys())
1383 1385 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1384 1386 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1385 1387
1386 1388 current_observers_ids = set([x.user.user_id for x in current_observers])
1387 1389
1388 1390 ids_to_add = observers_ids.difference(current_observers_ids)
1389 1391 ids_to_remove = current_observers_ids.difference(observers_ids)
1390 1392
1391 1393 log.debug("Adding %s observer", ids_to_add)
1392 1394 log.debug("Removing %s observer", ids_to_remove)
1393 1395 changed = False
1394 1396 added_audit_observers = []
1395 1397 removed_audit_observers = []
1396 1398
1397 1399 for uid in ids_to_add:
1398 1400 changed = True
1399 1401 _usr = self._get_user(uid)
1400 1402 observer = PullRequestReviewers()
1401 1403 observer.user = _usr
1402 1404 observer.pull_request = pull_request
1403 1405 observer.reasons = observers[uid]['reasons']
1404 1406 # NOTE(marcink): mandatory shouldn't be changed now
1405 1407 # observer.mandatory = observer[uid]['reasons']
1406 1408
1407 1409 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1408 1410 observer.role = PullRequestReviewers.ROLE_OBSERVER
1409 1411 Session().add(observer)
1410 1412 added_audit_observers.append(observer.get_dict())
1411 1413
1412 1414 for uid in ids_to_remove:
1413 1415 changed = True
1414 1416 # NOTE(marcink): we fetch "ALL" observer objects using .all().
1415 1417 # This is an edge case that handles a previous state of having the same observer twice.
1416 1418 # this CAN happen due to the lack of DB checks
1417 1419 observers = PullRequestReviewers.query()\
1418 1420 .filter(PullRequestReviewers.user_id == uid,
1419 1421 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1420 1422 PullRequestReviewers.pull_request == pull_request)\
1421 1423 .all()
1422 1424
1423 1425 for obj in observers:
1424 1426 added_audit_observers.append(obj.get_dict())
1425 1427 Session().delete(obj)
1426 1428
1427 1429 if changed:
1428 1430 Session().expire_all()
1429 1431 pull_request.updated_on = datetime.datetime.now()
1430 1432 Session().add(pull_request)
1431 1433
1432 1434 # finally store audit logs
1433 1435 for user_data in added_audit_observers:
1434 1436 self._log_audit_action(
1435 1437 'repo.pull_request.observer.add', {'data': user_data},
1436 1438 user, pull_request)
1437 1439 for user_data in removed_audit_observers:
1438 1440 self._log_audit_action(
1439 1441 'repo.pull_request.observer.delete', {'old_data': user_data},
1440 1442 user, pull_request)
1441 1443
1442 1444 self.notify_observers(pull_request, ids_to_add, user)
1443 1445 return ids_to_add, ids_to_remove
1444 1446
1445 1447 def get_url(self, pull_request, request=None, permalink=False):
1446 1448 if not request:
1447 1449 request = get_current_request()
1448 1450
1449 1451 if permalink:
1450 1452 return request.route_url(
1451 1453 'pull_requests_global',
1452 1454 pull_request_id=pull_request.pull_request_id,)
1453 1455 else:
1454 1456 return request.route_url('pullrequest_show',
1455 1457 repo_name=safe_str(pull_request.target_repo.repo_name),
1456 1458 pull_request_id=pull_request.pull_request_id,)
1457 1459
1458 1460 def get_shadow_clone_url(self, pull_request, request=None):
1459 1461 """
1460 1462 Returns qualified url pointing to the shadow repository. If this pull
1461 1463 request is closed there is no shadow repository and ``None`` will be
1462 1464 returned.
1463 1465 """
1464 1466 if pull_request.is_closed():
1465 1467 return None
1466 1468 else:
1467 1469 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1468 1470 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1469 1471
1470 1472 def _notify_reviewers(self, pull_request, user_ids, role, user):
1471 1473 # notification to reviewers/observers
1472 1474 if not user_ids:
1473 1475 return
1474 1476
1475 1477 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1476 1478
1477 1479 pull_request_obj = pull_request
1478 1480 # get the current participants of this pull request
1479 1481 recipients = user_ids
1480 1482 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1481 1483
1482 1484 pr_source_repo = pull_request_obj.source_repo
1483 1485 pr_target_repo = pull_request_obj.target_repo
1484 1486
1485 1487 pr_url = h.route_url('pullrequest_show',
1486 1488 repo_name=pr_target_repo.repo_name,
1487 1489 pull_request_id=pull_request_obj.pull_request_id,)
1488 1490
1489 1491 # set some variables for email notification
1490 1492 pr_target_repo_url = h.route_url(
1491 1493 'repo_summary', repo_name=pr_target_repo.repo_name)
1492 1494
1493 1495 pr_source_repo_url = h.route_url(
1494 1496 'repo_summary', repo_name=pr_source_repo.repo_name)
1495 1497
1496 1498 # pull request specifics
1497 1499 pull_request_commits = [
1498 1500 (x.raw_id, x.message)
1499 1501 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1500 1502
1501 1503 current_rhodecode_user = user
1502 1504 kwargs = {
1503 1505 'user': current_rhodecode_user,
1504 1506 'pull_request_author': pull_request.author,
1505 1507 'pull_request': pull_request_obj,
1506 1508 'pull_request_commits': pull_request_commits,
1507 1509
1508 1510 'pull_request_target_repo': pr_target_repo,
1509 1511 'pull_request_target_repo_url': pr_target_repo_url,
1510 1512
1511 1513 'pull_request_source_repo': pr_source_repo,
1512 1514 'pull_request_source_repo_url': pr_source_repo_url,
1513 1515
1514 1516 'pull_request_url': pr_url,
1515 1517 'thread_ids': [pr_url],
1516 1518 'user_role': role
1517 1519 }
1518 1520
1519 1521 # create notification objects, and emails
1520 1522 NotificationModel().create(
1521 1523 created_by=current_rhodecode_user,
1522 1524 notification_subject='', # Filled in based on the notification_type
1523 1525 notification_body='', # Filled in based on the notification_type
1524 1526 notification_type=notification_type,
1525 1527 recipients=recipients,
1526 1528 email_kwargs=kwargs,
1527 1529 )
1528 1530
1529 1531 def notify_reviewers(self, pull_request, reviewers_ids, user):
1530 1532 return self._notify_reviewers(pull_request, reviewers_ids,
1531 1533 PullRequestReviewers.ROLE_REVIEWER, user)
1532 1534
1533 1535 def notify_observers(self, pull_request, observers_ids, user):
1534 1536 return self._notify_reviewers(pull_request, observers_ids,
1535 1537 PullRequestReviewers.ROLE_OBSERVER, user)
1536 1538
1537 1539 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1538 1540 commit_changes, file_changes):
1539 1541
1540 1542 updating_user_id = updating_user.user_id
1541 1543 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1542 1544 # NOTE(marcink): send notification to all other users except to
1543 1545 # person who updated the PR
1544 1546 recipients = reviewers.difference(set([updating_user_id]))
1545 1547
1546 1548 log.debug('Notify following recipients about pull-request update %s', recipients)
1547 1549
1548 1550 pull_request_obj = pull_request
1549 1551
1550 1552 # send email about the update
1551 1553 changed_files = (
1552 1554 file_changes.added + file_changes.modified + file_changes.removed)
1553 1555
1554 1556 pr_source_repo = pull_request_obj.source_repo
1555 1557 pr_target_repo = pull_request_obj.target_repo
1556 1558
1557 1559 pr_url = h.route_url('pullrequest_show',
1558 1560 repo_name=pr_target_repo.repo_name,
1559 1561 pull_request_id=pull_request_obj.pull_request_id,)
1560 1562
1561 1563 # set some variables for email notification
1562 1564 pr_target_repo_url = h.route_url(
1563 1565 'repo_summary', repo_name=pr_target_repo.repo_name)
1564 1566
1565 1567 pr_source_repo_url = h.route_url(
1566 1568 'repo_summary', repo_name=pr_source_repo.repo_name)
1567 1569
1568 1570 email_kwargs = {
1569 1571 'date': datetime.datetime.now(),
1570 1572 'updating_user': updating_user,
1571 1573
1572 1574 'pull_request': pull_request_obj,
1573 1575
1574 1576 'pull_request_target_repo': pr_target_repo,
1575 1577 'pull_request_target_repo_url': pr_target_repo_url,
1576 1578
1577 1579 'pull_request_source_repo': pr_source_repo,
1578 1580 'pull_request_source_repo_url': pr_source_repo_url,
1579 1581
1580 1582 'pull_request_url': pr_url,
1581 1583
1582 1584 'ancestor_commit_id': ancestor_commit_id,
1583 1585 'added_commits': commit_changes.added,
1584 1586 'removed_commits': commit_changes.removed,
1585 1587 'changed_files': changed_files,
1586 1588 'added_files': file_changes.added,
1587 1589 'modified_files': file_changes.modified,
1588 1590 'removed_files': file_changes.removed,
1589 1591 'thread_ids': [pr_url],
1590 1592 }
1591 1593
1592 1594 # create notification objects, and emails
1593 1595 NotificationModel().create(
1594 1596 created_by=updating_user,
1595 1597 notification_subject='', # Filled in based on the notification_type
1596 1598 notification_body='', # Filled in based on the notification_type
1597 1599 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1598 1600 recipients=recipients,
1599 1601 email_kwargs=email_kwargs,
1600 1602 )
1601 1603
1602 1604 def delete(self, pull_request, user=None):
1603 1605 if not user:
1604 1606 user = getattr(get_current_rhodecode_user(), 'username', None)
1605 1607
1606 1608 pull_request = self.__get_pull_request(pull_request)
1607 1609 old_data = pull_request.get_api_data(with_merge_state=False)
1608 1610 self._cleanup_merge_workspace(pull_request)
1609 1611 self._log_audit_action(
1610 1612 'repo.pull_request.delete', {'old_data': old_data},
1611 1613 user, pull_request)
1612 1614 Session().delete(pull_request)
1613 1615
1614 1616 def close_pull_request(self, pull_request, user):
1615 1617 pull_request = self.__get_pull_request(pull_request)
1616 1618 self._cleanup_merge_workspace(pull_request)
1617 1619 pull_request.status = PullRequest.STATUS_CLOSED
1618 1620 pull_request.updated_on = datetime.datetime.now()
1619 1621 Session().add(pull_request)
1620 1622 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1621 1623
1622 1624 pr_data = pull_request.get_api_data(with_merge_state=False)
1623 1625 self._log_audit_action(
1624 1626 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1625 1627
1626 1628 def close_pull_request_with_comment(
1627 1629 self, pull_request, user, repo, message=None, auth_user=None):
1628 1630
1629 1631 pull_request_review_status = pull_request.calculated_review_status()
1630 1632
1631 1633 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1632 1634 # approved only if we have voting consent
1633 1635 status = ChangesetStatus.STATUS_APPROVED
1634 1636 else:
1635 1637 status = ChangesetStatus.STATUS_REJECTED
1636 1638 status_lbl = ChangesetStatus.get_status_lbl(status)
1637 1639
1638 1640 default_message = (
1639 1641 'Closing with status change {transition_icon} {status}.'
1640 1642 ).format(transition_icon='>', status=status_lbl)
1641 1643 text = message or default_message
1642 1644
1643 1645 # create a comment, and link it to new status
1644 1646 comment = CommentsModel().create(
1645 1647 text=text,
1646 1648 repo=repo.repo_id,
1647 1649 user=user.user_id,
1648 1650 pull_request=pull_request.pull_request_id,
1649 1651 status_change=status_lbl,
1650 1652 status_change_type=status,
1651 1653 closing_pr=True,
1652 1654 auth_user=auth_user,
1653 1655 )
1654 1656
1655 1657 # calculate old status before we change it
1656 1658 old_calculated_status = pull_request.calculated_review_status()
1657 1659 ChangesetStatusModel().set_status(
1658 1660 repo.repo_id,
1659 1661 status,
1660 1662 user.user_id,
1661 1663 comment=comment,
1662 1664 pull_request=pull_request.pull_request_id
1663 1665 )
1664 1666
1665 1667 Session().flush()
1666 1668
1667 1669 self.trigger_pull_request_hook(pull_request, user, 'comment',
1668 1670 data={'comment': comment})
1669 1671
1670 1672 # we now calculate the status of the pull request again, and based on
1671 1673 # that calculation trigger a status change. This might happen in cases
1672 1674 # where a non-reviewer admin closes a pr, which means his vote doesn't
1673 1675 # change the status, while if he's a reviewer it might change it.
1674 1676 calculated_status = pull_request.calculated_review_status()
1675 1677 if old_calculated_status != calculated_status:
1676 1678 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1677 1679 data={'status': calculated_status})
1678 1680
1679 1681 # finally close the PR
1680 1682 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1681 1683
1682 1684 return comment, status
1683 1685
1684 1686 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1685 1687 _ = translator or get_current_request().translate
1686 1688
1687 1689 if not self._is_merge_enabled(pull_request):
1688 1690 return None, False, _('Server-side pull request merging is disabled.')
1689 1691
1690 1692 if pull_request.is_closed():
1691 1693 return None, False, _('This pull request is closed.')
1692 1694
1693 1695 merge_possible, msg = self._check_repo_requirements(
1694 1696 target=pull_request.target_repo, source=pull_request.source_repo,
1695 1697 translator=_)
1696 1698 if not merge_possible:
1697 1699 return None, merge_possible, msg
1698 1700
1699 1701 try:
1700 1702 merge_response = self._try_merge(
1701 1703 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1702 1704 log.debug("Merge response: %s", merge_response)
1703 1705 return merge_response, merge_response.possible, merge_response.merge_status_message
1704 1706 except NotImplementedError:
1705 1707 return None, False, _('Pull request merging is not supported.')
1706 1708
1707 1709 def _check_repo_requirements(self, target, source, translator):
1708 1710 """
1709 1711 Check if `target` and `source` have compatible requirements.
1710 1712
1711 1713 Currently this is just checking for largefiles.
1712 1714 """
1713 1715 _ = translator
1714 1716 target_has_largefiles = self._has_largefiles(target)
1715 1717 source_has_largefiles = self._has_largefiles(source)
1716 1718 merge_possible = True
1717 1719 message = u''
1718 1720
1719 1721 if target_has_largefiles != source_has_largefiles:
1720 1722 merge_possible = False
1721 1723 if source_has_largefiles:
1722 1724 message = _(
1723 1725 'Target repository large files support is disabled.')
1724 1726 else:
1725 1727 message = _(
1726 1728 'Source repository large files support is disabled.')
1727 1729
1728 1730 return merge_possible, message
1729 1731
1730 1732 def _has_largefiles(self, repo):
1731 1733 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1732 1734 'extensions', 'largefiles')
1733 1735 return largefiles_ui and largefiles_ui[0].active
1734 1736
1735 1737 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1736 1738 """
1737 1739 Try to merge the pull request and return the merge status.
1738 1740 """
1739 1741 log.debug(
1740 1742 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1741 1743 pull_request.pull_request_id, force_shadow_repo_refresh)
1742 1744 target_vcs = pull_request.target_repo.scm_instance()
1743 1745 # Refresh the target reference.
1744 1746 try:
1745 1747 target_ref = self._refresh_reference(
1746 1748 pull_request.target_ref_parts, target_vcs)
1747 1749 except CommitDoesNotExistError:
1748 1750 merge_state = MergeResponse(
1749 1751 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1750 1752 metadata={'target_ref': pull_request.target_ref_parts})
1751 1753 return merge_state
1752 1754
1753 1755 target_locked = pull_request.target_repo.locked
1754 1756 if target_locked and target_locked[0]:
1755 1757 locked_by = 'user:{}'.format(target_locked[0])
1756 1758 log.debug("The target repository is locked by %s.", locked_by)
1757 1759 merge_state = MergeResponse(
1758 1760 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1759 1761 metadata={'locked_by': locked_by})
1760 1762 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1761 1763 pull_request, target_ref):
1762 1764 log.debug("Refreshing the merge status of the repository.")
1763 1765 merge_state = self._refresh_merge_state(
1764 1766 pull_request, target_vcs, target_ref)
1765 1767 else:
1766 1768 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1767 1769 metadata = {
1768 1770 'unresolved_files': '',
1769 1771 'target_ref': pull_request.target_ref_parts,
1770 1772 'source_ref': pull_request.source_ref_parts,
1771 1773 }
1772 1774 if pull_request.last_merge_metadata:
1773 1775 metadata.update(pull_request.last_merge_metadata_parsed)
1774 1776
1775 1777 if not possible and target_ref.type == 'branch':
1776 1778 # NOTE(marcink): case for mercurial multiple heads on branch
1777 1779 heads = target_vcs._heads(target_ref.name)
1778 1780 if len(heads) != 1:
1779 1781 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1780 1782 metadata.update({
1781 1783 'heads': heads
1782 1784 })
1783 1785
1784 1786 merge_state = MergeResponse(
1785 1787 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1786 1788
1787 1789 return merge_state
1788 1790
1789 1791 def _refresh_reference(self, reference, vcs_repository):
1790 1792 if reference.type in self.UPDATABLE_REF_TYPES:
1791 1793 name_or_id = reference.name
1792 1794 else:
1793 1795 name_or_id = reference.commit_id
1794 1796
1795 1797 refreshed_commit = vcs_repository.get_commit(name_or_id)
1796 1798 refreshed_reference = Reference(
1797 1799 reference.type, reference.name, refreshed_commit.raw_id)
1798 1800 return refreshed_reference
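# Sketch (hypothetical ids): refreshing a branch reference swaps in the
# current tip while keeping type and name intact:
#
#   ref = Reference('branch', 'default', 'aaa111')
#   self._refresh_reference(ref, vcs_repo)
#   # -> Reference('branch', 'default', '<current tip of default>')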
1799 1801
1800 1802 def _needs_merge_state_refresh(self, pull_request, target_reference):
1801 1803 return not (
1802 1804 pull_request.revisions and
1803 1805 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1804 1806 target_reference.commit_id == pull_request._last_merge_target_rev)
1805 1807
1806 1808 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1807 1809 workspace_id = self._workspace_id(pull_request)
1808 1810 source_vcs = pull_request.source_repo.scm_instance()
1809 1811 repo_id = pull_request.target_repo.repo_id
1810 1812 use_rebase = self._use_rebase_for_merging(pull_request)
1811 1813 close_branch = self._close_branch_before_merging(pull_request)
1812 1814 merge_state = target_vcs.merge(
1813 1815 repo_id, workspace_id,
1814 1816 target_reference, source_vcs, pull_request.source_ref_parts,
1815 1817 dry_run=True, use_rebase=use_rebase,
1816 1818 close_branch=close_branch)
1817 1819
1818 1820 # Do not store the response if there was an unknown error.
1819 1821 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1820 1822 pull_request._last_merge_source_rev = \
1821 1823 pull_request.source_ref_parts.commit_id
1822 1824 pull_request._last_merge_target_rev = target_reference.commit_id
1823 1825 pull_request.last_merge_status = merge_state.failure_reason
1824 1826 pull_request.last_merge_metadata = merge_state.metadata
1825 1827
1826 1828 pull_request.shadow_merge_ref = merge_state.merge_ref
1827 1829 Session().add(pull_request)
1828 1830 Session().commit()
1829 1831
1830 1832 return merge_state
1831 1833
1832 1834 def _workspace_id(self, pull_request):
1833 1835 workspace_id = 'pr-%s' % pull_request.pull_request_id
1834 1836 return workspace_id
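# e.g. a pull request with id 42 yields the workspace id 'pr-42'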
1835 1837
1836 1838 def generate_repo_data(self, repo, commit_id=None, branch=None,
1837 1839 bookmark=None, translator=None):
1838 1840 from rhodecode.model.repo import RepoModel
1839 1841
1840 1842 all_refs, selected_ref = \
1841 1843 self._get_repo_pullrequest_sources(
1842 1844 repo.scm_instance(), commit_id=commit_id,
1843 1845 branch=branch, bookmark=bookmark, translator=translator)
1844 1846
1845 1847 refs_select2 = []
1846 1848 for element in all_refs:
1847 1849 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1848 1850 refs_select2.append({'text': element[1], 'children': children})
1849 1851
1850 1852 return {
1851 1853 'user': {
1852 1854 'user_id': repo.user.user_id,
1853 1855 'username': repo.user.username,
1854 1856 'firstname': repo.user.first_name,
1855 1857 'lastname': repo.user.last_name,
1856 1858 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1857 1859 },
1858 1860 'name': repo.repo_name,
1859 1861 'link': RepoModel().get_url(repo),
1860 1862 'description': h.chop_at_smart(repo.description_safe, '\n'),
1861 1863 'refs': {
1862 1864 'all_refs': all_refs,
1863 1865 'selected_ref': selected_ref,
1864 1866 'select2_refs': refs_select2
1865 1867 }
1866 1868 }
1867 1869
1868 1870 def generate_pullrequest_title(self, source, source_ref, target):
1869 1871 return u'{source}#{at_ref} to {target}'.format(
1870 1872 source=source,
1871 1873 at_ref=source_ref,
1872 1874 target=target,
1873 1875 )
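# Example (hypothetical names):
#
#   generate_pullrequest_title(u'repo-fork', u'feature-x', u'repo')
#   # -> u'repo-fork#feature-x to repo'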
1874 1876
1875 1877 def _cleanup_merge_workspace(self, pull_request):
1876 1878 # Merging related cleanup
1877 1879 repo_id = pull_request.target_repo.repo_id
1878 1880 target_scm = pull_request.target_repo.scm_instance()
1879 1881 workspace_id = self._workspace_id(pull_request)
1880 1882
1881 1883 try:
1882 1884 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1883 1885 except NotImplementedError:
1884 1886 pass
1885 1887
1886 1888 def _get_repo_pullrequest_sources(
1887 1889 self, repo, commit_id=None, branch=None, bookmark=None,
1888 1890 translator=None):
1889 1891 """
1890 1892 Return a structure with the repo's interesting commits, suitable for
1891 1893 the selectors in the pullrequest controller
1892 1894
1893 1895 :param commit_id: a commit that must be in the list somehow
1894 1896 and selected by default
1895 1897 :param branch: a branch that must be in the list and selected
1896 1898 by default - even if closed
1897 1899 :param bookmark: a bookmark that must be in the list and selected
1898 1900 """
1899 1901 _ = translator or get_current_request().translate
1900 1902
1901 1903 commit_id = safe_str(commit_id) if commit_id else None
1902 1904 branch = safe_unicode(branch) if branch else None
1903 1905 bookmark = safe_unicode(bookmark) if bookmark else None
1904 1906
1905 1907 selected = None
1906 1908
1907 1909 # order matters: first source that has commit_id in it will be selected
1908 1910 sources = []
1909 1911 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1910 1912 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1911 1913
1912 1914 if commit_id:
1913 1915 ref_commit = (h.short_id(commit_id), commit_id)
1914 1916 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1915 1917
1916 1918 sources.append(
1917 1919 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1918 1920 )
1919 1921
1920 1922 groups = []
1921 1923
1922 1924 for group_key, ref_list, group_name, match in sources:
1923 1925 group_refs = []
1924 1926 for ref_name, ref_id in ref_list:
1925 1927 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1926 1928 group_refs.append((ref_key, ref_name))
1927 1929
1928 1930 if not selected:
1929 1931 if set([commit_id, match]) & set([ref_id, ref_name]):
1930 1932 selected = ref_key
1931 1933
1932 1934 if group_refs:
1933 1935 groups.append((group_refs, group_name))
1934 1936
1935 1937 if not selected:
1936 1938 ref = commit_id or branch or bookmark
1937 1939 if ref:
1938 1940 raise CommitDoesNotExistError(
1939 1941 u'No commit refs could be found matching: {}'.format(ref))
1940 1942 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1941 1943 selected = u'branch:{}:{}'.format(
1942 1944 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1943 1945 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1944 1946 )
1945 1947 elif repo.commit_ids:
1946 1948 # make the user select in this case
1947 1949 selected = None
1948 1950 else:
1949 1951 raise EmptyRepositoryError()
1950 1952 return groups, selected
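# A sketch of the returned structure (hypothetical refs); ref keys use
# the '{type}:{name}:{commit_id}' format built above:
#
#   groups = [([(u'branch:default:abc123', u'default')], u'Branches')]
#   selected = u'branch:default:abc123'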
1951 1953
1952 1954 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1953 1955 hide_whitespace_changes, diff_context):
1954 1956
1955 1957 return self._get_diff_from_pr_or_version(
1956 1958 source_repo, source_ref_id, target_ref_id,
1957 1959 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1958 1960
1959 1961 def _get_diff_from_pr_or_version(
1960 1962 self, source_repo, source_ref_id, target_ref_id,
1961 1963 hide_whitespace_changes, diff_context):
1962 1964
1963 1965 target_commit = source_repo.get_commit(
1964 1966 commit_id=safe_str(target_ref_id))
1965 1967 source_commit = source_repo.get_commit(
1966 1968 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1967 1969 if isinstance(source_repo, Repository):
1968 1970 vcs_repo = source_repo.scm_instance()
1969 1971 else:
1970 1972 vcs_repo = source_repo
1971 1973
1972 1974 # TODO: johbo: In the context of an update, we cannot reach
1973 1975 # the old commit anymore with our normal mechanisms. It needs
1974 1976 # some sort of special support in the vcs layer to avoid this
1975 1977 # workaround.
1976 1978 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1977 1979 vcs_repo.alias == 'git'):
1978 1980 source_commit.raw_id = safe_str(source_ref_id)
1979 1981
1980 1982 log.debug('calculating diff between '
1981 1983 'source_ref:%s and target_ref:%s for repo `%s`',
1982 1984 target_ref_id, source_ref_id,
1983 1985 safe_unicode(vcs_repo.path))
1984 1986
1985 1987 vcs_diff = vcs_repo.get_diff(
1986 1988 commit1=target_commit, commit2=source_commit,
1987 1989 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1988 1990 return vcs_diff
1989 1991
1990 1992 def _is_merge_enabled(self, pull_request):
1991 1993 return self._get_general_setting(
1992 1994 pull_request, 'rhodecode_pr_merge_enabled')
1993 1995
1994 1996 def _use_rebase_for_merging(self, pull_request):
1995 1997 repo_type = pull_request.target_repo.repo_type
1996 1998 if repo_type == 'hg':
1997 1999 return self._get_general_setting(
1998 2000 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1999 2001 elif repo_type == 'git':
2000 2002 return self._get_general_setting(
2001 2003 pull_request, 'rhodecode_git_use_rebase_for_merging')
2002 2004
2003 2005 return False
2004 2006
2005 2007 def _user_name_for_merging(self, pull_request, user):
2006 2008 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2007 2009 if env_user_name_attr and hasattr(user, env_user_name_attr):
2008 2010 user_name_attr = env_user_name_attr
2009 2011 else:
2010 2012 user_name_attr = 'short_contact'
2011 2013
2012 2014 user_name = getattr(user, user_name_attr)
2013 2015 return user_name
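# Sketch: with RC_MERGE_USER_NAME_ATTR=username exported in the server
# environment and a user object exposing that attribute, merges are
# recorded under user.username instead of the default user.short_contact:
#
#   os.environ['RC_MERGE_USER_NAME_ATTR'] = 'username'
#   self._user_name_for_merging(pull_request, user)  # -> user.username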
2014 2016
2015 2017 def _close_branch_before_merging(self, pull_request):
2016 2018 repo_type = pull_request.target_repo.repo_type
2017 2019 if repo_type == 'hg':
2018 2020 return self._get_general_setting(
2019 2021 pull_request, 'rhodecode_hg_close_branch_before_merging')
2020 2022 elif repo_type == 'git':
2021 2023 return self._get_general_setting(
2022 2024 pull_request, 'rhodecode_git_close_branch_before_merging')
2023 2025
2024 2026 return False
2025 2027
2026 2028 def _get_general_setting(self, pull_request, settings_key, default=False):
2027 2029 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2028 2030 settings = settings_model.get_general_settings()
2029 2031 return settings.get(settings_key, default)
2030 2032
2031 2033 def _log_audit_action(self, action, action_data, user, pull_request):
2032 2034 audit_logger.store(
2033 2035 action=action,
2034 2036 action_data=action_data,
2035 2037 user=user,
2036 2038 repo=pull_request.target_repo)
2037 2039
2038 2040 def get_reviewer_functions(self):
2039 2041 """
2040 2042 Fetches functions for validating and fetching default reviewers.
2041 2043 If available we use the EE package, else we fall back to the CE
2042 2044 package functions
2043 2045 """
2044 2046 try:
2045 2047 from rc_reviewers.utils import get_default_reviewers_data
2046 2048 from rc_reviewers.utils import validate_default_reviewers
2047 2049 from rc_reviewers.utils import validate_observers
2048 2050 except ImportError:
2049 2051 from rhodecode.apps.repository.utils import get_default_reviewers_data
2050 2052 from rhodecode.apps.repository.utils import validate_default_reviewers
2051 2053 from rhodecode.apps.repository.utils import validate_observers
2052 2054
2053 2055 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2054 2056
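# Editor's usage sketch: callers unpack the returned triple and use the
# functions the same way regardless of whether the EE (`rc_reviewers`) or
# CE implementation was imported.
#
#   get_data, validate_reviewers, validate_observers = \
#       PullRequestModel().get_reviewer_functions()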
2055 2057
2056 2058 class MergeCheck(object):
2057 2059     """
2058 2060     Performs merge checks and returns a check object which stores
2059 2061     information about merge errors and merge conditions.
2060 2062     """
2061 2063 TODO_CHECK = 'todo'
2062 2064 PERM_CHECK = 'perm'
2063 2065 REVIEW_CHECK = 'review'
2064 2066 MERGE_CHECK = 'merge'
2065 2067 WIP_CHECK = 'wip'
2066 2068
2067 2069 def __init__(self):
2068 2070 self.review_status = None
2069 2071 self.merge_possible = None
2070 2072 self.merge_msg = ''
2071 2073 self.merge_response = None
2072 2074 self.failed = None
2073 2075 self.errors = []
2074 2076 self.error_details = OrderedDict()
2075 2077 self.source_commit = AttributeDict()
2076 2078 self.target_commit = AttributeDict()
2077 2079 self.reviewers_count = 0
2078 2080 self.observers_count = 0
2079 2081
2080 2082 def __repr__(self):
2081 2083 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2082 2084 self.merge_possible, self.failed, self.errors)
2083 2085
2084 2086 def push_error(self, error_type, message, error_key, details):
2085 2087 self.failed = True
2086 2088 self.errors.append([error_type, message])
2087 2089 self.error_details[error_key] = dict(
2088 2090 details=details,
2089 2091 error_type=error_type,
2090 2092 message=message
2091 2093 )
2092 2094
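    # Editor's illustration of the state push_error() accumulates; the
    # values are made up for the example:
    #
    #   check = MergeCheck()
    #   check.push_error('warning', 'Pull request reviewer approval is pending.',
    #                    MergeCheck.REVIEW_CHECK, 'under_review')
    #   check.failed                   # True
    #   check.errors                   # [['warning', 'Pull request reviewer ...']]
    #   check.error_details['review']  # {'details': 'under_review', ...}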
2093 2095 @classmethod
2094 2096 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2095 2097 force_shadow_repo_refresh=False):
2096 2098 _ = translator
2097 2099 merge_check = cls()
2098 2100
2099 2101 # title has WIP:
2100 2102 if pull_request.work_in_progress:
2101 2103 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2102 2104
2103 2105             msg = _('WIP marker in title prevents an accidental merge.')
2104 2106 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2105 2107 if fail_early:
2106 2108 return merge_check
2107 2109
2108 2110 # permissions to merge
2109 2111 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2110 2112 if not user_allowed_to_merge:
2111 2113             log.debug("MergeCheck: cannot merge, user not allowed to merge.")
2112 2114
2113 2115 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2114 2116 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2115 2117 if fail_early:
2116 2118 return merge_check
2117 2119
2118 2120 # permission to merge into the target branch
2119 2121 target_commit_id = pull_request.target_ref_parts.commit_id
2120 2122 if pull_request.target_ref_parts.type == 'branch':
2121 2123 branch_name = pull_request.target_ref_parts.name
2122 2124 else:
2123 2125             # for mercurial we can always figure out the branch from the
2124 2126             # commit, e.g. in the case of a bookmark
2125 2127 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2126 2128 branch_name = target_commit.branch
2127 2129
2128 2130 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2129 2131 pull_request.target_repo.repo_name, branch_name)
2130 2132         if branch_perm == 'branch.none':
2131 2133 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2132 2134 branch_name, rule)
2133 2135 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2134 2136 if fail_early:
2135 2137 return merge_check
2136 2138
2137 2139 # review status, must be always present
2138 2140 review_status = pull_request.calculated_review_status()
2139 2141 merge_check.review_status = review_status
2140 2142 merge_check.reviewers_count = pull_request.reviewers_count
2141 2143 merge_check.observers_count = pull_request.observers_count
2142 2144
2143 2145 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2144 2146 if not status_approved and merge_check.reviewers_count:
2145 2147 log.debug("MergeCheck: cannot merge, approval is pending.")
2146 2148 msg = _('Pull request reviewer approval is pending.')
2147 2149
2148 2150 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2149 2151
2150 2152 if fail_early:
2151 2153 return merge_check
2152 2154
2153 2155 # left over TODOs
2154 2156 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2155 2157 if todos:
2156 2158             log.debug(
2157 2159                 "MergeCheck: cannot merge, %s unresolved TODOs left.", len(todos))
2158 2160
2159 2161 if len(todos) == 1:
2160 2162 msg = _('Cannot merge, {} TODO still not resolved.').format(
2161 2163 len(todos))
2162 2164 else:
2163 2165 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2164 2166 len(todos))
2165 2167
2166 2168 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2167 2169
2168 2170 if fail_early:
2169 2171 return merge_check
2170 2172
2171 2173 # merge possible, here is the filesystem simulation + shadow repo
2172 2174 merge_response, merge_status, msg = PullRequestModel().merge_status(
2173 2175 pull_request, translator=translator,
2174 2176 force_shadow_repo_refresh=force_shadow_repo_refresh)
2175 2177
2176 2178 merge_check.merge_possible = merge_status
2177 2179 merge_check.merge_msg = msg
2178 2180 merge_check.merge_response = merge_response
2179 2181
2180 2182 source_ref_id = pull_request.source_ref_parts.commit_id
2181 2183 target_ref_id = pull_request.target_ref_parts.commit_id
2182 2184
2183 2185 try:
2184 2186 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2185 2187 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2186 2188 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2187 2189 merge_check.source_commit.current_raw_id = source_commit.raw_id
2188 2190 merge_check.source_commit.previous_raw_id = source_ref_id
2189 2191
2190 2192 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2191 2193 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2192 2194 merge_check.target_commit.current_raw_id = target_commit.raw_id
2193 2195 merge_check.target_commit.previous_raw_id = target_ref_id
2194 2196 except (SourceRefMissing, TargetRefMissing):
2195 2197 pass
2196 2198
2197 2199 if not merge_status:
2198 2200 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2199 2201 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2200 2202
2201 2203 if fail_early:
2202 2204 return merge_check
2203 2205
2204 2206 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2205 2207 return merge_check
2206 2208
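    # Editor's usage sketch for the classmethod above; `request.translate`
    # and `auth_user` are assumed from a typical view context:
    #
    #   merge_check = MergeCheck.validate(
    #       pull_request, auth_user=auth_user,
    #       translator=request.translate, force_shadow_repo_refresh=False)
    #   if merge_check.failed:
    #       for error_type, message in merge_check.errors:
    #           log.warning('merge check %s: %s', error_type, message)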
2207 2209 @classmethod
2208 2210 def get_merge_conditions(cls, pull_request, translator):
2209 2211 _ = translator
2210 2212 merge_details = {}
2211 2213
2212 2214 model = PullRequestModel()
2213 2215 use_rebase = model._use_rebase_for_merging(pull_request)
2214 2216
2215 2217 if use_rebase:
2216 2218 merge_details['merge_strategy'] = dict(
2217 2219 details={},
2218 2220 message=_('Merge strategy: rebase')
2219 2221 )
2220 2222 else:
2221 2223 merge_details['merge_strategy'] = dict(
2222 2224 details={},
2223 2225 message=_('Merge strategy: explicit merge commit')
2224 2226 )
2225 2227
2226 2228 close_branch = model._close_branch_before_merging(pull_request)
2227 2229 if close_branch:
2228 2230 repo_type = pull_request.target_repo.repo_type
2229 2231 close_msg = ''
2230 2232 if repo_type == 'hg':
2231 2233 close_msg = _('Source branch will be closed before the merge.')
2232 2234 elif repo_type == 'git':
2233 2235 close_msg = _('Source branch will be deleted after the merge.')
2234 2236
2235 2237 merge_details['close_branch'] = dict(
2236 2238 details={},
2237 2239 message=close_msg
2238 2240 )
2239 2241
2240 2242 return merge_details
2241 2243
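    # Editor's illustration of a possible return value of
    # get_merge_conditions(); messages are shown untranslated:
    #
    #   {'merge_strategy': {'details': {}, 'message': 'Merge strategy: rebase'},
    #    'close_branch': {'details': {},
    #                     'message': 'Source branch will be closed before the merge.'}}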
2242 2244
2243 2245 ChangeTuple = collections.namedtuple(
2244 2246 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2245 2247
2246 2248 FileChangeTuple = collections.namedtuple(
2247 2249 'FileChangeTuple', ['added', 'modified', 'removed'])
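# Editor's illustration of the two helper tuples defined above, with
# made-up values:
#
#   ChangeTuple(added=['a1b2c3'], common=[], removed=[], total=1)
#   FileChangeTuple(added=['new_file.py'], modified=['setup.py'], removed=[])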