##// END OF EJS Templates
commits: re-implemented fetching a single commit for git case....
marcink -
r3740:dcd8fbea new-ui
parent child Browse files
Show More
@@ -1,502 +1,501 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23 import collections
24 24
25 25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 26 from pyramid.view import view_config
27 27 from pyramid.renderers import render
28 28 from pyramid.response import Response
29 29
30 30 from rhodecode.apps._base import RepoAppView
31 31
32 32 from rhodecode.lib import diffs, codeblocks
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35 35
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.diffs import (
38 38 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
39 39 get_diff_whitespace_flag)
40 40 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
41 41 import rhodecode.lib.helpers as h
42 42 from rhodecode.lib.utils2 import safe_unicode, str2bool
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 RepositoryError, CommitDoesNotExistError)
46 46 from rhodecode.model.db import ChangesetComment, ChangesetStatus
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import CommentsModel
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 def _update_with_GET(params, request):
56 56 for k in ['diff1', 'diff2', 'diff']:
57 57 params[k] += request.GET.getall(k)
58 58
59 59
60 60
61 61
62 62
class RepoCommitsView(RepoAppView):
    """
    Views for showing single commits / commit ranges: rendered diff pages,
    raw/patch/download output, and commit comment CRUD endpoints.
    """

    def load_default_context(self):
        # common template context shared by all views in this class
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo

        return c

    def _is_diff_cache_enabled(self, target_repo):
        # per-repo setting controlling whether rendered diffs are cached
        caching_enabled = self._get_general_setting(
            target_repo, 'rhodecode_diff_cache')
        log.debug('Diff caching enabled: %s', caching_enabled)
        return caching_enabled

    def _commit(self, commit_id_range, method):
        """
        Shared implementation for the show/raw/patch/download views.

        :param commit_id_range: single commit id, or ``id1...id2`` range.
        :param method: one of 'show', 'raw', 'patch', 'download' selecting
            the response format.
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.fulldiff = self.request.GET.get('fulldiff')

        # fetch global flags of ignore ws or context lines
        diff_context = get_diff_context(self.request)
        hide_whitespace_changes = get_diff_whitespace_flag(self.request)

        # diff_limit will cut off the whole diff if the limit is applied
        # otherwise it will just hide the big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        # get ranges of commit ids if preset
        commit_range = commit_id_range.split('...')[:2]

        try:
            pre_load = ['affected_files', 'author', 'branch', 'date',
                        'message', 'parents']

            if len(commit_range) == 2:
                commits = self.rhodecode_vcs_repo.get_commits(
                    start_id=commit_range[0], end_id=commit_range[1],
                    pre_load=pre_load, translate_tags=False)
                commits = list(commits)
            else:
                commits = [self.rhodecode_vcs_repo.get_commit(
                    commit_id=commit_id_range, pre_load=pre_load)]

            c.commit_ranges = commits
            if not c.commit_ranges:
                raise RepositoryError('The commit range returned an empty result')
        except CommitDoesNotExistError as e:
            msg = _('No such commit exists. Org exception: `{}`').format(e)
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except Exception:
            log.exception("General failure")
            raise HTTPNotFound()

        c.changes = OrderedDict()
        c.lines_added = 0
        c.lines_deleted = 0

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit

        c.commit_statuses = ChangesetStatus.STATUSES
        c.inline_comments = []
        c.files = []

        c.statuses = []
        c.comments = []
        c.unresolved_comments = []
        if len(c.commit_ranges) == 1:
            commit = c.commit_ranges[0]
            c.comments = CommentsModel().get_comments(
                self.db_repo.repo_id,
                revision=commit.raw_id)
            c.statuses.append(ChangesetStatusModel().get_status(
                self.db_repo.repo_id, commit.raw_id))
            # comments from PR
            statuses = ChangesetStatusModel().get_statuses(
                self.db_repo.repo_id, commit.raw_id,
                with_revisions=True)
            prs = set(st.pull_request for st in statuses
                      if st.pull_request is not None)
            # from associated statuses, check the pull requests, and
            # show comments from them
            for pr in prs:
                c.comments.extend(pr.comments)

            c.unresolved_comments = CommentsModel()\
                .get_commit_unresolved_todos(commit.raw_id)

        diff = None
        # Iterate over ranges (default commit view is always one commit)
        for commit in c.commit_ranges:
            c.changes[commit.raw_id] = []

            commit2 = commit
            commit1 = commit.first_parent

            if method == 'show':
                inline_comments = CommentsModel().get_inline_comments(
                    self.db_repo.repo_id, revision=commit.raw_id)
                c.inline_cnt = CommentsModel().get_inline_comments_count(
                    inline_comments)
                c.inline_comments = inline_comments

                cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
                    self.db_repo)
                cache_file_path = diff_cache_exist(
                    cache_path, 'diff', commit.raw_id,
                    hide_whitespace_changes, diff_context, c.fulldiff)

                caching_enabled = self._is_diff_cache_enabled(self.db_repo)
                force_recache = str2bool(self.request.GET.get('force_recache'))

                cached_diff = None
                if caching_enabled:
                    cached_diff = load_cached_diff(cache_file_path)

                has_proper_diff_cache = cached_diff and cached_diff.get('diff')
                if not force_recache and has_proper_diff_cache:
                    diffset = cached_diff['diff']
                else:
                    vcs_diff = self.rhodecode_vcs_repo.get_diff(
                        commit1, commit2,
                        ignore_whitespace=hide_whitespace_changes,
                        context=diff_context)

                    diff_processor = diffs.DiffProcessor(
                        vcs_diff, format='newdiff', diff_limit=diff_limit,
                        file_limit=file_limit, show_full_diff=c.fulldiff)

                    _parsed = diff_processor.prepare()

                    diffset = codeblocks.DiffSet(
                        repo_name=self.db_repo_name,
                        source_node_getter=codeblocks.diffset_node_getter(commit1),
                        target_node_getter=codeblocks.diffset_node_getter(commit2))

                    diffset = self.path_filter.render_patchset_filtered(
                        diffset, _parsed, commit1.raw_id, commit2.raw_id)

                    # save cached diff
                    if caching_enabled:
                        cache_diff(cache_file_path, diffset, None)

                c.limited_diff = diffset.limited_diff
                c.changes[commit.raw_id] = diffset
            else:
                # TODO(marcink): no cache usage here...
                _diff = self.rhodecode_vcs_repo.get_diff(
                    commit1, commit2,
                    ignore_whitespace=hide_whitespace_changes, context=diff_context)
                diff_processor = diffs.DiffProcessor(
                    _diff, format='newdiff', diff_limit=diff_limit,
                    file_limit=file_limit, show_full_diff=c.fulldiff)
                # downloads/raw we only need RAW diff nothing else
                diff = self.path_filter.get_raw_patch(diff_processor)
                c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]

        # sort comments by how they were generated
        c.comments = sorted(c.comments, key=lambda x: x.comment_id)

        if len(c.commit_ranges) == 1:
            c.commit = c.commit_ranges[0]
            c.parent_tmpl = ''.join(
                '# Parent %s\n' % x.raw_id for x in c.commit.parents)

        if method == 'download':
            response = Response(diff)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s.diff' % commit_id_range[:12])
            return response
        elif method == 'patch':
            c.diff = safe_unicode(diff)
            patch = render(
                'rhodecode:templates/changeset/patch_changeset.mako',
                self._get_template_context(c), self.request)
            response = Response(patch)
            response.content_type = 'text/plain'
            return response
        elif method == 'raw':
            response = Response(diff)
            response.content_type = 'text/plain'
            return response
        elif method == 'show':
            if len(c.commit_ranges) == 1:
                html = render(
                    'rhodecode:templates/changeset/changeset.mako',
                    self._get_template_context(c), self.request)
                return Response(html)
            else:
                c.ancestor = None
                c.target_repo = self.db_repo
                html = render(
                    'rhodecode:templates/changeset/changeset_range.mako',
                    self._get_template_context(c), self.request)
                return Response(html)

        raise HTTPBadRequest()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit', request_method='GET',
        renderer=None)
    def repo_commit_show(self):
        # rendered HTML page for a single commit or commit range
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='show')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_raw', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_commit_raw_deprecated', request_method='GET',
        renderer=None)
    def repo_commit_raw(self):
        # raw text/plain diff of the commit
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='raw')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_patch', request_method='GET',
        renderer=None)
    def repo_commit_patch(self):
        # diff rendered through the patch template (adds patch headers)
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='patch')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_download', request_method='GET',
        renderer=None)
    def repo_commit_download(self):
        # raw diff served as an attachment download
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='download')

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_create', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_create(self):
        """
        Create a comment (optionally with a status change) on one commit,
        or on each id sent in the ``commit_ids`` POST field.
        """
        _ = self.request.translate
        commit_id = self.request.matchdict['commit_id']

        c = self.load_default_context()
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)

        if status:
            text = text or (_('Status change %(transition_icon)s %(status)s')
                            % {'transition_icon': '>',
                               'status': ChangesetStatus.get_status_lbl(status)})

        multi_commit_ids = []
        for _commit_id in self.request.POST.get('commit_ids', '').split(','):
            if _commit_id not in ['', None, EmptyCommit.raw_id]:
                if _commit_id not in multi_commit_ids:
                    multi_commit_ids.append(_commit_id)

        commit_ids = multi_commit_ids or [commit_id]

        comment = None
        for current_id in filter(None, commit_ids):
            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_db_user.user_id,
                commit_id=current_id,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status else None),
                status_change_type=status,
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            # get status if set !
            if status:
                # if latest status was from pull request and it's closed
                # disallow changing status !
                # dont_allow_on_closed_pull_request = True !

                try:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_db_user.user_id,
                        comment,
                        revision=current_id,
                        dont_allow_on_closed_pull_request=True
                    )
                except StatusChangeOnClosedPullRequestError:
                    msg = _('Changing the status of a commit associated with '
                            'a closed pull request is not allowed')
                    log.exception(msg)
                    h.flash(msg, category='warning')
                    raise HTTPFound(h.route_path(
                        'repo_commit', repo_name=self.db_repo_name,
                        commit_id=current_id))

        # finalize, commit and redirect
        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_preview', request_method='POST',
        renderer='string', xhr=True)
    def repo_commit_comment_preview(self):
        # Technically a CSRF token is not needed as no state changes with this
        # call. However, as this is a POST is better to have it, so automated
        # tools don't flag it as potential CSRF.
        # Post is required because the payload could be bigger than the maximum
        # allowed by GET.

        text = self.request.POST.get('text')
        renderer = self.request.POST.get('renderer') or 'rst'
        if text:
            return h.render(text, renderer=renderer, mentions=True)
        return ''

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_delete', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_delete(self):
        """
        Delete a commit comment; allowed for super admins, the comment
        author, or repo admins of the comment's own repository.
        """
        commit_id = self.request.matchdict['commit_id']
        comment_id = self.request.matchdict['comment_id']

        comment = ChangesetComment.get_or_404(comment_id)
        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # comment already deleted in another call probably
            return True

        is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
        super_admin = h.HasPermissionAny('hg.admin')()
        comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
        is_repo_comment = comment.repo.repo_name == self.db_repo_name
        comment_repo_admin = is_repo_admin and is_repo_comment

        if super_admin or comment_owner or comment_repo_admin:
            CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
            Session().commit()
            return True
        else:
            log.warning('No permissions for user %s to delete comment_id: %s',
                        self._rhodecode_db_user, comment_id)
            raise HTTPNotFound()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_commit_data(self):
        # JSON metadata of a single commit; EmptyCommit on lookup failure
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
        except CommitDoesNotExistError as e:
            return EmptyCommit(message=str(e))

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_children', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_commit_children(self):
        # JSON list of child commits; empty list for unknown commit id
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            children = commit.children
        except CommitDoesNotExistError:
            children = []

        result = {"results": children}
        return result

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_parents', request_method='GET',
        renderer='json_ext')
    def repo_commit_parents(self):
        # JSON list of parent commits; empty list for unknown commit id
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            parents = commit.parents
        except CommitDoesNotExistError:
            parents = []
        result = {"results": parents}
        return result
@@ -1,105 +1,106 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT inmemory module
23 23 """
24 24
25 25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 26 from rhodecode.lib.utils import safe_str
27 27 from rhodecode.lib.vcs.backends import base
28 28
29 29
class GitInMemoryCommit(base.BaseInMemoryCommit):
    """In-memory commit builder for the git backend."""

    def commit(self, message, author, parents=None, branch=None, date=None,
               **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created `GitCommit`. Updates repository's
        `commit_ids`.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: `datetime.datetime` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises `CommitError`: if any error occurs while committing
        """
        self.check_integrity(parents)
        if branch is None:
            branch = self.repository.DEFAULT_BRANCH_NAME

        ENCODING = "UTF-8"

        # tree of the first parent becomes the base tree for this commit
        commit_tree = None
        if self.parents[0]:
            commit_tree = self.parents[0]._commit['tree']

        updated = []
        for node in self.added + self.changed:
            # text nodes are encoded; binary content goes through untouched
            if not node.is_binary:
                content = node.content.encode(ENCODING)
            else:
                content = node.content
            updated.append({
                'path': node.path,
                'node_path': node.name.encode(ENCODING),
                'content': content,
                'mode': node.mode,
            })

        removed = [node.path for node in self.removed]

        date, tz = date_to_timestamp_plus_offset(date)

        # TODO: johbo: Make kwargs explicit and check if this is needed.
        author_time = kwargs.pop('author_time', date)
        author_tz = kwargs.pop('author_timezone', tz)

        commit_data = {
            'parents': [p._commit['id'] for p in self.parents if p],
            'author': safe_str(author),
            'committer': safe_str(author),
            'encoding': ENCODING,
            'message': safe_str(message),
            'commit_time': int(date),
            'author_time': int(author_time),
            'commit_timezone': tz,
            'author_timezone': author_tz,
        }

        commit_id = self.repository._remote.commit(
            commit_data, branch, commit_tree, updated, removed)

        # Update vcs repository object; guard against duplicates so an
        # already-known id does not get appended (and re-indexed) twice
        if commit_id not in self.repository.commit_ids:
            self.repository.commit_ids.append(commit_id)
            self.repository._rebuild_cache(self.repository.commit_ids)

        # invalidate parsed refs after commit
        self.repository._refs = self.repository._get_refs()
        self.repository.branches = self.repository._get_branches()
        tip = self.repository.get_commit()
        self.reset()
        return tip
@@ -1,1012 +1,1031 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference)
39 39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError,
44 44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 45
46 46
# Matches an abbreviated (12 char) or full (40 char) hexadecimal commit sha.
# NOTE(review): the previous pattern r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$'
# had misplaced brackets: '[' became part of the character class and ']' was
# matched literally, so the alternation effectively accepted any string merely
# *starting* with 12 hex-ish chars. Explicit grouping pins both lengths.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
50 50
51 51
52 52 class GitRepository(BaseRepository):
53 53 """
54 54 Git repository backend.
55 55 """
56 56 DEFAULT_BRANCH_NAME = 'master'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
def __init__(self, repo_path, config=None, create=False, src_url=None,
             do_workspace_checkout=False, with_wire=None, bare=False):
    """
    GitRepository constructor: normalizes the path, stores config, and
    initializes (or validates) the on-disk repository.
    """
    self.path = safe_str(os.path.abspath(repo_path))
    self.config = config if config else self.get_default_config()
    self.with_wire = with_wire

    self._init_repo(create, src_url, do_workspace_checkout, bare)

    # caches: commit id -> index mapping, filled by _rebuild_cache
    self._commit_ids = {}
71 71
@LazyProperty
def _remote(self):
    # lazily-created RPC proxy to the vcsserver git backend
    return connection.Git(self.path, self.config, with_wire=self.with_wire)

@LazyProperty
def bare(self):
    # True when the repository has no working directory
    return self._remote.bare()

@LazyProperty
def head(self):
    # current HEAD sha as reported by the remote backend
    return self._remote.head()

@LazyProperty
def commit_ids(self):
    """
    Returns list of commit ids, in ascending order. Being lazy
    attribute allows external tools to inject commit ids from cache.
    """
    commit_ids = self._get_all_commit_ids()
    self._rebuild_cache(commit_ids)
    return commit_ids
93 93
94 94 def _rebuild_cache(self, commit_ids):
95 95 self._commit_ids = dict((commit_id, index)
96 96 for index, commit_id in enumerate(commit_ids))
97 97
def run_git_command(self, cmd, **opts):
    """
    Runs given ``cmd`` as git command and returns tuple
    (stdout, stderr).

    :param cmd: git command to be executed, as a list of arguments
    :param opts: env options to pass into Subprocess command
    :raises ValueError: if ``cmd`` is not a list
    """
    if not isinstance(cmd, list):
        raise ValueError('cmd must be a list, got %s instead' % type(cmd))

    skip_stderr_log = opts.pop('skip_stderr_log', False)
    out, err = self._remote.run_git_command(cmd, **opts)
    if err and not skip_stderr_log:
        log.debug('Stderr output of git command "%s":\n%s', cmd, err)
    return out, err
114 114
115 115 @staticmethod
116 116 def check_url(url, config):
117 117 """
118 118 Function will check given url and try to verify if it's a valid
119 119 link. Sometimes it may happened that git will issue basic
120 120 auth request that can cause whole API to hang when used from python
121 121 or other external calls.
122 122
123 123 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 124 when the return code is non 200
125 125 """
126 126 # check first if it's not an url
127 127 if os.path.isdir(url) or url.startswith('file:'):
128 128 return True
129 129
130 130 if '+' in url.split('://', 1)[0]:
131 131 url = url.split('+', 1)[1]
132 132
133 133 # Request the _remote to verify the url
134 134 return connection.Git.check_url(url, config.serialize())
135 135
136 136 @staticmethod
137 137 def is_valid_repository(path):
138 138 if os.path.isdir(os.path.join(path, '.git')):
139 139 return True
140 140 # check case of bare repository
141 141 try:
142 142 GitRepository(path)
143 143 return True
144 144 except VCSError:
145 145 pass
146 146 return False
147 147
def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
               bare=False):
    """
    Create a new repository on disk (optionally cloning ``src_url``) or
    validate that ``self.path`` already contains a git repository.

    :raises RepositoryError: on conflicting options, pre-existing paths,
        invalid repository paths, or underlying OS errors.
    """
    if create and os.path.exists(self.path):
        raise RepositoryError(
            "Cannot create repository at %s, location already exist"
            % self.path)

    if bare and do_workspace_checkout:
        raise RepositoryError("Cannot update a bare repository")
    try:

        if src_url:
            # check URL before any actions
            GitRepository.check_url(src_url, self.config)

        if create:
            os.makedirs(self.path, mode=0o755)

            if bare:
                self._remote.init_bare()
            else:
                self._remote.init()

            if src_url and bare:
                # bare repository only allows a fetch and checkout is not allowed
                self.fetch(src_url, commit_ids=None)
            elif src_url:
                self.pull(src_url, commit_ids=None,
                          update_after=do_workspace_checkout)

        else:
            if not self._remote.assert_correct_path():
                raise RepositoryError(
                    'Path "%s" does not contain a Git repository' %
                    (self.path,))

    # TODO: johbo: check if we have to translate the OSError here
    except OSError as err:
        raise RepositoryError(err)
187 187
188 188 def _get_all_commit_ids(self, filters=None):
189 189 # we must check if this repo is not empty, since later command
190 190 # fails if it is. And it's cheaper to ask than throw the subprocess
191 191 # errors
192 192
193 193 head = self._remote.head(show_exc=False)
194 194 if not head:
195 195 return []
196 196
197 197 rev_filter = ['--branches', '--tags']
198 198 extra_filter = []
199 199
200 200 if filters:
201 201 if filters.get('since'):
202 202 extra_filter.append('--since=%s' % (filters['since']))
203 203 if filters.get('until'):
204 204 extra_filter.append('--until=%s' % (filters['until']))
205 205 if filters.get('branch_name'):
206 206 rev_filter = ['--tags']
207 207 extra_filter.append(filters['branch_name'])
208 208 rev_filter.extend(extra_filter)
209 209
210 210 # if filters.get('start') or filters.get('end'):
211 211 # # skip is offset, max-count is limit
212 212 # if filters.get('start'):
213 213 # extra_filter += ' --skip=%s' % filters['start']
214 214 # if filters.get('end'):
215 215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216 216
217 217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 218 try:
219 219 output, __ = self.run_git_command(cmd)
220 220 except RepositoryError:
221 221 # Can be raised for empty repositories
222 222 return []
223 223 return output.splitlines()
224 224
def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
    """
    Resolve a commit reference (sha, short numeric index, branch/tag name,
    full ref path, or tip alias) to a full commit id.

    :param commit_id_or_idx: commit sha, numeric index, ref name, or one of
        the tip aliases (None, '', 'tip', 'HEAD', 'head', -1).
    :param translate_tag: accepted for interface compatibility with other
        backends; not used by the git lookup itself.
    :raises CommitDoesNotExistError: when the reference cannot be resolved.
    """
    def is_null(value):
        return len(value) == commit_id_or_idx.count('0')

    if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
        return self.commit_ids[-1]

    is_bstr = isinstance(commit_id_or_idx, (str, unicode))
    if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
        or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
        # numeric index into commit_ids
        try:
            commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
        except Exception:
            msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
            raise CommitDoesNotExistError(msg)

    elif is_bstr:
        # check full path ref, eg. refs/heads/master
        ref_id = self._refs.get(commit_id_or_idx)
        if ref_id:
            return ref_id

        # check branch name
        ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
        if ref_id:
            return ref_id

        # check tag name
        ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
        if ref_id:
            return ref_id

        if (not SHA_PATTERN.match(commit_id_or_idx) or
                commit_id_or_idx not in self.commit_ids):
            msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
            raise CommitDoesNotExistError(msg)

    # Ensure we return full id
    if not SHA_PATTERN.match(str(commit_id_or_idx)):
        raise CommitDoesNotExistError(
            "Given commit id %s not recognized" % commit_id_or_idx)
    return commit_id_or_idx
273 268
274 269 def get_hook_location(self):
275 270 """
276 271 returns absolute path to location where hooks are stored
277 272 """
278 273 loc = os.path.join(self.path, 'hooks')
279 274 if not self.bare:
280 275 loc = os.path.join(self.path, '.git', 'hooks')
281 276 return loc
282 277
283 278 @LazyProperty
284 279 def last_change(self):
285 280 """
286 281 Returns last change made on this repository as
287 282 `datetime.datetime` object.
288 283 """
289 284 try:
290 285 return self.get_commit().date
291 286 except RepositoryError:
292 287 tzoffset = makedate()[1]
293 288 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294 289
295 290 def _get_fs_mtime(self):
296 291 idx_loc = '' if self.bare else '.git'
297 292 # fallback to filesystem
298 293 in_path = os.path.join(self.path, idx_loc, "index")
299 294 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 295 if os.path.exists(in_path):
301 296 return os.stat(in_path).st_mtime
302 297 else:
303 298 return os.stat(he_path).st_mtime
304 299
305 300 @LazyProperty
306 301 def description(self):
307 302 description = self._remote.get_description()
308 303 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309 304
310 305 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 306 if self.is_empty():
312 307 return OrderedDict()
313 308
314 309 result = []
315 310 for ref, sha in self._refs.iteritems():
316 311 if ref.startswith(prefix):
317 312 ref_name = ref
318 313 if strip_prefix:
319 314 ref_name = ref[len(prefix):]
320 315 result.append((safe_unicode(ref_name), sha))
321 316
322 317 def get_name(entry):
323 318 return entry[0]
324 319
325 320 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326 321
327 322 def _get_branches(self):
328 323 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329 324
330 325 @LazyProperty
331 326 def branches(self):
332 327 return self._get_branches()
333 328
334 329 @LazyProperty
335 330 def branches_closed(self):
336 331 return {}
337 332
338 333 @LazyProperty
339 334 def bookmarks(self):
340 335 return {}
341 336
342 337 @LazyProperty
343 338 def branches_all(self):
344 339 all_branches = {}
345 340 all_branches.update(self.branches)
346 341 all_branches.update(self.branches_closed)
347 342 return all_branches
348 343
349 344 @LazyProperty
350 345 def tags(self):
351 346 return self._get_tags()
352 347
353 348 def _get_tags(self):
354 349 return self._get_refs_entries(
355 350 prefix='refs/tags/', strip_prefix=True, reverse=True)
356 351
357 352 def tag(self, name, user, commit_id=None, message=None, date=None,
358 353 **kwargs):
359 354 # TODO: fix this method to apply annotated tags correct with message
360 355 """
361 356 Creates and returns a tag for the given ``commit_id``.
362 357
363 358 :param name: name for new tag
364 359 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
365 360 :param commit_id: commit id for which new tag would be created
366 361 :param message: message of the tag's commit
367 362 :param date: date of tag's commit
368 363
369 364 :raises TagAlreadyExistError: if tag with same name already exists
370 365 """
371 366 if name in self.tags:
372 367 raise TagAlreadyExistError("Tag %s already exists" % name)
373 368 commit = self.get_commit(commit_id=commit_id)
374 369 message = message or "Added tag %s for commit %s" % (
375 370 name, commit.raw_id)
376 371 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
377 372
378 373 self._refs = self._get_refs()
379 374 self.tags = self._get_tags()
380 375 return commit
381 376
382 377 def remove_tag(self, name, user, message=None, date=None):
383 378 """
384 379 Removes tag with the given ``name``.
385 380
386 381 :param name: name of the tag to be removed
387 382 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 383 :param message: message of the tag's removal commit
389 384 :param date: date of tag's removal commit
390 385
391 386 :raises TagDoesNotExistError: if tag with given name does not exists
392 387 """
393 388 if name not in self.tags:
394 389 raise TagDoesNotExistError("Tag %s does not exist" % name)
395 390 tagpath = vcspath.join(
396 391 self._remote.get_refs_path(), 'refs', 'tags', name)
397 392 try:
398 393 os.remove(tagpath)
399 394 self._refs = self._get_refs()
400 395 self.tags = self._get_tags()
401 396 except OSError as e:
402 397 raise RepositoryError(e.strerror)
403 398
404 399 def _get_refs(self):
405 400 return self._remote.get_refs()
406 401
407 402 @LazyProperty
408 403 def _refs(self):
409 404 return self._get_refs()
410 405
411 406 @property
412 407 def _ref_tree(self):
413 408 node = tree = {}
414 409 for ref, sha in self._refs.iteritems():
415 410 path = ref.split('/')
416 411 for bit in path[:-1]:
417 412 node = node.setdefault(bit, {})
418 413 node[path[-1]] = sha
419 414 node = tree
420 415 return tree
421 416
422 417 def get_remote_ref(self, ref_name):
423 418 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 419 try:
425 420 return self._refs[ref_key]
426 421 except Exception:
427 422 return
428 423
429 424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
430 425 """
431 426 Returns `GitCommit` object representing commit from git repository
432 427 at the given `commit_id` or head (most recent commit) if None given.
433 428 """
429 if self.is_empty():
430 raise EmptyRepositoryError("There are no commits yet")
431
434 432 if commit_id is not None:
435 433 self._validate_commit_id(commit_id)
434 try:
435 # we have cached idx, use it without contacting the remote
436 idx = self._commit_ids[commit_id]
437 return GitCommit(self, commit_id, idx, pre_load=pre_load)
438 except KeyError:
439 pass
440
436 441 elif commit_idx is not None:
437 442 self._validate_commit_idx(commit_idx)
438 commit_id = commit_idx
439 commit_id = self._get_commit_id(commit_id)
443 try:
444 _commit_id = self.commit_ids[commit_idx]
445 if commit_idx < 0:
446 commit_idx = self.commit_ids.index(_commit_id)
447 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
448 except IndexError:
449 commit_id = commit_idx
450 else:
451 commit_id = "tip"
452
453 commit_id = self._lookup_commit(commit_id)
454 remote_idx = None
455 if translate_tag:
456 # Need to call remote to translate id for tagging scenario
457 remote_data = self._remote.get_object(commit_id)
458 commit_id = remote_data["commit_id"]
459 remote_idx = remote_data["idx"]
460
440 461 try:
441 if translate_tag:
442 # Need to call remote to translate id for tagging scenario
443 commit_id = self._remote.get_object(commit_id)["commit_id"]
444 462 idx = self._commit_ids[commit_id]
445 463 except KeyError:
446 raise RepositoryError("Cannot get object with id %s" % commit_id)
464 idx = remote_idx or 0
447 465
448 466 return GitCommit(self, commit_id, idx, pre_load=pre_load)
449 467
450 468 def get_commits(
451 469 self, start_id=None, end_id=None, start_date=None, end_date=None,
452 470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
453 471 """
454 472 Returns generator of `GitCommit` objects from start to end (both
455 473 are inclusive), in ascending date order.
456 474
457 475 :param start_id: None, str(commit_id)
458 476 :param end_id: None, str(commit_id)
459 477 :param start_date: if specified, commits with commit date less than
460 478 ``start_date`` would be filtered out from returned set
461 479 :param end_date: if specified, commits with commit date greater than
462 480 ``end_date`` would be filtered out from returned set
463 481 :param branch_name: if specified, commits not reachable from given
464 482 branch would be filtered out from returned set
465 483 :param show_hidden: Show hidden commits such as obsolete or hidden from
466 484 Mercurial evolve
467 485 :raise BranchDoesNotExistError: If given `branch_name` does not
468 486 exist.
469 487 :raise CommitDoesNotExistError: If commits for given `start` or
470 488 `end` could not be found.
471 489
472 490 """
473 491 if self.is_empty():
474 492 raise EmptyRepositoryError("There are no commits yet")
493
475 494 self._validate_branch_name(branch_name)
476 495
477 496 if start_id is not None:
478 497 self._validate_commit_id(start_id)
479 498 if end_id is not None:
480 499 self._validate_commit_id(end_id)
481 500
482 start_raw_id = self._get_commit_id(start_id)
501 start_raw_id = self._lookup_commit(start_id)
483 502 start_pos = self._commit_ids[start_raw_id] if start_id else None
484 end_raw_id = self._get_commit_id(end_id)
503 end_raw_id = self._lookup_commit(end_id)
485 504 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
486 505
487 506 if None not in [start_id, end_id] and start_pos > end_pos:
488 507 raise RepositoryError(
489 508 "Start commit '%s' cannot be after end commit '%s'" %
490 509 (start_id, end_id))
491 510
492 511 if end_pos is not None:
493 512 end_pos += 1
494 513
495 514 filter_ = []
496 515 if branch_name:
497 516 filter_.append({'branch_name': branch_name})
498 517 if start_date and not end_date:
499 518 filter_.append({'since': start_date})
500 519 if end_date and not start_date:
501 520 filter_.append({'until': end_date})
502 521 if start_date and end_date:
503 522 filter_.append({'since': start_date})
504 523 filter_.append({'until': end_date})
505 524
506 525 # if start_pos or end_pos:
507 526 # filter_.append({'start': start_pos})
508 527 # filter_.append({'end': end_pos})
509 528
510 529 if filter_:
511 530 revfilters = {
512 531 'branch_name': branch_name,
513 532 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
514 533 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
515 534 'start': start_pos,
516 535 'end': end_pos,
517 536 }
518 537 commit_ids = self._get_all_commit_ids(filters=revfilters)
519 538
520 539 # pure python stuff, it's slow due to walker walking whole repo
521 540 # def get_revs(walker):
522 541 # for walker_entry in walker:
523 542 # yield walker_entry.commit.id
524 543 # revfilters = {}
525 544 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
526 545 else:
527 546 commit_ids = self.commit_ids
528 547
529 548 if start_pos or end_pos:
530 549 commit_ids = commit_ids[start_pos: end_pos]
531 550
532 551 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
533 552 translate_tag=translate_tags)
534 553
535 554 def get_diff(
536 555 self, commit1, commit2, path='', ignore_whitespace=False,
537 556 context=3, path1=None):
538 557 """
539 558 Returns (git like) *diff*, as plain text. Shows changes introduced by
540 559 ``commit2`` since ``commit1``.
541 560
542 561 :param commit1: Entry point from which diff is shown. Can be
543 562 ``self.EMPTY_COMMIT`` - in this case, patch showing all
544 563 the changes since empty state of the repository until ``commit2``
545 564 :param commit2: Until which commits changes should be shown.
546 565 :param ignore_whitespace: If set to ``True``, would not show whitespace
547 566 changes. Defaults to ``False``.
548 567 :param context: How many lines before/after changed lines should be
549 568 shown. Defaults to ``3``.
550 569 """
551 570 self._validate_diff_commits(commit1, commit2)
552 571 if path1 is not None and path1 != path:
553 572 raise ValueError("Diff of two different paths not supported.")
554 573
555 574 flags = [
556 575 '-U%s' % context, '--full-index', '--binary', '-p',
557 576 '-M', '--abbrev=40']
558 577 if ignore_whitespace:
559 578 flags.append('-w')
560 579
561 580 if commit1 == self.EMPTY_COMMIT:
562 581 cmd = ['show'] + flags + [commit2.raw_id]
563 582 else:
564 583 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
565 584
566 585 if path:
567 586 cmd.extend(['--', path])
568 587
569 588 stdout, __ = self.run_git_command(cmd)
570 589 # If we used 'show' command, strip first few lines (until actual diff
571 590 # starts)
572 591 if commit1 == self.EMPTY_COMMIT:
573 592 lines = stdout.splitlines()
574 593 x = 0
575 594 for line in lines:
576 595 if line.startswith('diff'):
577 596 break
578 597 x += 1
579 598 # Append new line just like 'diff' command do
580 599 stdout = '\n'.join(lines[x:]) + '\n'
581 600 return GitDiff(stdout)
582 601
583 602 def strip(self, commit_id, branch_name):
584 603 commit = self.get_commit(commit_id=commit_id)
585 604 if commit.merge:
586 605 raise Exception('Cannot reset to merge commit')
587 606
588 607 # parent is going to be the new head now
589 608 commit = commit.parents[0]
590 609 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
591 610
592 611 self.commit_ids = self._get_all_commit_ids()
593 612 self._rebuild_cache(self.commit_ids)
594 613
595 614 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
596 615 if commit_id1 == commit_id2:
597 616 return commit_id1
598 617
599 618 if self != repo2:
600 619 commits = self._remote.get_missing_revs(
601 620 commit_id1, commit_id2, repo2.path)
602 621 if commits:
603 622 commit = repo2.get_commit(commits[-1])
604 623 if commit.parents:
605 624 ancestor_id = commit.parents[0].raw_id
606 625 else:
607 626 ancestor_id = None
608 627 else:
609 628 # no commits from other repo, ancestor_id is the commit_id2
610 629 ancestor_id = commit_id2
611 630 else:
612 631 output, __ = self.run_git_command(
613 632 ['merge-base', commit_id1, commit_id2])
614 633 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
615 634
616 635 return ancestor_id
617 636
618 637 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
619 638 repo1 = self
620 639 ancestor_id = None
621 640
622 641 if commit_id1 == commit_id2:
623 642 commits = []
624 643 elif repo1 != repo2:
625 644 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
626 645 repo2.path)
627 646 commits = [
628 647 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
629 648 for commit_id in reversed(missing_ids)]
630 649 else:
631 650 output, __ = repo1.run_git_command(
632 651 ['log', '--reverse', '--pretty=format: %H', '-s',
633 652 '%s..%s' % (commit_id1, commit_id2)])
634 653 commits = [
635 654 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
636 655 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
637 656
638 657 return commits
639 658
640 659 @LazyProperty
641 660 def in_memory_commit(self):
642 661 """
643 662 Returns ``GitInMemoryCommit`` object for this repository.
644 663 """
645 664 return GitInMemoryCommit(self)
646 665
647 666 def pull(self, url, commit_ids=None, update_after=False):
648 667 """
649 668 Pull changes from external location. Pull is different in GIT
650 669 that fetch since it's doing a checkout
651 670
652 671 :param commit_ids: Optional. Can be set to a list of commit ids
653 672 which shall be pulled from the other repository.
654 673 """
655 674 refs = None
656 675 if commit_ids is not None:
657 676 remote_refs = self._remote.get_remote_refs(url)
658 677 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
659 678 self._remote.pull(url, refs=refs, update_after=update_after)
660 679 self._remote.invalidate_vcs_cache()
661 680
662 681 def fetch(self, url, commit_ids=None):
663 682 """
664 683 Fetch all git objects from external location.
665 684 """
666 685 self._remote.sync_fetch(url, refs=commit_ids)
667 686 self._remote.invalidate_vcs_cache()
668 687
669 688 def push(self, url):
670 689 refs = None
671 690 self._remote.sync_push(url, refs=refs)
672 691
673 692 def set_refs(self, ref_name, commit_id):
674 693 self._remote.set_refs(ref_name, commit_id)
675 694
676 695 def remove_ref(self, ref_name):
677 696 self._remote.remove_ref(ref_name)
678 697
679 698 def _update_server_info(self):
680 699 """
681 700 runs gits update-server-info command in this repo instance
682 701 """
683 702 self._remote.update_server_info()
684 703
685 704 def _current_branch(self):
686 705 """
687 706 Return the name of the current branch.
688 707
689 708 It only works for non bare repositories (i.e. repositories with a
690 709 working copy)
691 710 """
692 711 if self.bare:
693 712 raise RepositoryError('Bare git repos do not have active branches')
694 713
695 714 if self.is_empty():
696 715 return None
697 716
698 717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
699 718 return stdout.strip()
700 719
701 720 def _checkout(self, branch_name, create=False, force=False):
702 721 """
703 722 Checkout a branch in the working directory.
704 723
705 724 It tries to create the branch if create is True, failing if the branch
706 725 already exists.
707 726
708 727 It only works for non bare repositories (i.e. repositories with a
709 728 working copy)
710 729 """
711 730 if self.bare:
712 731 raise RepositoryError('Cannot checkout branches in a bare git repo')
713 732
714 733 cmd = ['checkout']
715 734 if force:
716 735 cmd.append('-f')
717 736 if create:
718 737 cmd.append('-b')
719 738 cmd.append(branch_name)
720 739 self.run_git_command(cmd, fail_on_stderr=False)
721 740
722 741 def _identify(self):
723 742 """
724 743 Return the current state of the working directory.
725 744 """
726 745 if self.bare:
727 746 raise RepositoryError('Bare git repos do not have active branches')
728 747
729 748 if self.is_empty():
730 749 return None
731 750
732 751 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
733 752 return stdout.strip()
734 753
735 754 def _local_clone(self, clone_path, branch_name, source_branch=None):
736 755 """
737 756 Create a local clone of the current repo.
738 757 """
739 758 # N.B.(skreft): the --branch option is required as otherwise the shallow
740 759 # clone will only fetch the active branch.
741 760 cmd = ['clone', '--branch', branch_name,
742 761 self.path, os.path.abspath(clone_path)]
743 762
744 763 self.run_git_command(cmd, fail_on_stderr=False)
745 764
746 765 # if we get the different source branch, make sure we also fetch it for
747 766 # merge conditions
748 767 if source_branch and source_branch != branch_name:
749 768 # check if the ref exists.
750 769 shadow_repo = GitRepository(os.path.abspath(clone_path))
751 770 if shadow_repo.get_remote_ref(source_branch):
752 771 cmd = ['fetch', self.path, source_branch]
753 772 self.run_git_command(cmd, fail_on_stderr=False)
754 773
755 774 def _local_fetch(self, repository_path, branch_name, use_origin=False):
756 775 """
757 776 Fetch a branch from a local repository.
758 777 """
759 778 repository_path = os.path.abspath(repository_path)
760 779 if repository_path == self.path:
761 780 raise ValueError('Cannot fetch from the same repository')
762 781
763 782 if use_origin:
764 783 branch_name = '+{branch}:refs/heads/{branch}'.format(
765 784 branch=branch_name)
766 785
767 786 cmd = ['fetch', '--no-tags', '--update-head-ok',
768 787 repository_path, branch_name]
769 788 self.run_git_command(cmd, fail_on_stderr=False)
770 789
771 790 def _local_reset(self, branch_name):
772 791 branch_name = '{}'.format(branch_name)
773 792 cmd = ['reset', '--hard', branch_name, '--']
774 793 self.run_git_command(cmd, fail_on_stderr=False)
775 794
776 795 def _last_fetch_heads(self):
777 796 """
778 797 Return the last fetched heads that need merging.
779 798
780 799 The algorithm is defined at
781 800 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
782 801 """
783 802 if not self.bare:
784 803 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
785 804 else:
786 805 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
787 806
788 807 heads = []
789 808 with open(fetch_heads_path) as f:
790 809 for line in f:
791 810 if ' not-for-merge ' in line:
792 811 continue
793 812 line = re.sub('\t.*', '', line, flags=re.DOTALL)
794 813 heads.append(line)
795 814
796 815 return heads
797 816
798 817 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
799 818 return GitRepository(shadow_repository_path)
800 819
801 820 def _local_pull(self, repository_path, branch_name, ff_only=True):
802 821 """
803 822 Pull a branch from a local repository.
804 823 """
805 824 if self.bare:
806 825 raise RepositoryError('Cannot pull into a bare git repository')
807 826 # N.B.(skreft): The --ff-only option is to make sure this is a
808 827 # fast-forward (i.e., we are only pulling new changes and there are no
809 828 # conflicts with our current branch)
810 829 # Additionally, that option needs to go before --no-tags, otherwise git
811 830 # pull complains about it being an unknown flag.
812 831 cmd = ['pull']
813 832 if ff_only:
814 833 cmd.append('--ff-only')
815 834 cmd.extend(['--no-tags', repository_path, branch_name])
816 835 self.run_git_command(cmd, fail_on_stderr=False)
817 836
818 837 def _local_merge(self, merge_message, user_name, user_email, heads):
819 838 """
820 839 Merge the given head into the checked out branch.
821 840
822 841 It will force a merge commit.
823 842
824 843 Currently it raises an error if the repo is empty, as it is not possible
825 844 to create a merge commit in an empty repo.
826 845
827 846 :param merge_message: The message to use for the merge commit.
828 847 :param heads: the heads to merge.
829 848 """
830 849 if self.bare:
831 850 raise RepositoryError('Cannot merge into a bare git repository')
832 851
833 852 if not heads:
834 853 return
835 854
836 855 if self.is_empty():
837 856 # TODO(skreft): do somehting more robust in this case.
838 857 raise RepositoryError(
839 858 'Do not know how to merge into empty repositories yet')
840 859
841 860 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
842 861 # commit message. We also specify the user who is doing the merge.
843 862 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
844 863 '-c', 'user.email=%s' % safe_str(user_email),
845 864 'merge', '--no-ff', '-m', safe_str(merge_message)]
846 865 cmd.extend(heads)
847 866 try:
848 867 output = self.run_git_command(cmd, fail_on_stderr=False)
849 868 except RepositoryError:
850 869 # Cleanup any merge leftovers
851 870 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
852 871 raise
853 872
854 873 def _local_push(
855 874 self, source_branch, repository_path, target_branch,
856 875 enable_hooks=False, rc_scm_data=None):
857 876 """
858 877 Push the source_branch to the given repository and target_branch.
859 878
860 879 Currently it if the target_branch is not master and the target repo is
861 880 empty, the push will work, but then GitRepository won't be able to find
862 881 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
863 882 pointing to master, which does not exist).
864 883
865 884 It does not run the hooks in the target repo.
866 885 """
867 886 # TODO(skreft): deal with the case in which the target repo is empty,
868 887 # and the target_branch is not master.
869 888 target_repo = GitRepository(repository_path)
870 889 if (not target_repo.bare and
871 890 target_repo._current_branch() == target_branch):
872 891 # Git prevents pushing to the checked out branch, so simulate it by
873 892 # pulling into the target repository.
874 893 target_repo._local_pull(self.path, source_branch)
875 894 else:
876 895 cmd = ['push', os.path.abspath(repository_path),
877 896 '%s:%s' % (source_branch, target_branch)]
878 897 gitenv = {}
879 898 if rc_scm_data:
880 899 gitenv.update({'RC_SCM_DATA': rc_scm_data})
881 900
882 901 if not enable_hooks:
883 902 gitenv['RC_SKIP_HOOKS'] = '1'
884 903 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
885 904
886 905 def _get_new_pr_branch(self, source_branch, target_branch):
887 906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
888 907 pr_branches = []
889 908 for branch in self.branches:
890 909 if branch.startswith(prefix):
891 910 pr_branches.append(int(branch[len(prefix):]))
892 911
893 912 if not pr_branches:
894 913 branch_id = 0
895 914 else:
896 915 branch_id = max(pr_branches) + 1
897 916
898 917 return '%s%d' % (prefix, branch_id)
899 918
900 919 def _maybe_prepare_merge_workspace(
901 920 self, repo_id, workspace_id, target_ref, source_ref):
902 921 shadow_repository_path = self._get_shadow_repository_path(
903 922 repo_id, workspace_id)
904 923 if not os.path.exists(shadow_repository_path):
905 924 self._local_clone(
906 925 shadow_repository_path, target_ref.name, source_ref.name)
907 926 log.debug(
908 927 'Prepared shadow repository in %s', shadow_repository_path)
909 928
910 929 return shadow_repository_path
911 930
912 931 def _merge_repo(self, repo_id, workspace_id, target_ref,
913 932 source_repo, source_ref, merge_message,
914 933 merger_name, merger_email, dry_run=False,
915 934 use_rebase=False, close_branch=False):
916 935
917 936 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
918 937 'rebase' if use_rebase else 'merge', dry_run)
919 938 if target_ref.commit_id != self.branches[target_ref.name]:
920 939 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
921 940 target_ref.commit_id, self.branches[target_ref.name])
922 941 return MergeResponse(
923 942 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
924 943 metadata={'target_ref': target_ref})
925 944
926 945 shadow_repository_path = self._maybe_prepare_merge_workspace(
927 946 repo_id, workspace_id, target_ref, source_ref)
928 947 shadow_repo = self._get_shadow_instance(shadow_repository_path)
929 948
930 949 # checkout source, if it's different. Otherwise we could not
931 950 # fetch proper commits for merge testing
932 951 if source_ref.name != target_ref.name:
933 952 if shadow_repo.get_remote_ref(source_ref.name):
934 953 shadow_repo._checkout(source_ref.name, force=True)
935 954
936 955 # checkout target, and fetch changes
937 956 shadow_repo._checkout(target_ref.name, force=True)
938 957
939 958 # fetch/reset pull the target, in case it is changed
940 959 # this handles even force changes
941 960 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
942 961 shadow_repo._local_reset(target_ref.name)
943 962
944 963 # Need to reload repo to invalidate the cache, or otherwise we cannot
945 964 # retrieve the last target commit.
946 965 shadow_repo = self._get_shadow_instance(shadow_repository_path)
947 966 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
948 967 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
949 968 target_ref, target_ref.commit_id,
950 969 shadow_repo.branches[target_ref.name])
951 970 return MergeResponse(
952 971 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
953 972 metadata={'target_ref': target_ref})
954 973
955 974 # calculate new branch
956 975 pr_branch = shadow_repo._get_new_pr_branch(
957 976 source_ref.name, target_ref.name)
958 977 log.debug('using pull-request merge branch: `%s`', pr_branch)
959 978 # checkout to temp branch, and fetch changes
960 979 shadow_repo._checkout(pr_branch, create=True)
961 980 try:
962 981 shadow_repo._local_fetch(source_repo.path, source_ref.name)
963 982 except RepositoryError:
964 983 log.exception('Failure when doing local fetch on '
965 984 'shadow repo: %s', shadow_repo)
966 985 return MergeResponse(
967 986 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
968 987 metadata={'source_ref': source_ref})
969 988
970 989 merge_ref = None
971 990 merge_failure_reason = MergeFailureReason.NONE
972 991 metadata = {}
973 992 try:
974 993 shadow_repo._local_merge(merge_message, merger_name, merger_email,
975 994 [source_ref.commit_id])
976 995 merge_possible = True
977 996
978 997 # Need to reload repo to invalidate the cache, or otherwise we
979 998 # cannot retrieve the merge commit.
980 999 shadow_repo = GitRepository(shadow_repository_path)
981 1000 merge_commit_id = shadow_repo.branches[pr_branch]
982 1001
983 1002 # Set a reference pointing to the merge commit. This reference may
984 1003 # be used to easily identify the last successful merge commit in
985 1004 # the shadow repository.
986 1005 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
987 1006 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
988 1007 except RepositoryError:
989 1008 log.exception('Failure when doing local merge on git shadow repo')
990 1009 merge_possible = False
991 1010 merge_failure_reason = MergeFailureReason.MERGE_FAILED
992 1011
993 1012 if merge_possible and not dry_run:
994 1013 try:
995 1014 shadow_repo._local_push(
996 1015 pr_branch, self.path, target_ref.name, enable_hooks=True,
997 1016 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
998 1017 merge_succeeded = True
999 1018 except RepositoryError:
1000 1019 log.exception(
1001 1020 'Failure when doing local push from the shadow '
1002 1021 'repository to the target repository at %s.', self.path)
1003 1022 merge_succeeded = False
1004 1023 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1005 1024 metadata['target'] = 'git shadow repo'
1006 1025 metadata['merge_commit'] = pr_branch
1007 1026 else:
1008 1027 merge_succeeded = False
1009 1028
1010 1029 return MergeResponse(
1011 1030 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1012 1031 metadata=metadata)
@@ -1,97 +1,98 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG inmemory module
23 23 """
24 24
25 25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 26 from rhodecode.lib.utils import safe_str
27 27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
28 28 from rhodecode.lib.vcs.exceptions import RepositoryError
29 29
30 30
31 31 class MercurialInMemoryCommit(BaseInMemoryCommit):
32 32
33 33 def commit(self, message, author, parents=None, branch=None, date=None,
34 34 **kwargs):
35 35 """
36 36 Performs in-memory commit (doesn't check workdir in any way) and
37 37 returns newly created `MercurialCommit`. Updates repository's
38 38 `commit_ids`.
39 39
40 40 :param message: message of the commit
41 41 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
42 42 :param parents: single parent or sequence of parents from which commit
43 43 would be derived
44 44 :param date: `datetime.datetime` instance. Defaults to
45 45 ``datetime.datetime.now()``.
46 46 :param branch: Optional. Branch name as unicode. Will use the backend's
47 47 default if not given.
48 48
49 49 :raises `RepositoryError`: if any error occurs while committing
50 50 """
51 51 self.check_integrity(parents)
52 52
53 53 if not isinstance(message, unicode) or not isinstance(author, unicode):
54 54 # TODO: johbo: Should be a TypeError
55 55 raise RepositoryError('Given message and author needs to be '
56 56 'an <unicode> instance got %r & %r instead'
57 57 % (type(message), type(author)))
58 58
59 59 if branch is None:
60 60 branch = self.repository.DEFAULT_BRANCH_NAME
61 61 kwargs['branch'] = safe_str(branch)
62 62
63 63 message = safe_str(message)
64 64 author = safe_str(author)
65 65
66 66 parent_ids = [p.raw_id if p else None for p in self.parents]
67 67
68 68 ENCODING = "UTF-8"
69 69
70 70 updated = []
71 71 for node in self.added + self.changed:
72 72 if node.is_binary:
73 73 content = node.content
74 74 else:
75 75 content = node.content.encode(ENCODING)
76 76 updated.append({
77 77 'path': node.path,
78 78 'content': content,
79 79 'mode': node.mode,
80 80 })
81 81
82 82 removed = [node.path for node in self.removed]
83 83
84 84 date, tz = date_to_timestamp_plus_offset(date)
85 85
86 new_id = self.repository._remote.commitctx(
86 commit_id = self.repository._remote.commitctx(
87 87 message=message, parents=parent_ids,
88 88 commit_time=date, commit_timezone=tz, user=author,
89 89 files=self.get_paths(), extra=kwargs, removed=removed,
90 90 updated=updated)
91 if commit_id not in self.repository.commit_ids:
92 self.repository.commit_ids.append(commit_id)
93 self.repository._rebuild_cache(self.repository.commit_ids)
91 94
92 self.repository.commit_ids.append(new_id)
93 self.repository._rebuild_cache(self.repository.commit_ids)
94 95 self.repository.branches = self.repository._get_branches()
95 96 tip = self.repository.get_commit()
96 97 self.reset()
97 98 return tip
@@ -1,941 +1,942 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
59 59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 60 do_workspace_checkout=False, with_wire=None, bare=False):
61 61 """
62 62 Raises RepositoryError if repository could not be find at the given
63 63 ``repo_path``.
64 64
65 65 :param repo_path: local path of the repository
66 66 :param config: config object containing the repo configuration
67 67 :param create=False: if set to True, would try to create repository if
68 68 it does not exist rather than raising exception
69 69 :param src_url=None: would try to clone repository from given location
70 70 :param do_workspace_checkout=False: sets update of working copy after
71 71 making a clone
72 72 :param bare: not used, compatible with other VCS
73 73 """
74 74
75 75 self.path = safe_str(os.path.abspath(repo_path))
76 76 # mercurial since 4.4.X requires certain configuration to be present
77 77 # because sometimes we init the repos with config we need to meet
78 78 # special requirements
79 79 self.config = config if config else self.get_default_config(
80 80 default=[('extensions', 'largefiles', '1')])
81 81 self.with_wire = with_wire
82 82
83 83 self._init_repo(create, src_url, do_workspace_checkout)
84 84
85 85 # caches
86 86 self._commit_ids = {}
87 87
88 88 @LazyProperty
89 89 def _remote(self):
90 90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 91
92 92 @LazyProperty
93 93 def commit_ids(self):
94 94 """
95 95 Returns list of commit ids, in ascending order. Being lazy
96 96 attribute allows external tools to inject shas from cache.
97 97 """
98 98 commit_ids = self._get_all_commit_ids()
99 99 self._rebuild_cache(commit_ids)
100 100 return commit_ids
101 101
102 102 def _rebuild_cache(self, commit_ids):
103 103 self._commit_ids = dict((commit_id, index)
104 104 for index, commit_id in enumerate(commit_ids))
105 105
106 106 @LazyProperty
107 107 def branches(self):
108 108 return self._get_branches()
109 109
110 110 @LazyProperty
111 111 def branches_closed(self):
112 112 return self._get_branches(active=False, closed=True)
113 113
114 114 @LazyProperty
115 115 def branches_all(self):
116 116 all_branches = {}
117 117 all_branches.update(self.branches)
118 118 all_branches.update(self.branches_closed)
119 119 return all_branches
120 120
121 121 def _get_branches(self, active=True, closed=False):
122 122 """
123 123 Gets branches for this repository
124 124 Returns only not closed active branches by default
125 125
126 126 :param active: return also active branches
127 127 :param closed: return also closed branches
128 128
129 129 """
130 130 if self.is_empty():
131 131 return {}
132 132
133 133 def get_name(ctx):
134 134 return ctx[0]
135 135
136 136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 137 self._remote.branches(active, closed).items()]
138 138
139 139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 140
141 141 @LazyProperty
142 142 def tags(self):
143 143 """
144 144 Gets tags for this repository
145 145 """
146 146 return self._get_tags()
147 147
148 148 def _get_tags(self):
149 149 if self.is_empty():
150 150 return {}
151 151
152 152 def get_name(ctx):
153 153 return ctx[0]
154 154
155 155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 156 self._remote.tags().items()]
157 157
158 158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 159
160 160 def tag(self, name, user, commit_id=None, message=None, date=None,
161 161 **kwargs):
162 162 """
163 163 Creates and returns a tag for the given ``commit_id``.
164 164
165 165 :param name: name for new tag
166 166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 167 :param commit_id: commit id for which new tag would be created
168 168 :param message: message of the tag's commit
169 169 :param date: date of tag's commit
170 170
171 171 :raises TagAlreadyExistError: if tag with same name already exists
172 172 """
173 173 if name in self.tags:
174 174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 175 commit = self.get_commit(commit_id=commit_id)
176 176 local = kwargs.setdefault('local', False)
177 177
178 178 if message is None:
179 179 message = "Added tag %s for commit %s" % (name, commit.short_id)
180 180
181 181 date, tz = date_to_timestamp_plus_offset(date)
182 182
183 183 self._remote.tag(
184 184 name, commit.raw_id, message, local, user, date, tz)
185 185 self._remote.invalidate_vcs_cache()
186 186
187 187 # Reinitialize tags
188 188 self.tags = self._get_tags()
189 189 tag_id = self.tags[name]
190 190
191 191 return self.get_commit(commit_id=tag_id)
192 192
193 193 def remove_tag(self, name, user, message=None, date=None):
194 194 """
195 195 Removes tag with the given `name`.
196 196
197 197 :param name: name of the tag to be removed
198 198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 199 :param message: message of the tag's removal commit
200 200 :param date: date of tag's removal commit
201 201
202 202 :raises TagDoesNotExistError: if tag with given name does not exists
203 203 """
204 204 if name not in self.tags:
205 205 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 206 if message is None:
207 207 message = "Removed tag %s" % name
208 208 local = False
209 209
210 210 date, tz = date_to_timestamp_plus_offset(date)
211 211
212 212 self._remote.tag(name, nullid, message, local, user, date, tz)
213 213 self._remote.invalidate_vcs_cache()
214 214 self.tags = self._get_tags()
215 215
216 216 @LazyProperty
217 217 def bookmarks(self):
218 218 """
219 219 Gets bookmarks for this repository
220 220 """
221 221 return self._get_bookmarks()
222 222
223 223 def _get_bookmarks(self):
224 224 if self.is_empty():
225 225 return {}
226 226
227 227 def get_name(ctx):
228 228 return ctx[0]
229 229
230 230 _bookmarks = [
231 231 (safe_unicode(n), hexlify(h)) for n, h in
232 232 self._remote.bookmarks().items()]
233 233
234 234 return OrderedDict(sorted(_bookmarks, key=get_name))
235 235
236 236 def _get_all_commit_ids(self):
237 237 return self._remote.get_all_commit_ids('visible')
238 238
239 239 def get_diff(
240 240 self, commit1, commit2, path='', ignore_whitespace=False,
241 241 context=3, path1=None):
242 242 """
243 243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 244 `commit2` since `commit1`.
245 245
246 246 :param commit1: Entry point from which diff is shown. Can be
247 247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 248 the changes since empty state of the repository until `commit2`
249 249 :param commit2: Until which commit changes should be shown.
250 250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 251 changes. Defaults to ``False``.
252 252 :param context: How many lines before/after changed lines should be
253 253 shown. Defaults to ``3``.
254 254 """
255 255 self._validate_diff_commits(commit1, commit2)
256 256 if path1 is not None and path1 != path:
257 257 raise ValueError("Diff of two different paths not supported.")
258 258
259 259 if path:
260 260 file_filter = [self.path, path]
261 261 else:
262 262 file_filter = None
263 263
264 264 diff = self._remote.diff(
265 265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 266 opt_git=True, opt_ignorews=ignore_whitespace,
267 267 context=context)
268 268 return MercurialDiff(diff)
269 269
270 270 def strip(self, commit_id, branch=None):
271 271 self._remote.strip(commit_id, update=False, backup="none")
272 272
273 273 self._remote.invalidate_vcs_cache()
274 274 self.commit_ids = self._get_all_commit_ids()
275 275 self._rebuild_cache(self.commit_ids)
276 276
277 277 def verify(self):
278 278 verify = self._remote.verify()
279 279
280 280 self._remote.invalidate_vcs_cache()
281 281 return verify
282 282
283 283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 284 if commit_id1 == commit_id2:
285 285 return commit_id1
286 286
287 287 ancestors = self._remote.revs_from_revspec(
288 288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 289 other_path=repo2.path)
290 290 return repo2[ancestors[0]].raw_id if ancestors else None
291 291
292 292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 293 if commit_id1 == commit_id2:
294 294 commits = []
295 295 else:
296 296 if merge:
297 297 indexes = self._remote.revs_from_revspec(
298 298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 300 else:
301 301 indexes = self._remote.revs_from_revspec(
302 302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 303 commit_id1, other_path=repo2.path)
304 304
305 305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 306 for idx in indexes]
307 307
308 308 return commits
309 309
310 310 @staticmethod
311 311 def check_url(url, config):
312 312 """
313 313 Function will check given url and try to verify if it's a valid
314 314 link. Sometimes it may happened that mercurial will issue basic
315 315 auth request that can cause whole API to hang when used from python
316 316 or other external calls.
317 317
318 318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 319 when the return code is non 200
320 320 """
321 321 # check first if it's not an local url
322 322 if os.path.isdir(url) or url.startswith('file:'):
323 323 return True
324 324
325 325 # Request the _remote to verify the url
326 326 return connection.Hg.check_url(url, config.serialize())
327 327
328 328 @staticmethod
329 329 def is_valid_repository(path):
330 330 return os.path.isdir(os.path.join(path, '.hg'))
331 331
332 332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
333 333 """
334 334 Function will check for mercurial repository in given path. If there
335 335 is no repository in that path it will raise an exception unless
336 336 `create` parameter is set to True - in that case repository would
337 337 be created.
338 338
339 339 If `src_url` is given, would try to clone repository from the
340 340 location at given clone_point. Additionally it'll make update to
341 341 working copy accordingly to `do_workspace_checkout` flag.
342 342 """
343 343 if create and os.path.exists(self.path):
344 344 raise RepositoryError(
345 345 "Cannot create repository at %s, location already exist"
346 346 % self.path)
347 347
348 348 if src_url:
349 349 url = str(self._get_url(src_url))
350 350 MercurialRepository.check_url(url, self.config)
351 351
352 352 self._remote.clone(url, self.path, do_workspace_checkout)
353 353
354 354 # Don't try to create if we've already cloned repo
355 355 create = False
356 356
357 357 if create:
358 358 os.makedirs(self.path, mode=0o755)
359 359
360 360 self._remote.localrepository(create)
361 361
362 362 @LazyProperty
363 363 def in_memory_commit(self):
364 364 return MercurialInMemoryCommit(self)
365 365
366 366 @LazyProperty
367 367 def description(self):
368 368 description = self._remote.get_config_value(
369 369 'web', 'description', untrusted=True)
370 370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371 371
372 372 @LazyProperty
373 373 def contact(self):
374 374 contact = (
375 375 self._remote.get_config_value("web", "contact") or
376 376 self._remote.get_config_value("ui", "username"))
377 377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378 378
379 379 @LazyProperty
380 380 def last_change(self):
381 381 """
382 382 Returns last change made on this repository as
383 383 `datetime.datetime` object.
384 384 """
385 385 try:
386 386 return self.get_commit().date
387 387 except RepositoryError:
388 388 tzoffset = makedate()[1]
389 389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390 390
391 391 def _get_fs_mtime(self):
392 392 # fallback to filesystem
393 393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 394 st_path = os.path.join(self.path, '.hg', "store")
395 395 if os.path.exists(cl_path):
396 396 return os.stat(cl_path).st_mtime
397 397 else:
398 398 return os.stat(st_path).st_mtime
399 399
400 400 def _get_url(self, url):
401 401 """
402 402 Returns normalized url. If schema is not given, would fall
403 403 to filesystem
404 404 (``file:///``) schema.
405 405 """
406 406 url = url.encode('utf8')
407 407 if url != 'default' and '://' not in url:
408 408 url = "file:" + urllib.pathname2url(url)
409 409 return url
410 410
411 411 def get_hook_location(self):
412 412 """
413 413 returns absolute path to location where hooks are stored
414 414 """
415 415 return os.path.join(self.path, '.hg', '.hgrc')
416 416
417 417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
418 418 """
419 419 Returns ``MercurialCommit`` object representing repository's
420 420 commit at the given `commit_id` or `commit_idx`.
421 421 """
422 422 if self.is_empty():
423 423 raise EmptyRepositoryError("There are no commits yet")
424 424
425 425 if commit_id is not None:
426 426 self._validate_commit_id(commit_id)
427 427 try:
428 # we have cached idx, use it without contacting the remote
428 429 idx = self._commit_ids[commit_id]
429 430 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
430 431 except KeyError:
431 432 pass
433
432 434 elif commit_idx is not None:
433 435 self._validate_commit_idx(commit_idx)
434 436 try:
435 id_ = self.commit_ids[commit_idx]
437 _commit_id = self.commit_ids[commit_idx]
436 438 if commit_idx < 0:
437 commit_idx += len(self.commit_ids)
438 return MercurialCommit(
439 self, id_, commit_idx, pre_load=pre_load)
439 commit_idx = self.commit_ids.index(_commit_id)
440
441 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
440 442 except IndexError:
441 443 commit_id = commit_idx
442 444 else:
443 445 commit_id = "tip"
444 446
445 447 if isinstance(commit_id, unicode):
446 448 commit_id = safe_str(commit_id)
447 449
448 450 try:
449 451 raw_id, idx = self._remote.lookup(commit_id, both=True)
450 452 except CommitDoesNotExistError:
451 msg = "Commit %s does not exist for %s" % (
452 commit_id, self)
453 msg = "Commit %s does not exist for %s" % (commit_id, self.name)
453 454 raise CommitDoesNotExistError(msg)
454 455
455 456 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456 457
457 458 def get_commits(
458 459 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 460 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
460 461 """
461 462 Returns generator of ``MercurialCommit`` objects from start to end
462 463 (both are inclusive)
463 464
464 465 :param start_id: None, str(commit_id)
465 466 :param end_id: None, str(commit_id)
466 467 :param start_date: if specified, commits with commit date less than
467 468 ``start_date`` would be filtered out from returned set
468 469 :param end_date: if specified, commits with commit date greater than
469 470 ``end_date`` would be filtered out from returned set
470 471 :param branch_name: if specified, commits not reachable from given
471 472 branch would be filtered out from returned set
472 473 :param show_hidden: Show hidden commits such as obsolete or hidden from
473 474 Mercurial evolve
474 475 :raise BranchDoesNotExistError: If given ``branch_name`` does not
475 476 exist.
476 477 :raise CommitDoesNotExistError: If commit for given ``start`` or
477 478 ``end`` could not be found.
478 479 """
479 480 # actually we should check now if it's not an empty repo
480 481 branch_ancestors = False
481 482 if self.is_empty():
482 483 raise EmptyRepositoryError("There are no commits yet")
483 484 self._validate_branch_name(branch_name)
484 485
485 486 if start_id is not None:
486 487 self._validate_commit_id(start_id)
487 488 c_start = self.get_commit(commit_id=start_id)
488 489 start_pos = self._commit_ids[c_start.raw_id]
489 490 else:
490 491 start_pos = None
491 492
492 493 if end_id is not None:
493 494 self._validate_commit_id(end_id)
494 495 c_end = self.get_commit(commit_id=end_id)
495 496 end_pos = max(0, self._commit_ids[c_end.raw_id])
496 497 else:
497 498 end_pos = None
498 499
499 500 if None not in [start_id, end_id] and start_pos > end_pos:
500 501 raise RepositoryError(
501 502 "Start commit '%s' cannot be after end commit '%s'" %
502 503 (start_id, end_id))
503 504
504 505 if end_pos is not None:
505 506 end_pos += 1
506 507
507 508 commit_filter = []
508 509
509 510 if branch_name and not branch_ancestors:
510 511 commit_filter.append('branch("%s")' % (branch_name,))
511 512 elif branch_name and branch_ancestors:
512 513 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
513 514
514 515 if start_date and not end_date:
515 516 commit_filter.append('date(">%s")' % (start_date,))
516 517 if end_date and not start_date:
517 518 commit_filter.append('date("<%s")' % (end_date,))
518 519 if start_date and end_date:
519 520 commit_filter.append(
520 521 'date(">%s") and date("<%s")' % (start_date, end_date))
521 522
522 523 if not show_hidden:
523 524 commit_filter.append('not obsolete()')
524 525 commit_filter.append('not hidden()')
525 526
526 527 # TODO: johbo: Figure out a simpler way for this solution
527 528 collection_generator = CollectionGenerator
528 529 if commit_filter:
529 530 commit_filter = ' and '.join(map(safe_str, commit_filter))
530 531 revisions = self._remote.rev_range([commit_filter])
531 532 collection_generator = MercurialIndexBasedCollectionGenerator
532 533 else:
533 534 revisions = self.commit_ids
534 535
535 536 if start_pos or end_pos:
536 537 revisions = revisions[start_pos:end_pos]
537 538
538 539 return collection_generator(self, revisions, pre_load=pre_load)
539 540
540 541 def pull(self, url, commit_ids=None):
541 542 """
542 543 Pull changes from external location.
543 544
544 545 :param commit_ids: Optional. Can be set to a list of commit ids
545 546 which shall be pulled from the other repository.
546 547 """
547 548 url = self._get_url(url)
548 549 self._remote.pull(url, commit_ids=commit_ids)
549 550 self._remote.invalidate_vcs_cache()
550 551
551 552 def fetch(self, url, commit_ids=None):
552 553 """
553 554 Backward compatibility with GIT fetch==pull
554 555 """
555 556 return self.pull(url, commit_ids=commit_ids)
556 557
557 558 def push(self, url):
558 559 url = self._get_url(url)
559 560 self._remote.sync_push(url)
560 561
561 562 def _local_clone(self, clone_path):
562 563 """
563 564 Create a local clone of the current repo.
564 565 """
565 566 self._remote.clone(self.path, clone_path, update_after_clone=True,
566 567 hooks=False)
567 568
568 569 def _update(self, revision, clean=False):
569 570 """
570 571 Update the working copy to the specified revision.
571 572 """
572 573 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
573 574 self._remote.update(revision, clean=clean)
574 575
575 576 def _identify(self):
576 577 """
577 578 Return the current state of the working directory.
578 579 """
579 580 return self._remote.identify().strip().rstrip('+')
580 581
581 582 def _heads(self, branch=None):
582 583 """
583 584 Return the commit ids of the repository heads.
584 585 """
585 586 return self._remote.heads(branch=branch).strip().split(' ')
586 587
587 588 def _ancestor(self, revision1, revision2):
588 589 """
589 590 Return the common ancestor of the two revisions.
590 591 """
591 592 return self._remote.ancestor(revision1, revision2)
592 593
593 594 def _local_push(
594 595 self, revision, repository_path, push_branches=False,
595 596 enable_hooks=False):
596 597 """
597 598 Push the given revision to the specified repository.
598 599
599 600 :param push_branches: allow to create branches in the target repo.
600 601 """
601 602 self._remote.push(
602 603 [revision], repository_path, hooks=enable_hooks,
603 604 push_branches=push_branches)
604 605
605 606 def _local_merge(self, target_ref, merge_message, user_name, user_email,
606 607 source_ref, use_rebase=False, dry_run=False):
607 608 """
608 609 Merge the given source_revision into the checked out revision.
609 610
610 611 Returns the commit id of the merge and a boolean indicating if the
611 612 commit needs to be pushed.
612 613 """
613 614 self._update(target_ref.commit_id, clean=True)
614 615
615 616 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
616 617 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
617 618
618 619 if ancestor == source_ref.commit_id:
619 620 # Nothing to do, the changes were already integrated
620 621 return target_ref.commit_id, False
621 622
622 623 elif ancestor == target_ref.commit_id and is_the_same_branch:
623 624 # In this case we should force a commit message
624 625 return source_ref.commit_id, True
625 626
626 627 if use_rebase:
627 628 try:
628 629 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
629 630 target_ref.commit_id)
630 631 self.bookmark(bookmark_name, revision=source_ref.commit_id)
631 632 self._remote.rebase(
632 633 source=source_ref.commit_id, dest=target_ref.commit_id)
633 634 self._remote.invalidate_vcs_cache()
634 635 self._update(bookmark_name, clean=True)
635 636 return self._identify(), True
636 637 except RepositoryError:
637 638 # The rebase-abort may raise another exception which 'hides'
638 639 # the original one, therefore we log it here.
639 640 log.exception('Error while rebasing shadow repo during merge.')
640 641
641 642 # Cleanup any rebase leftovers
642 643 self._remote.invalidate_vcs_cache()
643 644 self._remote.rebase(abort=True)
644 645 self._remote.invalidate_vcs_cache()
645 646 self._remote.update(clean=True)
646 647 raise
647 648 else:
648 649 try:
649 650 self._remote.merge(source_ref.commit_id)
650 651 self._remote.invalidate_vcs_cache()
651 652 self._remote.commit(
652 653 message=safe_str(merge_message),
653 654 username=safe_str('%s <%s>' % (user_name, user_email)))
654 655 self._remote.invalidate_vcs_cache()
655 656 return self._identify(), True
656 657 except RepositoryError:
657 658 # Cleanup any merge leftovers
658 659 self._remote.update(clean=True)
659 660 raise
660 661
661 662 def _local_close(self, target_ref, user_name, user_email,
662 663 source_ref, close_message=''):
663 664 """
664 665 Close the branch of the given source_revision
665 666
666 667 Returns the commit id of the close and a boolean indicating if the
667 668 commit needs to be pushed.
668 669 """
669 670 self._update(source_ref.commit_id)
670 671 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 672 try:
672 673 self._remote.commit(
673 674 message=safe_str(message),
674 675 username=safe_str('%s <%s>' % (user_name, user_email)),
675 676 close_branch=True)
676 677 self._remote.invalidate_vcs_cache()
677 678 return self._identify(), True
678 679 except RepositoryError:
679 680 # Cleanup any commit leftovers
680 681 self._remote.update(clean=True)
681 682 raise
682 683
683 684 def _is_the_same_branch(self, target_ref, source_ref):
684 685 return (
685 686 self._get_branch_name(target_ref) ==
686 687 self._get_branch_name(source_ref))
687 688
688 689 def _get_branch_name(self, ref):
689 690 if ref.type == 'branch':
690 691 return ref.name
691 692 return self._remote.ctx_branch(ref.commit_id)
692 693
693 694 def _maybe_prepare_merge_workspace(
694 695 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 696 shadow_repository_path = self._get_shadow_repository_path(
696 697 repo_id, workspace_id)
697 698 if not os.path.exists(shadow_repository_path):
698 699 self._local_clone(shadow_repository_path)
699 700 log.debug(
700 701 'Prepared shadow repository in %s', shadow_repository_path)
701 702
702 703 return shadow_repository_path
703 704
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository, using a shadow repository as a scratch area so the real
        repo is only touched by the final push.

        :param repo_id: id of the target repo, used to locate the shadow
            workspace on disk
        :param workspace_id: identifier of the merge workspace
        :param target_ref: Reference to merge into (must be a current head)
        :param source_repo: repository object to pull the source ref from
        :param source_ref: Reference to merge from
        :param merge_message: commit message for the merge commit
        :param merger_name: author name for the merge commit
        :param merger_email: author email for the merge commit
        :param dry_run: when True, perform the merge in the shadow repo but
            never push the result back to this repository
        :param use_rebase: rebase instead of merge
        :param close_branch: also create a branch-closing commit (only
            honoured when the source is an actual branch different from the
            target, and not combined with rebase or dry_run)
        :return: MergeResponse(merge_possible, merge_succeeded, merge_ref,
            failure_reason, metadata)
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # refuse to merge into anything that is not a current head
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # hg branches may have multiple heads; merging into an ambiguous
            # branch is rejected up front
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # NOTE(review): separator looks like it was meant to be
                # ',\n' rather than '\n,' — confirm before changing
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        # all work happens inside a shadow repo; the real repo is only
        # modified by the final push
        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    # hooks are enabled here since the push to the real repo
                    # must trigger them, unlike all shadow-only operations
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
851 852
852 853 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
853 854 config = self.config.copy()
854 855 if not enable_hooks:
855 856 config.clear_section('hooks')
856 857 return MercurialRepository(shadow_repository_path, config)
857 858
858 859 def _validate_pull_reference(self, reference):
859 860 if not (reference.name in self.bookmarks or
860 861 reference.name in self.branches or
861 862 self.get_commit(reference.commit_id)):
862 863 raise CommitDoesNotExistError(
863 864 'Unknown branch, bookmark or commit id')
864 865
865 866 def _local_pull(self, repository_path, reference):
866 867 """
867 868 Fetch a branch, bookmark or commit from a local repository.
868 869 """
869 870 repository_path = os.path.abspath(repository_path)
870 871 if repository_path == self.path:
871 872 raise ValueError('Cannot pull from the same repository')
872 873
873 874 reference_type_to_option_name = {
874 875 'book': 'bookmark',
875 876 'branch': 'branch',
876 877 }
877 878 option_name = reference_type_to_option_name.get(
878 879 reference.type, 'revision')
879 880
880 881 if option_name == 'revision':
881 882 ref = reference.commit_id
882 883 else:
883 884 ref = reference.name
884 885
885 886 options = {option_name: [ref]}
886 887 self._remote.pull_cmd(repository_path, hooks=False, **options)
887 888 self._remote.invalidate_vcs_cache()
888 889
889 890 def bookmark(self, bookmark, revision=None):
890 891 if isinstance(bookmark, unicode):
891 892 bookmark = safe_str(bookmark)
892 893 self._remote.bookmark(bookmark, revision=revision)
893 894 self._remote.invalidate_vcs_cache()
894 895
    def get_path_permissions(self, username):
        """
        Build a path permission checker for *username* from this repo's
        optional ``.hg/hgacl`` file, or return None when no such file exists.

        :raises exceptions.RepositoryRequirementError: when the hgacl file
            exists but cannot be parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # NOTE: closes over ``hgacl``, which is only assigned below
            # (inside the os.path.exists branch) before this helper runs
            svalue = None
            # user-specific entries win over 'default'; 'narrowacl' section
            # is checked before 'narrowhgacl'
            for section, option in [
                ('narrowacl', username + suffix),
                ('narrowacl', 'default' + suffix),
                ('narrowhgacl', username + suffix),
                ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            # '/' is always included; literal patterns (no wildcard) also
            # match everything beneath them via an added '<pattern>/*'
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # any parse failure is surfaced as a repo requirement error
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
935 936
936 937
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Commit collection generator whose items are numeric Mercurial revision
    indexes rather than commit hashes.
    """

    def _commit_factory(self, commit_id):
        # ``commit_id`` is a revision index here, hence ``commit_idx``
        return self.repo.get_commit(commit_idx=commit_id,
                                    pre_load=self.pre_load)
@@ -1,5154 +1,5156 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import warnings
33 33 import ipaddress
34 34 import functools
35 35 import traceback
36 36 import collections
37 37
38 38 from sqlalchemy import (
39 39 or_, and_, not_, func, TypeDecorator, event,
40 40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 42 Text, Float, PickleType)
43 43 from sqlalchemy.sql.expression import true, false, case
44 44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 45 from sqlalchemy.orm import (
46 46 relationship, joinedload, class_mapper, validates, aliased)
47 47 from sqlalchemy.ext.declarative import declared_attr
48 48 from sqlalchemy.ext.hybrid import hybrid_property
49 49 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 50 from sqlalchemy.dialects.mysql import LONGTEXT
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52 from pyramid import compat
53 53 from pyramid.threadlocal import get_current_request
54 54 from webhelpers.text import collapse, remove_formatting
55 55
56 56 from rhodecode.translation import _
57 57 from rhodecode.lib.vcs import get_vcs_instance
58 58 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
59 59 from rhodecode.lib.utils2 import (
60 60 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
61 61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
63 63 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
64 64 JsonRaw
65 65 from rhodecode.lib.ext_json import json
66 66 from rhodecode.lib.caching_query import FromCache
67 67 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
68 68 from rhodecode.lib.encrypt2 import Encryptor
69 69 from rhodecode.model.meta import Base, Session
70 70
URL_SEP = '/'
log = logging.getLogger(__name__)

# =============================================================================
# BASE CLASSES
# =============================================================================

# this is propagated from .ini file rhodecode.encrypted_values.secret or
# beaker.session.secret if first is not set.
# and initialized at environment.py
ENCRYPTION_KEY = None

# used to sort permissions by types, '#' used here is not allowed to be in
# usernames, and it's very early in sorted string.printable table.
PERMISSION_TYPE_SORT = {
    'admin': '####',
    'write': '###',
    'read': '##',
    'none': '#',
}
91 91
92 92
def display_user_sort(obj):
    """
    Sort function used to sort permissions in .permissions() function of
    Repository, RepoGroup, UserGroup. Also it put the default user in front
    of all other resources
    """
    # the default user always sorts first ('#####' precedes every prefix)
    if obj.username == User.DEFAULT_USER:
        return '#####'
    perm_type = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_type, '') + obj.username
104 104
105 105
def display_user_group_sort(obj):
    """
    Sort function used to sort permissions in .permissions() function of
    Repository, RepoGroup, UserGroup. Also it put the default user in front
    of all other resources
    """
    perm_type = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_type, '') + obj.users_group_name
115 115
116 116
def _hash_key(k):
    # normalize arbitrary cache-key input to a stable sha1 hex digest
    return sha1_safe(k)
119 119
120 120
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()

    :param qry: column-like object exposing ``in_()``
    :param items: values to filter on; chunked into groups of *limit*
    :param limit: maximum number of values per IN() clause
    :return: list of ``qry.in_(chunk)`` clauses covering all items
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    # fix: the original iterated with the Python-2-only ``xrange`` builtin;
    # a plain while loop is equivalent and works on both Python 2 and 3
    parts = []
    offset = 0
    total = len(items)
    while offset < total:
        parts.append(qry.in_(items[offset:offset + limit]))
        offset += limit

    return parts
142 142
143 143
# table arguments shared by all models: InnoDB/utf8 on MySQL and
# autoincrement semantics on SQLite
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
150 150
151 151
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts

    The algorithm is chosen by the ``rhodecode.encrypted_values.algorithm``
    config key ('aes', the default, or 'fernet'); any other value raises
    ValueError.
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value

        :raises ValueError: when *value* is already encrypted or the
            configured algorithm is unknown
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        # BUG FIX: the ValueError used to be constructed but never raised,
        # silently storing None for an unknown algorithm
        raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value

        :raises ValueError: when the configured algorithm is unknown
        """
        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            return validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        # BUG FIX: the ValueError used to be constructed but never raised,
        # and execution then crashed with NameError on the undefined
        # ``decrypted_data`` local
        raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
204 204
205 205
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        # only keys matching mapped columns are applied; extras are ignored
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        """Return a new SQLAlchemy query over this model."""
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Fetch instance by primary key; returns None for falsy id_."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch instance by primary key or raise pyramid HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        # non-integer ids can never match and are treated as missing
        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        """Return all rows of this model."""
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Mark the row with the given primary key deleted in the session."""
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Look up an instance of this class in the session identity map by
        attribute value, avoiding a database roundtrip. Returns None when
        no (or more than one) matching instance is present.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # ambiguous match: log loudly and fall through to None
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
310 310
311 311
class RhodeCodeSetting(Base, BaseModel):
    """
    Global application setting stored as a typed key/value pair; values are
    converted on access according to ``app_settings_type`` and transparently
    encrypted when the type carries an 'encrypted' marker.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters applied when reading app_settings_value
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always persisted as unicode (see setter below)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # type may be suffixed, e.g. 'unicode.encrypted'
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the base type (before any '.encrypted' suffix) is validated
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        """Return all settings whose name starts with *prefix*."""
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
396 396
397 397
class RhodeCodeUi(Base, BaseModel):
    """
    Global VCS ui/config entries (mirrors hgrc-style section/key/value
    rows), including the built-in hook definitions.
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
445 445
446 446
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository override of a RhodeCodeSetting; same typed key/value
    semantics but scoped to one repository (no encrypted-type handling).
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always persisted as unicode (see setter below)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
520 520
521 521
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of a RhodeCodeUi entry (VCS section/key/value
    row scoped to one repository).
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
552 552
553 553
class User(Base, BaseModel):
    """
    RhodeCode user account with its columns and relationships to every
    user-owned resource (repos, groups, permissions, tokens, gists, ...).
    """
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    # note: python attribute 'name' maps to DB column 'firstname'
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    # private storage behind the 'email' hybrid property (lowercased on set)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    # deprecated storage behind the 'api_key' hybrid property
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')

624 624 def __unicode__(self):
625 625 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
626 626 self.user_id, self.username)
627 627
    @hybrid_property
    def email(self):
        # read accessor for the private '_email' column
        return self._email
631 631
632 632 @email.setter
633 633 def email(self, val):
634 634 self._email = val.lower() if val else None
635 635
636 636 @hybrid_property
637 637 def first_name(self):
638 638 from rhodecode.lib import helpers as h
639 639 if self.name:
640 640 return h.escape(self.name)
641 641 return self.name
642 642
643 643 @hybrid_property
644 644 def last_name(self):
645 645 from rhodecode.lib import helpers as h
646 646 if self.lastname:
647 647 return h.escape(self.lastname)
648 648 return self.lastname
649 649
650 650 @hybrid_property
651 651 def api_key(self):
652 652 """
653 653 Fetch if exist an auth-token with role ALL connected to this user
654 654 """
655 655 user_auth_token = UserApiKeys.query()\
656 656 .filter(UserApiKeys.user_id == self.user_id)\
657 657 .filter(or_(UserApiKeys.expires == -1,
658 658 UserApiKeys.expires >= time.time()))\
659 659 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
660 660 if user_auth_token:
661 661 user_auth_token = user_auth_token.api_key
662 662
663 663 return user_auth_token
664 664
665 665 @api_key.setter
666 666 def api_key(self, val):
667 667 # don't allow to set API key this is deprecated for now
668 668 self._api_key = None
669 669
670 670 @property
671 671 def reviewer_pull_requests(self):
672 672 return PullRequestReviewers.query() \
673 673 .options(joinedload(PullRequestReviewers.pull_request)) \
674 674 .filter(PullRequestReviewers.user_id == self.user_id) \
675 675 .all()
676 676
    @property
    def firstname(self):
        # alias for future
        # legacy spelling kept for API compatibility; canonical column is `name`
        return self.name
681 681
682 682 @property
683 683 def emails(self):
684 684 other = UserEmailMap.query()\
685 685 .filter(UserEmailMap.user == self) \
686 686 .order_by(UserEmailMap.email_id.asc()) \
687 687 .all()
688 688 return [self.email] + [x.email for x in other]
689 689
690 690 @property
691 691 def auth_tokens(self):
692 692 auth_tokens = self.get_auth_tokens()
693 693 return [x.api_key for x in auth_tokens]
694 694
695 695 def get_auth_tokens(self):
696 696 return UserApiKeys.query()\
697 697 .filter(UserApiKeys.user == self)\
698 698 .order_by(UserApiKeys.user_api_key_id.asc())\
699 699 .all()
700 700
701 701 @LazyProperty
702 702 def feed_token(self):
703 703 return self.get_feed_token()
704 704
705 705 def get_feed_token(self, cache=True):
706 706 feed_tokens = UserApiKeys.query()\
707 707 .filter(UserApiKeys.user == self)\
708 708 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
709 709 if cache:
710 710 feed_tokens = feed_tokens.options(
711 711 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
712 712
713 713 feed_tokens = feed_tokens.all()
714 714 if feed_tokens:
715 715 return feed_tokens[0].api_key
716 716 return 'NO_FEED_TOKEN_AVAILABLE'
717 717
718 718 @classmethod
719 719 def get(cls, user_id, cache=False):
720 720 if not user_id:
721 721 return
722 722
723 723 user = cls.query()
724 724 if cache:
725 725 user = user.options(
726 726 FromCache("sql_cache_short", "get_users_%s" % user_id))
727 727 return user.get(user_id)
728 728
729 729 @classmethod
730 730 def extra_valid_auth_tokens(cls, user, role=None):
731 731 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
732 732 .filter(or_(UserApiKeys.expires == -1,
733 733 UserApiKeys.expires >= time.time()))
734 734 if role:
735 735 tokens = tokens.filter(or_(UserApiKeys.role == role,
736 736 UserApiKeys.role == UserApiKeys.ROLE_ALL))
737 737 return tokens.all()
738 738
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check whether the plain-text ``auth_token`` authenticates this user.

        :param auth_token: plain-text token value supplied by the caller
        :param roles: list of acceptable UserApiKeys roles; ROLE_ALL is always
            implicitly accepted in addition
        :param scope_repo_id: repo id of the calling context; a repo-scoped
            token only matches when its scope equals this id
        :return: True on a valid, non-expired token with an acceptable role
            (and matching repo scope, when the token has one), else False
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # only consider this user's tokens that have not expired
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plain-text and encrypted (hashed) buckets;
        # the ENC_PREF prefix marks encrypted storage
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted user tokens to check for authentication',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
795 795
796 796 @property
797 797 def ip_addresses(self):
798 798 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
799 799 return [x.ip_addr for x in ret]
800 800
801 801 @property
802 802 def username_and_name(self):
803 803 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
804 804
805 805 @property
806 806 def username_or_name_or_email(self):
807 807 full_name = self.full_name if self.full_name is not ' ' else None
808 808 return self.username or full_name or self.email
809 809
810 810 @property
811 811 def full_name(self):
812 812 return '%s %s' % (self.first_name, self.last_name)
813 813
814 814 @property
815 815 def full_name_or_username(self):
816 816 return ('%s %s' % (self.first_name, self.last_name)
817 817 if (self.first_name and self.last_name) else self.username)
818 818
819 819 @property
820 820 def full_contact(self):
821 821 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
822 822
823 823 @property
824 824 def short_contact(self):
825 825 return '%s %s' % (self.first_name, self.last_name)
826 826
    @property
    def is_admin(self):
        # readable alias over the `admin` column
        return self.admin
830 830
    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user.

        Extra keyword arguments are forwarded to the AuthUser constructor.
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
837 837
    @hybrid_property
    def user_data(self):
        # JSON blob stored in `_user_data`; always returns a dict, also on
        # missing or undecodable data
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        # only dicts are accepted; serialization failures are logged and
        # leave the stored value unchanged
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
856 856
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Look a user up by username.

        :param case_insensitive: compare usernames with lower-cased equality
        :param cache: cache the query result in the short SQL cache
        :param identity_cache: consult the session identity cache instead of
            the SQL cache; only takes effect when ``cache`` is also true
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
879 879
880 880 @classmethod
881 881 def get_by_auth_token(cls, auth_token, cache=False):
882 882 q = UserApiKeys.query()\
883 883 .filter(UserApiKeys.api_key == auth_token)\
884 884 .filter(or_(UserApiKeys.expires == -1,
885 885 UserApiKeys.expires >= time.time()))
886 886 if cache:
887 887 q = q.options(
888 888 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
889 889
890 890 match = q.first()
891 891 if match:
892 892 return match.user
893 893
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Look a user up by e-mail; when no user owns the address as primary
        e-mail, fall back to the extra-e-mail map (UserEmailMap).
        """

        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            # map row found -> its owning user; no row -> None
            ret = getattr(q.scalar(), 'user', None)

        return ret
923 923
924 924 @classmethod
925 925 def get_from_cs_author(cls, author):
926 926 """
927 927 Tries to get User objects out of commit author string
928 928
929 929 :param author:
930 930 """
931 931 from rhodecode.lib.helpers import email, author_name
932 932 # Valid email in the attribute passed, see if they're in the system
933 933 _email = email(author)
934 934 if _email:
935 935 user = cls.get_by_email(_email, case_insensitive=True)
936 936 if user:
937 937 return user
938 938 # Maybe we can match by username?
939 939 _author = author_name(author)
940 940 user = cls.get_by_username(_author, case_insensitive=True)
941 941 if user:
942 942 return user
943 943
944 944 def update_userdata(self, **kwargs):
945 945 usr = self
946 946 old = usr.user_data
947 947 old.update(**kwargs)
948 948 usr.user_data = old
949 949 Session().add(usr)
950 950 log.debug('updated userdata with ', kwargs)
951 951
    def update_lastlogin(self):
        """Update user lastlogin to now; caller is responsible for committing."""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)
957 957
    def update_password(self, new_password):
        """Store the crypted form of ``new_password``; caller commits."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)
963 963
964 964 @classmethod
965 965 def get_first_super_admin(cls):
966 966 user = User.query()\
967 967 .filter(User.admin == true()) \
968 968 .order_by(User.user_id.asc()) \
969 969 .first()
970 970
971 971 if user is None:
972 972 raise Exception('FATAL: Missing administrative account!')
973 973 return user
974 974
975 975 @classmethod
976 976 def get_all_super_admins(cls, only_active=False):
977 977 """
978 978 Returns all admin accounts sorted by username
979 979 """
980 980 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
981 981 if only_active:
982 982 qry = qry.filter(User.active == true())
983 983 return qry.all()
984 984
985 985 @classmethod
986 986 def get_default_user(cls, cache=False, refresh=False):
987 987 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
988 988 if user is None:
989 989 raise Exception('FATAL: Missing default account!')
990 990 if refresh:
991 991 # The default user might be based on outdated state which
992 992 # has been loaded from the cache.
993 993 # A call to refresh() ensures that the
994 994 # latest state from the database is used.
995 995 Session().refresh(user)
996 996 return user
997 997
    def _get_default_perms(self, user, suffix=''):
        # delegate extraction of default permissions to PermissionModel
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        # default permissions of this very user
        return self._get_default_perms(self, suffix)
1004 1004
    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
        by a placeholder value to prevent exposing this data by accident. In case
        this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
        the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        # tokens are masked by a fixed-length placeholder unless explicitly
        # requested via include_secrets
        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['auth_tokens'] = user.auth_tokens
        return data
1048 1048
1049 1049 def __json__(self):
1050 1050 data = {
1051 1051 'full_name': self.full_name,
1052 1052 'full_name_or_username': self.full_name_or_username,
1053 1053 'short_contact': self.short_contact,
1054 1054 'full_contact': self.full_contact,
1055 1055 }
1056 1056 data.update(self.get_api_data())
1057 1057 return data
1058 1058
1059 1059
class UserApiKeys(Base, BaseModel):
    """Auth tokens (API keys) with a role, optional expiry and optional
    repo / repo-group scope."""
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key', unique=True),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is deliberately absent here —
    # presumably so it cannot be picked as a general-purpose role; confirm.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # unix timestamp; -1 means "never expires"
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        # serializable representation with the raw token value
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API payload; the token value is obfuscated unless
        ``include_secrets`` is set."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # -1 marks a token that never expires
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # translated human label; unknown roles fall through unchanged
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; no scope means global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 characters followed by a mask; None when no token is set
        if self.api_key:
            return self.api_key[:4] + "****"
1160 1160
1161 1161
class UserEmailMap(Base, BaseModel):
    """Extra (alternative) e-mail addresses attached to a user."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # BUG FIX: message read "is present is user table" (typo)
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        """E-mail address; stored lower-cased by the setter."""
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lower-case; falsy values are stored as None
        self._email = val.lower() if val else None
1191 1191
1192 1192
class UserIpMap(Base, BaseModel):
    """Per-user IP white-list entries (address or CIDR range per row)."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # expand an address or CIDR into its [network, broadcast] boundaries;
        # strict=False accepts host addresses with a netmask
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1227 1227
1228 1228
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user; fingerprints are unique."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # stamped when the key is used; None until first use
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        # note: the raw key data is intentionally not serialized here
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
1263 1263
1264 1264
class UserLog(Base, BaseModel):
    """Audit/journal entries for user actions; user and repository links use
    SET NULL on delete so history survives deletions."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # journal entry schema versions
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    # denormalized copies kept so entries stay readable after FK is nulled
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias for the primary key
        return self.user_log_id

    @property
    def action_as_day(self):
        # calendar day of the action, for grouping in the journal
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1313 1313
1314 1314
class UserGroup(Base, BaseModel):
    """Named group of users, with its own permissions, owner and optional
    JSON `group_data` blob (e.g. external sync metadata)."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE(review): the class-name string below carries a trailing space; it
    # still resolves because relationship strings are evaluated as Python
    # expressions, but the trailing space looks accidental — confirm.
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        # decode the raw JSON column; always return a dict
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-side expression: compare against the raw column
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        # serialization failures are logged and leave data unchanged
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # extern_type inside group_data marks an externally synced group
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Look a user group up by name, optionally case-insensitively and
        through the short SQL cache."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by primary key; optionally via the SQL cache."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups

        Returns rows for super-admins, the owner, directly granted users and
        (optionally) members of user groups holding a permission.
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        # NOTE(review): the owner_row[0] accesses below assume with_owner is
        # True; with_owner=False combined with with_admins=True (or any perm
        # rows) would raise IndexError — confirm callers never do that.
        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups granted a permission on this group; with_members also
        embeds each group's member dicts."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        # delegate extraction of default permissions to PermissionModel
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        :param with_group_members: embed API data of every member user.
        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1520 1520
1521 1521
class UserGroupMember(Base, BaseModel):
    """Association row linking a single user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    # NOTE(review): '' defaults for the integer FK columns look odd — confirm
    # callers always pass real ids
    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id
1538 1538
1539 1539
class RepositoryField(Base, BaseModel):
    """Custom extra metadata field attached to a single repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'), # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        # use the PREFIX constant instead of a hard-coded 'ex_' so this
        # cannot drift out of sync with un_prefix_key()
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from `key` if present, else return as-is."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for `key` on `repo`, or None when missing."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1576 1576
1577 1577
class Repository(Base, BaseModel):
    """Database model of a single repository (git/hg/svn)."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates used by clone_url() to render clone URLs
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # creation lifecycle states stored in repo_state
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # origins of a repository lock (see lock()/locked)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    # separator used for group paths inside repo names
    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # accessed through the repo_name hybrid_property below
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    # sha1 of repo_name, kept in sync by the repo_name setter; backs uniqueness
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted; *_hidden properties mask passwords
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # '<rev_type>:<rev>' string; accessed through the landing_rev property
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # 'user_id:timestamp:reason' string; accessed through the locked property
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")
1685 1685
    def __unicode__(self):
        # e.g. <Repository('1:group/repo')>
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
                                   safe_unicode(self.repo_name))

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)
1694 1694
    @hybrid_property
    def landing_rev(self):
        # always should return [rev_type, rev]
        if self._landing_revision:
            # stored as '<rev_type>:<rev>'; bare values get type 'rev'
            _rev_info = self._landing_revision.split(':')
            if len(_rev_info) < 2:
                _rev_info.insert(0, 'rev')
            return [_rev_info[0], _rev_info[1]]
        return [None, None]

    @landing_rev.setter
    def landing_rev(self, val):
        # only accepts the stored '<rev_type>:<rev>' format
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
1711 1711
    @hybrid_property
    def locked(self):
        # lock is persisted as 'user_id:timestamp:reason'
        if self._locked:
            user_id, timelocked, reason = self._locked.split(':')
            lock_values = int(user_id), timelocked, reason
        else:
            # NOTE(review): locked state yields a tuple, unlocked a list —
            # callers only index/unpack, so both work, but it is inconsistent
            lock_values = [None, None, None]
        return lock_values

    @locked.setter
    def locked(self, val):
        # accepts [user_id, time, reason]; any falsy value clears the lock
        if val and isinstance(val, (list, tuple)):
            self._locked = ':'.join(map(str, val))
        else:
            self._locked = None
1727 1727
    @hybrid_property
    def changeset_cache(self):
        # cached metadata of the latest commit, stored as JSON in the DB;
        # falls back to an EmptyCommit-shaped dict when unset or unreadable
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = self.repo_id
            # round-trip through json to normalize types (e.g. dates)
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @changeset_cache.setter
    def changeset_cache(self, val):
        # serialize to JSON; failures are logged and keep the previous value
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @hybrid_property
    def repo_name(self):
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        # keep the sha1 hash column in sync; it backs the unique constraint
        self._repo_name = value
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1759 1759
1760 1760 @classmethod
1761 1761 def normalize_repo_name(cls, repo_name):
1762 1762 """
1763 1763 Normalizes os specific repo_name to the format internally stored inside
1764 1764 database using URL_SEP
1765 1765
1766 1766 :param cls:
1767 1767 :param repo_name:
1768 1768 """
1769 1769 return cls.NAME_SEP.join(repo_name.split(os.sep))
1770 1770
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Look up a repository by its exact name.

        :param cache: use the short-lived SQL cache region for the query
        :param identity_cache: prefer the identity-map cache; when it holds a
            value it is returned directly, otherwise an uncached query runs
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1787 1787
1788 1788 @classmethod
1789 1789 def get_by_id_or_repo_name(cls, repoid):
1790 1790 if isinstance(repoid, (int, long)):
1791 1791 try:
1792 1792 repo = cls.get(repoid)
1793 1793 except ValueError:
1794 1794 repo = None
1795 1795 else:
1796 1796 repo = cls.get_by_repo_name(repoid)
1797 1797 return repo
1798 1798
1799 1799 @classmethod
1800 1800 def get_by_full_path(cls, repo_full_path):
1801 1801 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1802 1802 repo_name = cls.normalize_repo_name(repo_name)
1803 1803 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1804 1804
    @classmethod
    def get_repo_forks(cls, repo_id):
        """Return a query of repositories forked from `repo_id`."""
        return cls.query().filter(Repository.fork_id == repo_id)

    @classmethod
    def base_path(cls):
        """
        Return the base filesystem path under which all repositories are
        stored (read from the RhodeCodeUi settings table, SQL-cached).
        """
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1820 1820
1821 1821 @classmethod
1822 1822 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1823 1823 case_insensitive=True, archived=False):
1824 1824 q = Repository.query()
1825 1825
1826 1826 if not archived:
1827 1827 q = q.filter(Repository.archived.isnot(true()))
1828 1828
1829 1829 if not isinstance(user_id, Optional):
1830 1830 q = q.filter(Repository.user_id == user_id)
1831 1831
1832 1832 if not isinstance(group_id, Optional):
1833 1833 q = q.filter(Repository.group_id == group_id)
1834 1834
1835 1835 if case_insensitive:
1836 1836 q = q.order_by(func.lower(Repository.repo_name))
1837 1837 else:
1838 1838 q = q.order_by(Repository.repo_name)
1839 1839
1840 1840 return q.all()
1841 1841
    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        # last path segment of the repo name, without any group prefix
        return self.repo_name.split(self.NAME_SEP)[-1]
1859 1859
1860 1860 @property
1861 1861 def groups_with_parents(self):
1862 1862 groups = []
1863 1863 if self.group is None:
1864 1864 return groups
1865 1865
1866 1866 cur_gr = self.group
1867 1867 groups.insert(0, cur_gr)
1868 1868 while 1:
1869 1869 gr = getattr(cur_gr, 'parent_group', None)
1870 1870 cur_gr = cur_gr.parent_group
1871 1871 if gr is None:
1872 1872 break
1873 1873 groups.insert(0, gr)
1874 1874
1875 1875 return groups
1876 1876
    @property
    def groups_and_repo(self):
        # (ancestor groups outermost-first, this repository) tuple
        return self.groups_with_parents, self
1880 1880
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        # same RhodeCodeUi lookup as base_path(); cached per-instance by
        # LazyProperty and in the short SQL cache region
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1891 1891
1892 1892 @property
1893 1893 def repo_full_path(self):
1894 1894 p = [self.repo_path]
1895 1895 # we need to split the name by / since this is how we store the
1896 1896 # names in the database, but that eventually needs to be converted
1897 1897 # into a valid system path
1898 1898 p += self.repo_name.split(self.NAME_SEP)
1899 1899 return os.path.join(*map(safe_unicode, p))
1900 1900
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        # all CacheKey rows registered under this repo's invalidation namespace
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()
1912 1912
    @property
    def cached_diffs_relative_dir(self):
        """
        Return a relative to the repository store path of cached diffs
        used for safe display for users, who shouldn't know the absolute store
        path
        """
        # parent dir of the repo name + basename of the absolute cache dir
        return os.path.join(
            os.path.dirname(self.repo_name),
            self.cached_diffs_dir.split(os.path.sep)[-1])
1923 1923
1924 1924 @property
1925 1925 def cached_diffs_dir(self):
1926 1926 path = self.repo_full_path
1927 1927 return os.path.join(
1928 1928 os.path.dirname(path),
1929 1929 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1930 1930
1931 1931 def cached_diffs(self):
1932 1932 diff_cache_dir = self.cached_diffs_dir
1933 1933 if os.path.isdir(diff_cache_dir):
1934 1934 return os.listdir(diff_cache_dir)
1935 1935 return []
1936 1936
1937 1937 def shadow_repos(self):
1938 1938 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1939 1939 return [
1940 1940 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1941 1941 if x.startswith(shadow_repos_pattern)]
1942 1942
1943 1943 def get_new_name(self, repo_name):
1944 1944 """
1945 1945 returns new full repository name based on assigned group and new new
1946 1946
1947 1947 :param group_name:
1948 1948 """
1949 1949 path_prefix = self.group.full_path_splitted if self.group else []
1950 1950 return self.NAME_SEP.join(path_prefix + [repo_name])
1951 1951
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        # imported locally (presumably to avoid an import cycle — TODO confirm)
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
1959 1959
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories: rows for super-admins, the owner,
        directly-granted users and (optionally) members of user groups that
        hold a permission, concatenated in that order.

        NOTE(review): with_owner=False combined with with_admins=True would
        index `owner_row[0]` on an empty list — confirm callers never use
        that combination before relying on it.
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2026 2026
2027 2027 def permission_user_groups(self, with_members=True):
2028 2028 q = UserGroupRepoToPerm.query()\
2029 2029 .filter(UserGroupRepoToPerm.repository == self)
2030 2030 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2031 2031 joinedload(UserGroupRepoToPerm.users_group),
2032 2032 joinedload(UserGroupRepoToPerm.permission),)
2033 2033
2034 2034 perm_rows = []
2035 2035 for _user_group in q.all():
2036 2036 entry = AttributeDict(_user_group.users_group.get_dict())
2037 2037 entry.permission = _user_group.permission.permission_name
2038 2038 if with_members:
2039 2039 entry.members = [x.user.get_dict()
2040 2040 for x in _user_group.users_group.members]
2041 2041 perm_rows.append(entry)
2042 2042
2043 2043 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2044 2044 return perm_rows
2045 2045
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        # expose custom extra fields (RepositoryField) when globally enabled
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2094 2094
    @classmethod
    def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
        """Lock `repo` for `user_id`; defaults: now / LOCK_AUTOMATIC. Commits."""
        if not lock_time:
            lock_time = time.time()
        if not lock_reason:
            lock_reason = cls.LOCK_AUTOMATIC
        repo.locked = [user_id, lock_time, lock_reason]
        Session().add(repo)
        Session().commit()

    @classmethod
    def unlock(cls, repo):
        """Clear any lock on `repo` and commit."""
        repo.locked = None
        Session().add(repo)
        Session().commit()

    @classmethod
    def getlock(cls, repo):
        """Return `repo`'s lock info triple (user_id, time, reason)."""
        return repo.locked
2114 2114
2115 2115 def is_user_lock(self, user_id):
2116 2116 if self.lock[0]:
2117 2117 lock_user_id = safe_int(self.lock[0])
2118 2118 user_id = safe_int(user_id)
2119 2119 # both are ints, and they are equal
2120 2120 return all([lock_user_id, user_id]) and lock_user_id == user_id
2121 2121
2122 2122 return False
2123 2123
2124 2124 def get_locking_state(self, action, user_id, only_when_enabled=True):
2125 2125 """
2126 2126 Checks locking on this repository, if locking is enabled and lock is
2127 2127 present returns a tuple of make_lock, locked, locked_by.
2128 2128 make_lock can have 3 states None (do nothing) True, make lock
2129 2129 False release lock, This value is later propagated to hooks, which
2130 2130 do the locking. Think about this as signals passed to hooks what to do.
2131 2131
2132 2132 """
2133 2133 # TODO: johbo: This is part of the business logic and should be moved
2134 2134 # into the RepositoryModel.
2135 2135
2136 2136 if action not in ('push', 'pull'):
2137 2137 raise ValueError("Invalid action value: %s" % repr(action))
2138 2138
2139 2139 # defines if locked error should be thrown to user
2140 2140 currently_locked = False
2141 2141 # defines if new lock should be made, tri-state
2142 2142 make_lock = None
2143 2143 repo = self
2144 2144 user = User.get(user_id)
2145 2145
2146 2146 lock_info = repo.locked
2147 2147
2148 2148 if repo and (repo.enable_locking or not only_when_enabled):
2149 2149 if action == 'push':
2150 2150 # check if it's already locked !, if it is compare users
2151 2151 locked_by_user_id = lock_info[0]
2152 2152 if user.user_id == locked_by_user_id:
2153 2153 log.debug(
2154 2154 'Got `push` action from user %s, now unlocking', user)
2155 2155 # unlock if we have push from user who locked
2156 2156 make_lock = False
2157 2157 else:
2158 2158 # we're not the same user who locked, ban with
2159 2159 # code defined in settings (default is 423 HTTP Locked) !
2160 2160 log.debug('Repo %s is currently locked by %s', repo, user)
2161 2161 currently_locked = True
2162 2162 elif action == 'pull':
2163 2163 # [0] user [1] date
2164 2164 if lock_info[0] and lock_info[1]:
2165 2165 log.debug('Repo %s is currently locked by %s', repo, user)
2166 2166 currently_locked = True
2167 2167 else:
2168 2168 log.debug('Setting lock on repo %s by %s', repo, user)
2169 2169 make_lock = True
2170 2170
2171 2171 else:
2172 2172 log.debug('Repository %s do not have locking enabled', repo)
2173 2173
2174 2174 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2175 2175 make_lock, currently_locked, lock_info)
2176 2176
2177 2177 from rhodecode.lib.auth import HasRepoPermissionAny
2178 2178 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2179 2179 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2180 2180 # if we don't have at least write permission we cannot make a lock
2181 2181 log.debug('lock state reset back to FALSE due to lack '
2182 2182 'of at least read permission')
2183 2183 make_lock = False
2184 2184
2185 2185 return make_lock, currently_locked, lock_info
2186 2186
    @property
    def last_commit_cache_update_diff(self):
        # seconds since the changeset cache was last refreshed
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @property
    def last_commit_change(self):
        # datetime of the last cached commit; epoch 0 when missing/unparsable
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = self.changeset_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_db_change(self):
        # last time this DB row itself was updated
        return self.updated_on
2204 2204
2205 2205 @property
2206 2206 def clone_uri_hidden(self):
2207 2207 clone_uri = self.clone_uri
2208 2208 if clone_uri:
2209 2209 import urlobject
2210 2210 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2211 2211 if url_obj.password:
2212 2212 clone_uri = url_obj.with_password('*****')
2213 2213 return clone_uri
2214 2214
2215 2215 @property
2216 2216 def push_uri_hidden(self):
2217 2217 push_uri = self.push_uri
2218 2218 if push_uri:
2219 2219 import urlobject
2220 2220 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2221 2221 if url_obj.password:
2222 2222 push_uri = url_obj.with_password('*****')
2223 2223 return push_uri
2224 2224
2225 2225 def clone_url(self, **override):
2226 2226 from rhodecode.model.settings import SettingsModel
2227 2227
2228 2228 uri_tmpl = None
2229 2229 if 'with_id' in override:
2230 2230 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2231 2231 del override['with_id']
2232 2232
2233 2233 if 'uri_tmpl' in override:
2234 2234 uri_tmpl = override['uri_tmpl']
2235 2235 del override['uri_tmpl']
2236 2236
2237 2237 ssh = False
2238 2238 if 'ssh' in override:
2239 2239 ssh = True
2240 2240 del override['ssh']
2241 2241
2242 2242 # we didn't override our tmpl from **overrides
2243 2243 if not uri_tmpl:
2244 2244 rc_config = SettingsModel().get_all_settings(cache=True)
2245 2245 if ssh:
2246 2246 uri_tmpl = rc_config.get(
2247 2247 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2248 2248 else:
2249 2249 uri_tmpl = rc_config.get(
2250 2250 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2251 2251
2252 2252 request = get_current_request()
2253 2253 return get_clone_url(request=request,
2254 2254 uri_tmpl=uri_tmpl,
2255 2255 repo_name=self.repo_name,
2256 2256 repo_id=self.repo_id, **override)
2257 2257
    def set_state(self, state):
        """Set repo_state (one of the STATE_* constants); caller commits."""
        self.repo_state = state
        Session().add(self)
2261 2261 #==========================================================================
2262 2262 # SCM PROPERTIES
2263 2263 #==========================================================================
2264 2264
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """Return a commit by id or index, via get_commit_safe on the scm."""
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)

    def get_changeset(self, rev=None, pre_load=None):
        """Deprecated alias of :meth:`get_commit`; `rev` may be id or index."""
        warnings.warn("Use get_commit", DeprecationWarning)
        commit_id = None
        commit_idx = None
        # string revs are commit ids, anything else is treated as an index
        if isinstance(rev, compat.string_types):
            commit_id = rev
        else:
            commit_idx = rev
        return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
                               pre_load=pre_load)

    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        if isinstance(commit, EmptyCommit):
            return self.get_commit()
        return commit
2289 2289
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author
            updated_on

        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)

            empty = scm_repo is None or scm_repo.is_empty()
            if not empty:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # outdated when raw_id or revision differs from the stored cache
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _default
            # we check if last update is newer than the new value
            # if yes, we use the current timestamp instead. Imagine you get
            # old commit pushed 1y ago, we'd set last update 1y to ago.
            last_change_timestamp = datetime_to_time(last_change)
            # NOTE(review): both timestamps below derive from `last_change`,
            # so the comparison can never be true and the guard is dead code;
            # `current_timestamp` was likely meant to be built from `_default`
            # — confirm intended behavior before changing.
            current_timestamp = datetime_to_time(last_change)
            if last_change_timestamp > current_timestamp:
                cs_cache['date'] = _default

            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()

            log.debug('updated repo %s with new commit cache %s',
                      self.repo_name, cs_cache)
        else:
            cs_cache = self.changeset_cache
            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()

            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
2354 2354
@property
def tip(self):
    """Newest (tip) commit of this repository."""
    tip_ref = 'tip'
    return self.get_commit(tip_ref)
2358 2358
@property
def author(self):
    """Author of the tip commit of this repository."""
    tip_commit = self.tip
    return tip_commit.author
2362 2362
@property
def last_change(self):
    """Last change as reported by the scm backend instance."""
    scm_repo = self.scm_instance()
    return scm_repo.last_change
2366 2366
def get_comments(self, revisions=None):
    """
    Return comments attached to this repository, grouped by revision.

    :param revisions: when given, restrict the query to these revisions
    """
    query = ChangesetComment.query().filter(
        ChangesetComment.repo == self)
    if revisions:
        query = query.filter(
            ChangesetComment.revision.in_(revisions))

    by_revision = collections.defaultdict(list)
    for comment in query.all():
        by_revision[comment.revision].append(comment)
    return by_revision
2381 2381
def statuses(self, revisions=None):
    """
    Return changeset statuses for this repository, grouped by revision.

    :param revisions: list of revisions to get statuses for
    """
    base_query = ChangesetStatus.query()\
        .filter(ChangesetStatus.repo == self)\
        .filter(ChangesetStatus.version == 0)

    if revisions:
        # query in chunks to avoid hitting SQL IN() clause limits
        size = 500
        status_results = []
        for offset in xrange(0, len(revisions), size):
            chunk = revisions[offset:offset + size]
            status_results.extend(
                base_query.filter(
                    ChangesetStatus.revision.in_(chunk)).all())
    else:
        status_results = base_query.all()

    grouped = {}

    # maybe we have open new pullrequest without a status?
    stat = ChangesetStatus.STATUS_UNDER_REVIEW
    status_lbl = ChangesetStatus.get_status_lbl(stat)
    pr_query = PullRequest.query().filter(PullRequest.source_repo == self)
    for pull_request in pr_query.all():
        for rev in pull_request.revisions:
            grouped[rev] = [stat, status_lbl,
                            pull_request.pull_request_id,
                            pull_request.target_repo.repo_name]

    # explicit statuses override the implicit "under review" entries
    for status in status_results:
        pr_id = pr_repo = None
        if status.pull_request:
            pr_id = status.pull_request.pull_request_id
            pr_repo = status.pull_request.target_repo.repo_name
        grouped[status.revision] = [
            str(status.status), status.status_lbl, pr_id, pr_repo]
    return grouped
2423 2423
2424 2424 # ==========================================================================
2425 2425 # SCM CACHE INSTANCE
2426 2426 # ==========================================================================
2427 2427
def scm_instance(self, **kwargs):
    """Return a (possibly cached) vcs instance for this repository."""
    import rhodecode

    # passing an explicit config bypasses the cache; currently only
    # used by repo2dbmapper
    config = kwargs.pop('config', None)
    cache = kwargs.pop('cache', None)
    full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))

    # when `cache` is left undefined fall back to the global setting,
    # otherwise the caller takes full control over cache behaviour
    use_cached_instance = cache is None and full_cache and not config
    if use_cached_instance:
        return self._get_instance_cached()
    # cache here is sent to the "vcs server"
    return self._get_instance(cache=bool(cache), config=config)
2441 2442
def _get_instance_cached(self):
    """
    Return a vcs repository instance, memoized in the long-term
    `cache_repo_longterm` cache region until an invalidation signal
    for this repository is raised.
    """
    from rhodecode.lib import rc_cache

    # cache namespace is scoped per-repository
    cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
        repo_id=self.repo_id)
    region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
    def get_instance_cached(repo_id, context_id):
        # args only widen the cache key; the instance itself comes from self
        return self._get_instance()

    # we must use thread scoped cache here,
    # because each thread of gevent needs it's own not shared connection and cache
    # we also alter `args` so the cache key is individual for every green thread.
    inv_context_manager = rc_cache.InvalidationContext(
        uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
        thread_scoped=True)
    with inv_context_manager as invalidation_context:
        args = (self.repo_id, inv_context_manager.cache_key)
        # re-compute and store cache if we get invalidate signal
        if invalidation_context.should_invalidate():
            instance = get_instance_cached.refresh(*args)
        else:
            instance = get_instance_cached(*args)

        log.debug('Repo instance fetched in %.3fs', inv_context_manager.compute_time)
        return instance
2470 2471
def _get_instance(self, cache=True, config=None):
    """
    Create a fresh (non-memoized) vcs instance for this repository.

    :param cache: forwarded to the vcs.remote wire, controls its cache
    :param config: optional config override; defaults to repo config
    """
    effective_config = config or self._config
    custom_wire = {
        'cache': cache  # controls the vcs.remote cache
    }
    repo = get_vcs_instance(
        repo_path=safe_str(self.repo_full_path),
        config=effective_config,
        with_wire=custom_wire,
        create=False,
        _vcs_alias=self.repo_type)

    if repo is not None:
        repo.count()  # cache rebuild
    return repo
2484 2486
def __json__(self):
    """JSON-serializable representation of this repository."""
    data = {'landing_rev': self.landing_rev}
    return data
2487 2489
def get_dict(self):
    """
    Dict representation of this repository.

    Since `repo_name` was transformed to a hybrid property, restore the
    `repo_name` key for compatibility with code that still uses it.
    """
    data = super(Repository, self).get_dict()
    data['repo_name'] = data.pop('_repo_name', None)
    return data
2496 2498
2497 2499
class RepoGroup(Base, BaseModel):
    """
    Database model for a repository group - a (possibly nested) container
    holding repositories and other repository groups.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # backing column for the `group_name` hybrid property below
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    # backing column for the `changeset_cache` hybrid property below
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete, delete-orphan")

    def __init__(self, group_name='', parent_group=None):
        # assigning via the hybrid setter also computes group_name_hash
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def group_name(self):
        """Full (slash-separated) name of this group."""
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        # keep the pre-computed name hash in sync with the name
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)

    @hybrid_property
    def changeset_cache(self):
        """
        Cached info about the newest commit inside this group, stored as
        JSON. Falls back to an EmptyCommit-shaped dict when missing or
        unparsable.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = ''
            # round-trip through JSON so the fallback has the same
            # (json-safe) value types as a stored cache entry
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @changeset_cache.setter
    def changeset_cache(self, val):
        # best-effort store; serialization failures are logged, not raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        """HTML-escaped group description."""
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def hash_repo_group_name(cls, repo_group_name):
        """
        Compute an ascii-only token for a group name: letters are kept,
        every other character is replaced by its ordinal value.
        """
        val = remove_formatting(repo_group_name)
        val = safe_str(val).lower()
        chars = []
        for c in val:
            if c not in string.ascii_letters:
                c = str(ord(c))
            chars.append(c)

        return ''.join(chars)

    @classmethod
    def _generate_choice(cls, repo_group):
        """Build a single (id, label) select2 choice for *repo_group*."""
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Return sorted (id, label) choices for the given groups (all groups
        when None), optionally prefixed with a "No parent" entry.
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        # sort by the first path segment of the label
        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        """Separator used in group paths/URLs."""
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a group by (optionally case-insensitive) name."""
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """
        Return the personal repo group of *user_id*, or None for the
        default (anonymous) user.
        """
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user) \
            .order_by(cls.group_id.asc()) \
            .first()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        Return all repo groups, optionally filtered by owner and/or parent
        group, sorted by name.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self, parents_recursion_limit = 10):
        """
        List of ancestor groups, outermost first.

        NOTE(review): as a property this can only ever be called with the
        default `parents_recursion_limit`; the parameter exists solely as
        a named recursion cap.
        """
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_commit_cache_update_diff(self):
        """Seconds since the changeset cache was last refreshed."""
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @property
    def last_commit_change(self):
        """Date of the newest cached commit; epoch 0 when unparsable."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = self.changeset_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_db_change(self):
        """Datetime of the last database update of this group row."""
        return self.updated_on

    @property
    def children(self):
        """Query over the direct child groups of this group."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the full group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        """Full (slash-separated) path of this group."""
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full path split into its individual segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query over repositories directly inside this group."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Number of repositories in this group and all its descendants."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True, include_groups=True):
        """
        Depth-first list of this group plus all nested groups and/or
        repositories, controlled by the include_* flags.
        """
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name: new last path segment for this group
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def update_commit_cache(self, config=None):
        """
        Update cache of last changeset for newest repository inside this group, keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime

        def repo_groups_and_repos():
            # map every group (this one and all ancestors/descendants) to
            # the list of repositories whose caches feed into it
            all_entries = OrderedDefaultDict(list)

            def _get_members(root_gr, pos=0):
                # NOTE(review): `pos` is threaded through the recursion
                # but never read - presumably a leftover; confirm before
                # removing.

                for repo in root_gr.repositories:
                    all_entries[root_gr].append(repo)

                # fill in all parent positions
                for parent_group in root_gr.parents:
                    all_entries[parent_group].extend(all_entries[root_gr])

                children_groups = root_gr.children.all()
                if children_groups:
                    for cnt, gr in enumerate(children_groups, 1):
                        _get_members(gr, pos=pos+cnt)

            _get_members(root_gr=self)
            return all_entries

        empty_date = datetime.datetime.fromtimestamp(0)
        for repo_group, repos in repo_groups_and_repos().items():

            # pick the repo with the newest cached commit date
            latest_repo_cs_cache = {}
            for repo in repos:
                repo_cs_cache = repo.changeset_cache
                date_latest = latest_repo_cs_cache.get('date', empty_date)
                date_current = repo_cs_cache.get('date', empty_date)
                current_timestamp = datetime_to_time(parse_datetime(date_latest))
                if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                    latest_repo_cs_cache = repo_cs_cache
                    latest_repo_cs_cache['source_repo_id'] = repo.repo_id

            latest_repo_cs_cache['updated_on'] = time.time()
            repo_group.changeset_cache = latest_repo_cs_cache
            Session().add(repo_group)
            Session().commit()

            log.debug('updated repo group %s with new commit cache %s',
                      repo_group.group_name, latest_repo_cs_cache)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """
        Return user-group permission rows for this repository group,
        optionally expanded with each group's member dicts.
        """
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data

    def get_dict(self):
        # Since we transformed `group_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `group_name` field.
        result = super(RepoGroup, self).get_dict()
        result['group_name'] = result.pop('_group_name', None)
        return result
2944 2946
2945 2947
2946 2948 class Permission(Base, BaseModel):
2947 2949 __tablename__ = 'permissions'
2948 2950 __table_args__ = (
2949 2951 Index('p_perm_name_idx', 'permission_name'),
2950 2952 base_table_args,
2951 2953 )
2952 2954
2953 2955 PERMS = [
2954 2956 ('hg.admin', _('RhodeCode Super Administrator')),
2955 2957
2956 2958 ('repository.none', _('Repository no access')),
2957 2959 ('repository.read', _('Repository read access')),
2958 2960 ('repository.write', _('Repository write access')),
2959 2961 ('repository.admin', _('Repository admin access')),
2960 2962
2961 2963 ('group.none', _('Repository group no access')),
2962 2964 ('group.read', _('Repository group read access')),
2963 2965 ('group.write', _('Repository group write access')),
2964 2966 ('group.admin', _('Repository group admin access')),
2965 2967
2966 2968 ('usergroup.none', _('User group no access')),
2967 2969 ('usergroup.read', _('User group read access')),
2968 2970 ('usergroup.write', _('User group write access')),
2969 2971 ('usergroup.admin', _('User group admin access')),
2970 2972
2971 2973 ('branch.none', _('Branch no permissions')),
2972 2974 ('branch.merge', _('Branch access by web merge')),
2973 2975 ('branch.push', _('Branch access by push')),
2974 2976 ('branch.push_force', _('Branch access by push with force')),
2975 2977
2976 2978 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2977 2979 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2978 2980
2979 2981 ('hg.usergroup.create.false', _('User Group creation disabled')),
2980 2982 ('hg.usergroup.create.true', _('User Group creation enabled')),
2981 2983
2982 2984 ('hg.create.none', _('Repository creation disabled')),
2983 2985 ('hg.create.repository', _('Repository creation enabled')),
2984 2986 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2985 2987 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2986 2988
2987 2989 ('hg.fork.none', _('Repository forking disabled')),
2988 2990 ('hg.fork.repository', _('Repository forking enabled')),
2989 2991
2990 2992 ('hg.register.none', _('Registration disabled')),
2991 2993 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2992 2994 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2993 2995
2994 2996 ('hg.password_reset.enabled', _('Password reset enabled')),
2995 2997 ('hg.password_reset.hidden', _('Password reset hidden')),
2996 2998 ('hg.password_reset.disabled', _('Password reset disabled')),
2997 2999
2998 3000 ('hg.extern_activate.manual', _('Manual activation of external account')),
2999 3001 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3000 3002
3001 3003 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3002 3004 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3003 3005 ]
3004 3006
3005 3007 # definition of system default permissions for DEFAULT user, created on
3006 3008 # system setup
3007 3009 DEFAULT_USER_PERMISSIONS = [
3008 3010 # object perms
3009 3011 'repository.read',
3010 3012 'group.read',
3011 3013 'usergroup.read',
3012 3014 # branch, for backward compat we need same value as before so forced pushed
3013 3015 'branch.push_force',
3014 3016 # global
3015 3017 'hg.create.repository',
3016 3018 'hg.repogroup.create.false',
3017 3019 'hg.usergroup.create.false',
3018 3020 'hg.create.write_on_repogroup.true',
3019 3021 'hg.fork.repository',
3020 3022 'hg.register.manual_activate',
3021 3023 'hg.password_reset.enabled',
3022 3024 'hg.extern_activate.auto',
3023 3025 'hg.inherit_default_perms.true',
3024 3026 ]
3025 3027
3026 3028 # defines which permissions are more important higher the more important
3027 3029 # Weight defines which permissions are more important.
3028 3030 # The higher number the more important.
3029 3031 PERM_WEIGHTS = {
3030 3032 'repository.none': 0,
3031 3033 'repository.read': 1,
3032 3034 'repository.write': 3,
3033 3035 'repository.admin': 4,
3034 3036
3035 3037 'group.none': 0,
3036 3038 'group.read': 1,
3037 3039 'group.write': 3,
3038 3040 'group.admin': 4,
3039 3041
3040 3042 'usergroup.none': 0,
3041 3043 'usergroup.read': 1,
3042 3044 'usergroup.write': 3,
3043 3045 'usergroup.admin': 4,
3044 3046
3045 3047 'branch.none': 0,
3046 3048 'branch.merge': 1,
3047 3049 'branch.push': 3,
3048 3050 'branch.push_force': 4,
3049 3051
3050 3052 'hg.repogroup.create.false': 0,
3051 3053 'hg.repogroup.create.true': 1,
3052 3054
3053 3055 'hg.usergroup.create.false': 0,
3054 3056 'hg.usergroup.create.true': 1,
3055 3057
3056 3058 'hg.fork.none': 0,
3057 3059 'hg.fork.repository': 1,
3058 3060 'hg.create.none': 0,
3059 3061 'hg.create.repository': 1
3060 3062 }
3061 3063
3062 3064 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3063 3065 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3064 3066 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3065 3067
3066 3068 def __unicode__(self):
3067 3069 return u"<%s('%s:%s')>" % (
3068 3070 self.__class__.__name__, self.permission_id, self.permission_name
3069 3071 )
3070 3072
3071 3073 @classmethod
3072 3074 def get_by_key(cls, key):
3073 3075 return cls.query().filter(cls.permission_name == key).scalar()
3074 3076
3075 3077 @classmethod
3076 3078 def get_default_repo_perms(cls, user_id, repo_id=None):
3077 3079 q = Session().query(UserRepoToPerm, Repository, Permission)\
3078 3080 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3079 3081 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3080 3082 .filter(UserRepoToPerm.user_id == user_id)
3081 3083 if repo_id:
3082 3084 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3083 3085 return q.all()
3084 3086
3085 3087 @classmethod
3086 3088 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3087 3089 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3088 3090 .join(
3089 3091 Permission,
3090 3092 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3091 3093 .join(
3092 3094 UserRepoToPerm,
3093 3095 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3094 3096 .filter(UserRepoToPerm.user_id == user_id)
3095 3097
3096 3098 if repo_id:
3097 3099 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3098 3100 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3099 3101
3100 3102 @classmethod
3101 3103 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3102 3104 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3103 3105 .join(
3104 3106 Permission,
3105 3107 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3106 3108 .join(
3107 3109 Repository,
3108 3110 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3109 3111 .join(
3110 3112 UserGroup,
3111 3113 UserGroupRepoToPerm.users_group_id ==
3112 3114 UserGroup.users_group_id)\
3113 3115 .join(
3114 3116 UserGroupMember,
3115 3117 UserGroupRepoToPerm.users_group_id ==
3116 3118 UserGroupMember.users_group_id)\
3117 3119 .filter(
3118 3120 UserGroupMember.user_id == user_id,
3119 3121 UserGroup.users_group_active == true())
3120 3122 if repo_id:
3121 3123 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3122 3124 return q.all()
3123 3125
3124 3126 @classmethod
3125 3127 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3126 3128 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3127 3129 .join(
3128 3130 Permission,
3129 3131 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3130 3132 .join(
3131 3133 UserGroupRepoToPerm,
3132 3134 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3133 3135 .join(
3134 3136 UserGroup,
3135 3137 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3136 3138 .join(
3137 3139 UserGroupMember,
3138 3140 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3139 3141 .filter(
3140 3142 UserGroupMember.user_id == user_id,
3141 3143 UserGroup.users_group_active == true())
3142 3144
3143 3145 if repo_id:
3144 3146 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3145 3147 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3146 3148
3147 3149 @classmethod
3148 3150 def get_default_group_perms(cls, user_id, repo_group_id=None):
3149 3151 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3150 3152 .join(
3151 3153 Permission,
3152 3154 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3153 3155 .join(
3154 3156 RepoGroup,
3155 3157 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3156 3158 .filter(UserRepoGroupToPerm.user_id == user_id)
3157 3159 if repo_group_id:
3158 3160 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3159 3161 return q.all()
3160 3162
3161 3163 @classmethod
3162 3164 def get_default_group_perms_from_user_group(
3163 3165 cls, user_id, repo_group_id=None):
3164 3166 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3165 3167 .join(
3166 3168 Permission,
3167 3169 UserGroupRepoGroupToPerm.permission_id ==
3168 3170 Permission.permission_id)\
3169 3171 .join(
3170 3172 RepoGroup,
3171 3173 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3172 3174 .join(
3173 3175 UserGroup,
3174 3176 UserGroupRepoGroupToPerm.users_group_id ==
3175 3177 UserGroup.users_group_id)\
3176 3178 .join(
3177 3179 UserGroupMember,
3178 3180 UserGroupRepoGroupToPerm.users_group_id ==
3179 3181 UserGroupMember.users_group_id)\
3180 3182 .filter(
3181 3183 UserGroupMember.user_id == user_id,
3182 3184 UserGroup.users_group_active == true())
3183 3185 if repo_group_id:
3184 3186 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3185 3187 return q.all()
3186 3188
3187 3189 @classmethod
3188 3190 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3189 3191 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3190 3192 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3191 3193 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3192 3194 .filter(UserUserGroupToPerm.user_id == user_id)
3193 3195 if user_group_id:
3194 3196 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3195 3197 return q.all()
3196 3198
3197 3199 @classmethod
3198 3200 def get_default_user_group_perms_from_user_group(
3199 3201 cls, user_id, user_group_id=None):
3200 3202 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3201 3203 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3202 3204 .join(
3203 3205 Permission,
3204 3206 UserGroupUserGroupToPerm.permission_id ==
3205 3207 Permission.permission_id)\
3206 3208 .join(
3207 3209 TargetUserGroup,
3208 3210 UserGroupUserGroupToPerm.target_user_group_id ==
3209 3211 TargetUserGroup.users_group_id)\
3210 3212 .join(
3211 3213 UserGroup,
3212 3214 UserGroupUserGroupToPerm.user_group_id ==
3213 3215 UserGroup.users_group_id)\
3214 3216 .join(
3215 3217 UserGroupMember,
3216 3218 UserGroupUserGroupToPerm.user_group_id ==
3217 3219 UserGroupMember.users_group_id)\
3218 3220 .filter(
3219 3221 UserGroupMember.user_id == user_id,
3220 3222 UserGroup.users_group_active == true())
3221 3223 if user_group_id:
3222 3224 q = q.filter(
3223 3225 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3224 3226
3225 3227 return q.all()
3226 3228
3227 3229
class UserRepoToPerm(Base, BaseModel):
    """
    Association table granting a single user a permission on a single
    repository.
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch rules hanging off this permission entry; deleted together with it
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new entry, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3257 3259
3258 3260
class UserUserGroupToPerm(Base, BaseModel):
    """
    Association table granting a single user a permission on a user group.
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new entry, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3286 3288
3287 3289
class UserToPerm(Base, BaseModel):
    """
    Association table granting a global (non-scoped) permission to a user.
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded, the permission is read whenever the row is used
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3304 3306
3305 3307
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Association table granting a user group a permission on a repository.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch rules attached to this group permission; removed with the entry
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new entry, add it to the session (no commit) and return it."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3334 3336
3335 3337
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Association table granting one user group a permission on another
    (target) user group. A group cannot grant a permission on itself.
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # disallow self-referencing grants
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both relationships point at UserGroup, so explicit primaryjoins are
    # required to disambiguate which FK each one follows
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new entry, add it to the session (no commit) and return it."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3364 3366
3365 3367
class UserGroupToPerm(Base, BaseModel):
    """
    Association table granting a global (non-scoped) permission to a user
    group.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3379 3381
3380 3382
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Association table granting a single user a permission on a repository
    group.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new entry, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3405 3407
3406 3408
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Association table granting a user group a permission on a repository
    group.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new entry, add it to the session (no commit) and return it."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3434 3436
3435 3437
class Statistics(Base, BaseModel):
    """
    Pre-computed commit statistics for a repository, stored as JSON blobs
    valid up to `stat_on_revision`. One row per repository.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision the statistics below were computed for
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
3450 3452
3451 3453
class UserFollowing(Base, BaseModel):
    """
    A "follow" entry by a user. A row links the follower either to a
    repository or to another user (both target columns are nullable).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # both user relationships target User; primaryjoin disambiguates the FK
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of followings targeting the given repository."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3474 3476
3475 3477
class CacheKey(Base, BaseModel):
    """
    Tracks cache keys and their active/invalid flag in the database so that
    separate processes/instances can invalidate each other's caches.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'
    CACHE_TYPE_README = 'README'
    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        # new keys start inactive; they become active once the cache is built
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args; yields (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: value matched against the `cache_args` column
        :param delete: when True drop the rows instead of flagging them
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                qry.update({"cache_active": False})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: log and roll back rather than propagate, so a
            # failed invalidation does not break the calling request
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the row for *cache_key*, or None when it does not exist."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None
3566 3568
3567 3569
class ChangesetComment(Base, BaseModel):
    """
    A comment on a commit (by revision) or on a pull request, optionally
    anchored inline to a file/line, and optionally resolving another TODO
    comment.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential link: a TODO comment may be resolved by other comments
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """Return the 1-based position of *pr_version* in *versions*, or
        None when it is not present."""
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # True when the comment was flagged outdated (newer PR version)
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # with no version given, any versioned comment counts as older
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # comment that resolved this TODO, if any
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments are anchored to both a file path and a line number
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """Return this comment's own version index within *versions*."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return the dict representation used by the API layer."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3692 3694
3693 3695
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) set by a user on a revision or
    pull request; `version` keeps the history of status changes unique per
    (repo, revision).
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; order is the display order
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the translated label for a raw status value, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        # translated label of this row's status
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return the dict representation used by the API layer."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3756 3758
3757 3759
3758 3760 class _SetState(object):
3759 3761 """
3760 3762 Context processor allowing changing state for sensitive operation such as
3761 3763 pull request update or merge
3762 3764 """
3763 3765
3764 3766 def __init__(self, pull_request, pr_state, back_state=None):
3765 3767 self._pr = pull_request
3766 3768 self._org_state = back_state or pull_request.pull_request_state
3767 3769 self._pr_state = pr_state
3768 3770
3769 3771 def __enter__(self):
3770 3772 log.debug('StateLock: entering set state context, setting state to: `%s`',
3771 3773 self._pr_state)
3772 3774 self._pr.pull_request_state = self._pr_state
3773 3775 Session().add(self._pr)
3774 3776 Session().commit()
3775 3777
3776 3778 def __exit__(self, exc_type, exc_val, exc_tb):
3777 3779 log.debug('StateLock: exiting set state context, setting state to: `%s`',
3778 3780 self._org_state)
3779 3781 self._pr.pull_request_state = self._org_state
3780 3782 Session().add(self._pr)
3781 3783 Session().commit()
3782 3784
3783 3785
3784 3786 class _PullRequestBase(BaseModel):
3785 3787 """
3786 3788 Common attributes of pull request and version entries.
3787 3789 """
3788 3790
3789 3791 # .status values
3790 3792 STATUS_NEW = u'new'
3791 3793 STATUS_OPEN = u'open'
3792 3794 STATUS_CLOSED = u'closed'
3793 3795
3794 3796 # available states
3795 3797 STATE_CREATING = u'creating'
3796 3798 STATE_UPDATING = u'updating'
3797 3799 STATE_MERGING = u'merging'
3798 3800 STATE_CREATED = u'created'
3799 3801
3800 3802 title = Column('title', Unicode(255), nullable=True)
3801 3803 description = Column(
3802 3804 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3803 3805 nullable=True)
3804 3806 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3805 3807
3806 3808 # new/open/closed status of pull request (not approve/reject/etc)
3807 3809 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3808 3810 created_on = Column(
3809 3811 'created_on', DateTime(timezone=False), nullable=False,
3810 3812 default=datetime.datetime.now)
3811 3813 updated_on = Column(
3812 3814 'updated_on', DateTime(timezone=False), nullable=False,
3813 3815 default=datetime.datetime.now)
3814 3816
3815 3817 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3816 3818
3817 3819 @declared_attr
3818 3820 def user_id(cls):
3819 3821 return Column(
3820 3822 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3821 3823 unique=None)
3822 3824
3823 3825 # 500 revisions max
3824 3826 _revisions = Column(
3825 3827 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3826 3828
3827 3829 @declared_attr
3828 3830 def source_repo_id(cls):
3829 3831 # TODO: dan: rename column to source_repo_id
3830 3832 return Column(
3831 3833 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3832 3834 nullable=False)
3833 3835
3834 3836 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3835 3837
3836 3838 @hybrid_property
3837 3839 def source_ref(self):
3838 3840 return self._source_ref
3839 3841
3840 3842 @source_ref.setter
3841 3843 def source_ref(self, val):
3842 3844 parts = (val or '').split(':')
3843 3845 if len(parts) != 3:
3844 3846 raise ValueError(
3845 3847 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3846 3848 self._source_ref = safe_unicode(val)
3847 3849
3848 3850 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3849 3851
3850 3852 @hybrid_property
3851 3853 def target_ref(self):
3852 3854 return self._target_ref
3853 3855
3854 3856 @target_ref.setter
3855 3857 def target_ref(self, val):
3856 3858 parts = (val or '').split(':')
3857 3859 if len(parts) != 3:
3858 3860 raise ValueError(
3859 3861 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3860 3862 self._target_ref = safe_unicode(val)
3861 3863
3862 3864 @declared_attr
3863 3865 def target_repo_id(cls):
3864 3866 # TODO: dan: rename column to target_repo_id
3865 3867 return Column(
3866 3868 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3867 3869 nullable=False)
3868 3870
3869 3871 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3870 3872
3871 3873 # TODO: dan: rename column to last_merge_source_rev
3872 3874 _last_merge_source_rev = Column(
3873 3875 'last_merge_org_rev', String(40), nullable=True)
3874 3876 # TODO: dan: rename column to last_merge_target_rev
3875 3877 _last_merge_target_rev = Column(
3876 3878 'last_merge_other_rev', String(40), nullable=True)
3877 3879 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3878 3880 merge_rev = Column('merge_rev', String(40), nullable=True)
3879 3881
3880 3882 reviewer_data = Column(
3881 3883 'reviewer_data_json', MutationObj.as_mutable(
3882 3884 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3883 3885
3884 3886 @property
3885 3887 def reviewer_data_json(self):
3886 3888 return json.dumps(self.reviewer_data)
3887 3889
3888 3890 @hybrid_property
3889 3891 def description_safe(self):
3890 3892 from rhodecode.lib import helpers as h
3891 3893 return h.escape(self.description)
3892 3894
3893 3895 @hybrid_property
3894 3896 def revisions(self):
3895 3897 return self._revisions.split(':') if self._revisions else []
3896 3898
3897 3899 @revisions.setter
3898 3900 def revisions(self, val):
3899 3901 self._revisions = ':'.join(val)
3900 3902
3901 3903 @hybrid_property
3902 3904 def last_merge_status(self):
3903 3905 return safe_int(self._last_merge_status)
3904 3906
3905 3907 @last_merge_status.setter
3906 3908 def last_merge_status(self, val):
3907 3909 self._last_merge_status = val
3908 3910
3909 3911 @declared_attr
3910 3912 def author(cls):
3911 3913 return relationship('User', lazy='joined')
3912 3914
3913 3915 @declared_attr
3914 3916 def source_repo(cls):
3915 3917 return relationship(
3916 3918 'Repository',
3917 3919 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3918 3920
3919 3921 @property
3920 3922 def source_ref_parts(self):
3921 3923 return self.unicode_to_reference(self.source_ref)
3922 3924
3923 3925 @declared_attr
3924 3926 def target_repo(cls):
3925 3927 return relationship(
3926 3928 'Repository',
3927 3929 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3928 3930
3929 3931 @property
3930 3932 def target_ref_parts(self):
3931 3933 return self.unicode_to_reference(self.target_ref)
3932 3934
3933 3935 @property
3934 3936 def shadow_merge_ref(self):
3935 3937 return self.unicode_to_reference(self._shadow_merge_ref)
3936 3938
3937 3939 @shadow_merge_ref.setter
3938 3940 def shadow_merge_ref(self, ref):
3939 3941 self._shadow_merge_ref = self.reference_to_unicode(ref)
3940 3942
3941 3943 @staticmethod
3942 3944 def unicode_to_reference(raw):
3943 3945 """
3944 3946 Convert a unicode (or string) to a reference object.
3945 3947 If unicode evaluates to False it returns None.
3946 3948 """
3947 3949 if raw:
3948 3950 refs = raw.split(':')
3949 3951 return Reference(*refs)
3950 3952 else:
3951 3953 return None
3952 3954
3953 3955 @staticmethod
3954 3956 def reference_to_unicode(ref):
3955 3957 """
3956 3958 Convert a reference object to unicode.
3957 3959 If reference is None it returns None.
3958 3960 """
3959 3961 if ref:
3960 3962 return u':'.join(ref)
3961 3963 else:
3962 3964 return None
3963 3965
    def get_api_data(self, with_merge_state=True):
        """
        Build the dict representation of this pull request used by the API.

        :param with_merge_state: when True, compute the (potentially
            expensive) mergeability check via PullRequestModel; when False,
            a placeholder ``not_available`` state is returned instead.
        :return: dict with PR metadata, source/target references, merge
            info, author and reviewer data
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            # merge_status returns a (status, message, ...) tuple; only the
            # first two elements are exposed here.
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            # shadow_merge_ref is a namedtuple (Reference) -> plain dict
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    # `st` holds status tuples; fall back when nothing voted
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
4031 4033
    def set_state(self, pull_request_state, final_state=None):
        """
        Return a context manager that transitions this PR to
        *pull_request_state* and restores the previous state (or
        *final_state*, if given) on exit::

            # goes from initial state to updating to initial state.
            # initial state can be changed by specifying back_state=
            with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
                pull_request.merge()

        :param pull_request_state: state to enter for the duration
        :param final_state: optional state to settle into on exit

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4045 4047
4046 4048
class PullRequest(Base, _PullRequestBase):
    """
    The live pull request record. Columns and hybrids come from
    _PullRequestBase; snapshots of past versions live in
    PullRequestVersion.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            # not yet flushed -> no id; show object identity instead
            return '<DB:PullRequest at %#x>' % id(self)

    # Child rows are fully owned by the PR and removed with it.
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Wrap *pull_request_obj* in a read-only display object whose
        attributes come from get_api_data(), while a few live values
        (shadow merge ref, reviewer data) are taken from
        *org_pull_request_obj*.

        :param internal_methods: names resolved on the wrapper itself
            instead of the attrs dict (defaults to ['versions'])
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # names listed as internal resolve to real methods/properties
                # on this wrapper; everything else is a plain dict lookup
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # closure over the wrapped PR; ordered oldest-first
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # present only when wrapping a PullRequestVersion row
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        # StrictAttributeDict raises on missing keys instead of returning None
        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # these always come from the *original* (live) PR, not the version
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        # STATUS_CLOSED is defined on a base class outside this chunk
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        # minimal JSON form; full data comes from get_api_data()
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return a VCS instance for this PR's shadow merge repository, or
        None when the shadow repo directory does not exist on disk.
        """
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            self.target_repo.repo_id, workspace_id)
        if os.path.isdir(shadow_repository_path):
            return vcs_obj._get_shadow_instance(shadow_repository_path)
4163 4165
4164 4166
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a PullRequest taken on update; shares all
    columns with the live PR via _PullRequestBase and proxies
    reviewer/status data to its parent.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            # not yet flushed -> no id; show object identity instead
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers are tracked on the live PR only
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4201 4203
4202 4204
class PullRequestReviewers(Base, BaseModel):
    """
    Association row linking a User to a PullRequest as a reviewer,
    carrying the reasons/rules that made them a reviewer.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # normalize a stored NULL/empty value to an empty list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list of reason strings; exposed via the `reasons` hybrid above
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    # mandatory reviewers cannot be removed from the PR
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON blob describing the review rule that added this reviewer
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4260 4262
4261 4263
class Notification(Base, BaseModel):
    """
    A single notification, fanned out to its recipients via
    UserNotification association rows.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # notification categories; stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # trailing underscore avoids clashing with the `type` builtin
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # users this notification was delivered to, ordered by user id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to each recipient. The creator's
        own copy is marked read immediately. Objects are added to the
        session but not committed here.

        :param created_by: User instance issuing the notification
        :param recipients: iterable of User instances
        :param type_: one of the TYPE_* constants (defaults to TYPE_MESSAGE)
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4320 4322
4321 4323
class UserNotification(Base, BaseModel):
    """
    Association row between a User and a Notification, tracking per-user
    read/sent state.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # adds to the session; caller is responsible for the commit
        self.read = True
        Session().add(self)
4341 4343
4342 4344
class Gist(Base, BaseModel):
    """
    A gist: a small VCS-backed snippet repository stored under a common
    gists directory, with public/private visibility and ACL level.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    # visibility types
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    # short public identifier used in URLs and as the on-disk repo name
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiry as a float timestamp; semantics handled by the model layer
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """
        Fetch a gist by its access id or raise HTTPNotFound.

        :param id_: gist_access_id (not the numeric primary key)
        """
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        # like get_or_404 but returns None when missing
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # root repo path is stored in the RhodeCodeUi row keyed by URL_SEP
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            # content is filled in separately by API callers when requested
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4445 4447
4446 4448
class ExternalIdentity(Base, BaseModel):
    """
    Link between a local user account and an identity at an external
    auth provider (composite PK: external_id + local_user_id + provider).
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    # provider-issued tokens/secrets for this identity
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional extra filter on the local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        # implicit join: User filtered through the identity columns
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity query (not yet executed)
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load the EE auth plugin registered under *plugin_id*."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4512 4514
4513 4515
class Integration(Base, BaseModel):
    """
    An integration (webhook, Slack, etc.) scoped either globally, to a
    repo group, or to a single repository.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # for group/global scope: restrict to direct children, not recursive
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # integration-type-specific settings as a mutable JSON blob
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """Human-readable scope: repo > repo group > root/global."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4555 4557
4556 4558
class RepoReviewRuleUser(Base, BaseModel):
    """
    A single user attached to a RepoReviewRule, optionally mandatory.
    """
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        # serializable rule attributes for this reviewer entry
        return {
            'mandatory': self.mandatory
        }
4573 4575
4574 4576
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    A user group attached to a RepoReviewRule, with a voting rule that
    says how many group members must vote (VOTE_RULE_ALL = everyone).
    """
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel: every member of the group must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # serializable rule attributes for this group entry
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # NULL/0 is treated the same as VOTE_RULE_ALL here
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4602 4604
4603 4605
class RepoReviewRule(Base, BaseModel):
    """
    Per-repository review rule: matches pull requests by source/target
    branch and changed-file glob/regex patterns and determines reviewers.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    # glob patterns by default; an `re:` prefix switches to raw regex
    # (see the hybrid properties below, which default empty values to '*')
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')
4628 4630
    def _validate_pattern(self, value):
        # compile-check the glob translated to a regex; raises re.error
        # on an invalid pattern so bad rules fail at assignment time
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/NULL stored value means "match everything"
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        # empty/NULL stored value means "match everything"
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        # empty/NULL stored value means "match everything"
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'
4658 4660
    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        Patterns are globs by default; a pattern starting with ``re:`` is
        used as a raw regular expression (the prefix is stripped). The
        ``*`` pattern short-circuits to "always matches".

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        # branch patterns are only consulted when at least one branch name
        # was supplied; otherwise the rule is branch-agnostic
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            # both sides must match for the rule to apply
            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            # any single changed file matching the pattern is enough
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches
4709 4711
4710 4712 @property
4711 4713 def review_users(self):
4712 4714 """ Returns the users which this rule applies to """
4713 4715
4714 4716 users = collections.OrderedDict()
4715 4717
4716 4718 for rule_user in self.rule_users:
4717 4719 if rule_user.user.active:
4718 4720 if rule_user.user not in users:
4719 4721 users[rule_user.user.username] = {
4720 4722 'user': rule_user.user,
4721 4723 'source': 'user',
4722 4724 'source_data': {},
4723 4725 'data': rule_user.rule_data()
4724 4726 }
4725 4727
4726 4728 for rule_user_group in self.rule_user_groups:
4727 4729 source_data = {
4728 4730 'user_group_id': rule_user_group.users_group.users_group_id,
4729 4731 'name': rule_user_group.users_group.users_group_name,
4730 4732 'members': len(rule_user_group.users_group.members)
4731 4733 }
4732 4734 for member in rule_user_group.users_group.members:
4733 4735 if member.user.active:
4734 4736 key = member.user.username
4735 4737 if key in users:
4736 4738 # skip this member as we have him already
4737 4739 # this prevents from override the "first" matched
4738 4740 # users with duplicates in multiple groups
4739 4741 continue
4740 4742
4741 4743 users[key] = {
4742 4744 'user': member.user,
4743 4745 'source': 'user_group',
4744 4746 'source_data': source_data,
4745 4747 'data': rule_user_group.rule_data()
4746 4748 }
4747 4749
4748 4750 return users
4749 4751
4750 4752 def user_group_vote_rule(self, user_id):
4751 4753
4752 4754 rules = []
4753 4755 if not self.rule_user_groups:
4754 4756 return rules
4755 4757
4756 4758 for user_group in self.rule_user_groups:
4757 4759 user_group_members = [x.user_id for x in user_group.users_group.members]
4758 4760 if user_id in user_group_members:
4759 4761 rules.append(user_group)
4760 4762 return rules
4761 4763
4762 4764 def __repr__(self):
4763 4765 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4764 4766 self.repo_review_rule_id, self.repo)
4765 4767
4766 4768
class ScheduleEntry(Base, BaseModel):
    """Persisted definition of a periodic task schedule (celery-beat style)."""
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` hybrid property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        """One of `schedule_types`; backed by the `schedule_type` column."""
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # NOTE: the format arguments were previously swapped, so the error
            # always showed the rejected value where the allowed set should be
            # and the (unset) current type where the value should be.
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """Build a stable sha1 uid out of task name + args + kwargs."""
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() over the dot-notation string sorts its
        # characters; looks unintended but is kept -- changing it would break
        # all existing task_uid values.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        # returns None if no entry with that unique name exists
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        # celery task name in dotted notation
        return self.task_dot_notation

    @property
    def schedule(self):
        # convert the stored raw definition into a celery schedule object
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        # serialize a (possibly mutation-tracked) JSON column back to a string
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
            if val:
                val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
4885 4887
4886 4888
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid in sync with the task definition on every UPDATE
    target.task_uid = ScheduleEntry.get_uid(target)
4890 4892
4891 4893
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute task_uid once on first INSERT
    target.task_uid = ScheduleEntry.get_uid(target)
4895 4897
4896 4898
class _BaseBranchPerms(BaseModel):
    # Mixin shared by per-user and per-user-group branch permission models:
    # stores a glob branch pattern plus a hash of it, and can match a branch
    # name against the pattern.

    @classmethod
    def compute_hash(cls, value):
        # stable hash used to index/compare stored branch patterns
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # None/empty stored pattern means "match everything"
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error early if the glob cannot be compiled as a regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """

        branch = branch or ''

        branch_matches = True
        if branch:
            # unlike RepoReviewRule, this pattern is always a glob (no 're:')
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
4935 4937
4936 4938
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    # Per-user branch permission rule attached to a repo permission entry.
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        # NOTE(review): a bare dict of table kwargs (not a tuple) -- accepted
        # by SQLAlchemy as __table_args__
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # links the branch rule to its parent per-user repo permission row
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    # rules are evaluated in rule_order
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
4962 4964
4963 4965
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    # Per-user-group branch permission rule attached to a repo group-permission
    # entry; mirrors UserToRepoBranchPermission for user groups.
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # links the branch rule to its parent per-user-group repo permission row
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    # rules are evaluated in rule_order
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # fixed copy/paste label: previously rendered `UserBranchPermission`,
        # which was indistinguishable from the per-user variant in logs
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
4989 4991
4990 4992
class UserBookmark(Base, BaseModel):
    # A user's personal quick-access bookmark. Each bookmark occupies a unique
    # position per user and points at a repository, a repository group, or a
    # plain redirect URL (presumably only one of these per row -- verify
    # against callers; the constraints below do not enforce exclusivity).
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        # the single bookmark of *user_id* at slot *position*, or None
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id):
        # all bookmarks of a user ordered by position; targets are eagerly
        # joined to avoid per-row queries when rendering the bookmark list
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc()) \
            .all()

    def __unicode__(self):
        return u'<UserBookmark(%d @ %r)>' % (self.position, self.redirect_url)
5032 5034
5033 5035
class FileStore(Base, BaseModel):
    # Metadata record for an uploaded artifact kept in the file store. Access
    # can optionally be restricted (check_acl) to a user / user group / repo /
    # repo group scope.
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', Integer(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # user who uploaded the artifact
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, check_acl=True, user_id=None,
               scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        # builds an unsaved FileStore entry; the caller is responsible for
        # adding it to the session and committing
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id
        return store_entry

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        # single UPDATE incrementing the counter and stamping last access;
        # NOTE(review): `FileStore().query()` instantiates needlessly --
        # `cls.query()` would do the same
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5119 5121
5120 5122
class DbMigrateVersion(Base, BaseModel):
    # Tracks the database schema version applied by the migration machinery.
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # NOTE(review): assumes a version row already exists; `ver` would be
        # None on an empty table (AttributeError) -- acceptable for a
        # debug-only helper
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5139 5141
5140 5142
class DbSession(Base, BaseModel):
    # Database-backed web session storage; note `namespace` is the primary
    # key while `id` is a plain integer column.
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
@@ -1,1745 +1,1742 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
# Data structure to hold the response data when updating commits during a pull
# request update.
# executed: whether the update ran; reason: UpdateFailureReason member;
# new/old: commit references; changes: commit-level diff;
# source_changed/target_changed: which side moved.
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
71 71
72 72
class PullRequestModel(BaseModel):
    # Model layer encapsulating pull-request business logic.

    cls = PullRequest

    # default number of diff context lines
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # lazily-translated user-facing messages for each update failure reason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # ref types a pull request may reference
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # subset of REF_TYPES allowed for PR updates (note: 'tag' excluded)
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
    def __get_pull_request(self, pull_request):
        # resolve an id or an instance of a (possibly versioned) pull request
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
    def get(self, pull_request):
        # public accessor around the name-mangled private resolver
        return self.__get_pull_request(pull_request)
139 139
    def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
                               opened_by=None, order_by=None,
                               order_dir='desc', only_created=True):
        """
        Build (but do not execute) the base query shared by the get_all /
        count_all family of methods.

        :param repo_name: target or source repo name
        :param source: when True filter by source repo, otherwise by target
        :param statuses: optional list of pull request statuses to include
        :param opened_by: optional list of author user ids
        :param order_by: key into the ordering map below
        :param order_dir: 'asc' or 'desc'
        :param only_created: restrict to fully-created (not in-flight) PRs
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
223 223
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
331 331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 332 order_by=None, order_dir='desc'):
333 333 q = PullRequest.query()
334 334 if user_id:
335 335 reviewers_subquery = Session().query(
336 336 PullRequestReviewers.pull_request_id).filter(
337 337 PullRequestReviewers.user_id == user_id).subquery()
338 338 user_filter = or_(
339 339 PullRequest.user_id == user_id,
340 340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 341 )
342 342 q = PullRequest.query().filter(user_filter)
343 343
344 344 # closed,opened
345 345 if statuses:
346 346 q = q.filter(PullRequest.status.in_(statuses))
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'title': PullRequest.title,
352 352 'updated_on_raw': PullRequest.updated_on,
353 353 'target_repo': PullRequest.target_repo_id
354 354 }
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_im_participating_in(self, user_id=None, statuses=None):
363 363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 364 return q.count()
365 365
366 366 def get_im_participating_in(
367 367 self, user_id=None, statuses=None, offset=0,
368 368 length=None, order_by=None, order_dir='desc'):
369 369 """
370 370 Get all Pull requests that i'm participating in, or i have opened
371 371 """
372 372
373 373 q = self._prepare_participating_query(
374 374 user_id, statuses=statuses, order_by=order_by,
375 375 order_dir=order_dir)
376 376
377 377 if length:
378 378 pull_requests = q.limit(length).offset(offset).all()
379 379 else:
380 380 pull_requests = q.all()
381 381
382 382 return pull_requests
383 383
    def get_versions(self, pull_request):
        """
        Returns all stored versions of the given pull request, ordered by
        version id ascending (oldest first).

        NOTE(review): the previous docstring claimed descending order, but
        the query orders by ``pull_request_version_id.asc()`` — callers such
        as ``pull_request.versions[-1]`` expect the latest version last.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
392 392
393 393 def get_pr_version(self, pull_request_id, version=None):
394 394 at_version = None
395 395
396 396 if version and version == 'latest':
397 397 pull_request_ver = PullRequest.get(pull_request_id)
398 398 pull_request_obj = pull_request_ver
399 399 _org_pull_request_obj = pull_request_obj
400 400 at_version = 'latest'
401 401 elif version:
402 402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 403 pull_request_obj = pull_request_ver
404 404 _org_pull_request_obj = pull_request_ver.pull_request
405 405 at_version = pull_request_ver.pull_request_version_id
406 406 else:
407 407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 408 pull_request_id)
409 409
410 410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 411 pull_request_obj, _org_pull_request_obj)
412 412
413 413 return _org_pull_request_obj, pull_request_obj, \
414 414 pull_request_display_obj, at_version
415 415
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request: persist it, register its reviewers, set
        the initial commit statuses, run an initial merge simulation and
        fire the creation hook plus an audit log entry.

        :param created_by: user (or user id) creating the pull request
        :param source_repo: source repository (or its name/id)
        :param source_ref: source ref string
        :param target_repo: target repository (or its name/id)
        :param target_ref: target ref string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, rules)
        :param title: pull request title
        :param description: optional description text
        :param description_renderer: renderer used for the description
        :param reviewer_data: rule data the reviewers were computed from
        :param translator: optional translation function
        :param auth_user: acting authed user; defaults to `created_by`'s
        :returns: the newly created ``PullRequest``
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        # persist in CREATING state; flush so the new row gets an id that
        # the reviewer rows below can reference
        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
        Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED):
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
527 527
528 528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 529 pull_request = self.__get_pull_request(pull_request)
530 530 target_scm = pull_request.target_repo.scm_instance()
531 531 if action == 'create':
532 532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 533 elif action == 'merge':
534 534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 535 elif action == 'close':
536 536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 537 elif action == 'review_status_change':
538 538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 539 elif action == 'update':
540 540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 541 elif action == 'comment':
542 542 # dummy hook ! for comment. We want this function to handle all cases
543 543 def trigger_hook(*args, **kwargs):
544 544 pass
545 545 comment = data['comment']
546 546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 547 else:
548 548 return
549 549
550 550 trigger_hook(
551 551 username=user.username,
552 552 repo_name=pull_request.target_repo.repo_name,
553 553 repo_alias=target_scm.alias,
554 554 pull_request=pull_request,
555 555 data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method is not dealing correctly yet with the lack of autoupdates
562 562 nor with the implicit target updates.
563 563 For example: if a commit in the source repo is already in the target it
564 564 will be reported anyways.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
    def merge_repo(self, pull_request, user, extras):
        """
        Merge the pull request into its target repository.

        On success the pull request gets a closing comment, is closed, and
        an audit entry is written; on failure the pull request is left
        untouched. The merge state object is returned either way.

        :param pull_request: the pull request to merge
        :param user: user performing the merge
        :param extras: hook/callback extras dict (mutated: user_agent is set)
        """
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        extras['user_agent'] = 'internal-merge'
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug("Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

            self._log_audit_action(
                'repo.pull_request.merge',
                {'merge_state': merge_state.__dict__},
                user, pull_request)

        else:
            log.warn("Merge failed, not updating the pull request.")
        return merge_state
592 592
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS merge of the pull request into its target,
        running inside a callback daemon so RhodeCode hooks fire.

        :param pull_request: pull request to merge
        :param user: user performing the merge (name/email go on the commit)
        :param extras: hook extras; passed through prepare_callback_daemon
        :param merge_msg: optional commit message template; defaults to
            ``vcs_settings.MERGE_MESSAGE_TMPL``
        :returns: merge state object from ``target_vcs.merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # format the (possibly custom) merge message template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        # make sure the target ref points at a current commit
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
633 633
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge revision on the pull
        request, add a closing comment, invalidate the target repo caches
        and fire the 'merge' hook.

        :param merge_state: merge result; its merge_ref supplies the commit id
        :param close_msg: optional custom closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
660 660 def update_commits(self, pull_request):
661 661 """
662 662 Get the updated list of commits for the pull request
663 663 and return the new pull request version and the list
664 664 of commits processed by this update action
665 665 """
666 666 pull_request = self.__get_pull_request(pull_request)
667 667 source_ref_type = pull_request.source_ref_parts.type
668 668 source_ref_name = pull_request.source_ref_parts.name
669 669 source_ref_id = pull_request.source_ref_parts.commit_id
670 670
671 671 target_ref_type = pull_request.target_ref_parts.type
672 672 target_ref_name = pull_request.target_ref_parts.name
673 673 target_ref_id = pull_request.target_ref_parts.commit_id
674 674
675 675 if not self.has_valid_update_type(pull_request):
676 676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 677 pull_request, source_ref_type)
678 678 return UpdateResponse(
679 679 executed=False,
680 680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 681 old=pull_request, new=None, changes=None,
682 682 source_changed=False, target_changed=False)
683 683
684 684 # source repo
685 685 source_repo = pull_request.source_repo.scm_instance()
686 source_repo.count() # cache rebuild
687 686
688 687 try:
689 688 source_commit = source_repo.get_commit(commit_id=source_ref_name)
690 689 except CommitDoesNotExistError:
691 690 return UpdateResponse(
692 691 executed=False,
693 692 reason=UpdateFailureReason.MISSING_SOURCE_REF,
694 693 old=pull_request, new=None, changes=None,
695 694 source_changed=False, target_changed=False)
696 695
697 696 source_changed = source_ref_id != source_commit.raw_id
698 697
699 698 # target repo
700 699 target_repo = pull_request.target_repo.scm_instance()
701 target_repo.count() # cache rebuild
702 700
703 701 try:
704 702 target_commit = target_repo.get_commit(commit_id=target_ref_name)
705 703 except CommitDoesNotExistError:
706 704 return UpdateResponse(
707 705 executed=False,
708 706 reason=UpdateFailureReason.MISSING_TARGET_REF,
709 707 old=pull_request, new=None, changes=None,
710 708 source_changed=False, target_changed=False)
711 709 target_changed = target_ref_id != target_commit.raw_id
712 710
713 711 if not (source_changed or target_changed):
714 712 log.debug("Nothing changed in pull request %s", pull_request)
715 713 return UpdateResponse(
716 714 executed=False,
717 715 reason=UpdateFailureReason.NO_CHANGE,
718 716 old=pull_request, new=None, changes=None,
719 717 source_changed=target_changed, target_changed=source_changed)
720 718
721 719 change_in_found = 'target repo' if target_changed else 'source repo'
722 720 log.debug('Updating pull request because of change in %s detected',
723 721 change_in_found)
724 722
725 723 # Finally there is a need for an update, in case of source change
726 724 # we create a new version, else just an update
727 725 if source_changed:
728 726 pull_request_version = self._create_version_from_snapshot(pull_request)
729 727 self._link_comments_to_version(pull_request_version)
730 728 else:
731 729 try:
732 730 ver = pull_request.versions[-1]
733 731 except IndexError:
734 732 ver = None
735 733
736 734 pull_request.pull_request_version_id = \
737 735 ver.pull_request_version_id if ver else None
738 736 pull_request_version = pull_request
739 737
740 738 try:
741 739 if target_ref_type in self.REF_TYPES:
742 740 target_commit = target_repo.get_commit(target_ref_name)
743 741 else:
744 742 target_commit = target_repo.get_commit(target_ref_id)
745 743 except CommitDoesNotExistError:
746 744 return UpdateResponse(
747 745 executed=False,
748 746 reason=UpdateFailureReason.MISSING_TARGET_REF,
749 747 old=pull_request, new=None, changes=None,
750 748 source_changed=source_changed, target_changed=target_changed)
751 749
752 750 # re-compute commit ids
753 751 old_commit_ids = pull_request.revisions
754 752 pre_load = ["author", "branch", "date", "message"]
755 753 commit_ranges = target_repo.compare(
756 754 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
757 755 pre_load=pre_load)
758 756
759 757 ancestor = source_repo.get_common_ancestor(
760 758 source_commit.raw_id, target_commit.raw_id, target_repo)
761 759
762 760 pull_request.source_ref = '%s:%s:%s' % (
763 761 source_ref_type, source_ref_name, source_commit.raw_id)
764 762 pull_request.target_ref = '%s:%s:%s' % (
765 763 target_ref_type, target_ref_name, ancestor)
766 764
767 765 pull_request.revisions = [
768 766 commit.raw_id for commit in reversed(commit_ranges)]
769 767 pull_request.updated_on = datetime.datetime.now()
770 768 Session().add(pull_request)
771 769 new_commit_ids = pull_request.revisions
772 770
773 771 old_diff_data, new_diff_data = self._generate_update_diffs(
774 772 pull_request, pull_request_version)
775 773
776 774 # calculate commit and file changes
777 775 changes = self._calculate_commit_id_changes(
778 776 old_commit_ids, new_commit_ids)
779 777 file_changes = self._calculate_file_changes(
780 778 old_diff_data, new_diff_data)
781 779
782 780 # set comments as outdated if DIFFS changed
783 781 CommentsModel().outdate_comments(
784 782 pull_request, old_diff_data=old_diff_data,
785 783 new_diff_data=new_diff_data)
786 784
787 785 commit_changes = (changes.added or changes.removed)
788 786 file_node_changes = (
789 787 file_changes.added or file_changes.modified or file_changes.removed)
790 788 pr_has_changes = commit_changes or file_node_changes
791 789
792 790 # Add an automatic comment to the pull request, in case
793 791 # anything has changed
794 792 if pr_has_changes:
795 793 update_comment = CommentsModel().create(
796 794 text=self._render_update_message(changes, file_changes),
797 795 repo=pull_request.target_repo,
798 796 user=pull_request.author,
799 797 pull_request=pull_request,
800 798 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
801 799
802 800 # Update status to "Under Review" for added commits
803 801 for commit_id in changes.added:
804 802 ChangesetStatusModel().set_status(
805 803 repo=pull_request.source_repo,
806 804 status=ChangesetStatus.STATUS_UNDER_REVIEW,
807 805 comment=update_comment,
808 806 user=pull_request.author,
809 807 pull_request=pull_request,
810 808 revision=commit_id)
811 809
812 810 log.debug(
813 811 'Updated pull request %s, added_ids: %s, common_ids: %s, '
814 812 'removed_ids: %s', pull_request.pull_request_id,
815 813 changes.added, changes.common, changes.removed)
816 814 log.debug(
817 815 'Updated pull request with the following file changes: %s',
818 816 file_changes)
819 817
820 818 log.info(
821 819 "Updated pull request %s from commit %s to commit %s, "
822 820 "stored new version %s of this pull request.",
823 821 pull_request.pull_request_id, source_ref_id,
824 822 pull_request.source_ref_parts.commit_id,
825 823 pull_request_version.pull_request_version_id)
826 824 Session().commit()
827 825 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
828 826
829 827 return UpdateResponse(
830 828 executed=True, reason=UpdateFailureReason.NONE,
831 829 old=pull_request, new=pull_request_version, changes=changes,
832 830 source_changed=source_changed, target_changed=target_changed)
833 831
    def _create_version_from_snapshot(self, pull_request):
        """
        Persist a ``PullRequestVersion`` snapshot copying the current state
        of the given pull request, and return it.
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        # creation time of the snapshot itself; updated_on mirrors the PR
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # copy over the last-merge bookkeeping fields as well
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        # flush so the version gets its id before callers link comments to it
        Session().flush()

        return version
861 859
862 860 def _generate_update_diffs(self, pull_request, pull_request_version):
863 861
864 862 diff_context = (
865 863 self.DIFF_CONTEXT +
866 864 CommentsModel.needed_extra_diff_context())
867 865 hide_whitespace_changes = False
868 866 source_repo = pull_request_version.source_repo
869 867 source_ref_id = pull_request_version.source_ref_parts.commit_id
870 868 target_ref_id = pull_request_version.target_ref_parts.commit_id
871 869 old_diff = self._get_diff_from_pr_or_version(
872 870 source_repo, source_ref_id, target_ref_id,
873 871 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
874 872
875 873 source_repo = pull_request.source_repo
876 874 source_ref_id = pull_request.source_ref_parts.commit_id
877 875 target_ref_id = pull_request.target_ref_parts.commit_id
878 876
879 877 new_diff = self._get_diff_from_pr_or_version(
880 878 source_repo, source_ref_id, target_ref_id,
881 879 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
882 880
883 881 old_diff_data = diffs.DiffProcessor(old_diff)
884 882 old_diff_data.prepare()
885 883 new_diff_data = diffs.DiffProcessor(new_diff)
886 884 new_diff_data.prepare()
887 885
888 886 return old_diff_data, new_diff_data
889 887
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                # NOTE: '== None' is intentional — SQLAlchemy translates it
                # to IS NULL; 'is None' would not build a SQL expression
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
915 913
916 914 def _calculate_commit_id_changes(self, old_ids, new_ids):
917 915 added = [x for x in new_ids if x not in old_ids]
918 916 common = [x for x in new_ids if x in old_ids]
919 917 removed = [x for x in old_ids if x not in new_ids]
920 918 total = new_ids
921 919 return ChangeTuple(added, common, removed, total)
922 920
923 921 def _calculate_file_changes(self, old_diff_data, new_diff_data):
924 922
925 923 old_files = OrderedDict()
926 924 for diff_data in old_diff_data.parsed_diff:
927 925 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
928 926
929 927 added_files = []
930 928 modified_files = []
931 929 removed_files = []
932 930 for diff_data in new_diff_data.parsed_diff:
933 931 new_filename = diff_data['filename']
934 932 new_hash = md5_safe(diff_data['raw_diff'])
935 933
936 934 old_hash = old_files.get(new_filename)
937 935 if not old_hash:
938 936 # file is not present in old diff, means it's added
939 937 added_files.append(new_filename)
940 938 else:
941 939 if new_hash != old_hash:
942 940 modified_files.append(new_filename)
943 941 # now remove a file from old, since we have seen it already
944 942 del old_files[new_filename]
945 943
946 944 # removed files is when there are present in old, but not in NEW,
947 945 # since we remove old files that are present in new diff, left-overs
948 946 # if any should be the removed files
949 947 removed_files.extend(old_files.keys())
950 948
951 949 return FileChangeTuple(added_files, modified_files, removed_files)
952 950
953 951 def _render_update_message(self, changes, file_changes):
954 952 """
955 953 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
956 954 so it's always looking the same disregarding on which default
957 955 renderer system is using.
958 956
959 957 :param changes: changes named tuple
960 958 :param file_changes: file changes named tuple
961 959
962 960 """
963 961 new_status = ChangesetStatus.get_status_lbl(
964 962 ChangesetStatus.STATUS_UNDER_REVIEW)
965 963
966 964 changed_files = (
967 965 file_changes.added + file_changes.modified + file_changes.removed)
968 966
969 967 params = {
970 968 'under_review_label': new_status,
971 969 'added_commits': changes.added,
972 970 'removed_commits': changes.removed,
973 971 'changed_files': changed_files,
974 972 'added_files': file_changes.added,
975 973 'modified_files': file_changes.modified,
976 974 'removed_files': file_changes.removed,
977 975 }
978 976 renderer = RstTemplateRenderer()
979 977 return renderer.render('pull_request_update.mako', **params)
980 978
    def edit(self, pull_request, title, description, description_renderer, user):
        """
        Update title/description of an open pull request and record an
        audit entry containing the previous data.

        :raises ValueError: when the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        # capture the pre-edit state for the audit log below
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        # NOTE(review): title is only replaced when truthy, but description
        # and renderer are always overwritten — confirm this is intended
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        pull_request.description_renderer = description_renderer
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)
995 993
996 994 def update_reviewers(self, pull_request, reviewer_data, user):
997 995 """
998 996 Update the reviewers in the pull request
999 997
1000 998 :param pull_request: the pr to update
1001 999 :param reviewer_data: list of tuples
1002 1000 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1003 1001 """
1004 1002 pull_request = self.__get_pull_request(pull_request)
1005 1003 if pull_request.is_closed():
1006 1004 raise ValueError('This pull request is closed')
1007 1005
1008 1006 reviewers = {}
1009 1007 for user_id, reasons, mandatory, rules in reviewer_data:
1010 1008 if isinstance(user_id, (int, compat.string_types)):
1011 1009 user_id = self._get_user(user_id).user_id
1012 1010 reviewers[user_id] = {
1013 1011 'reasons': reasons, 'mandatory': mandatory}
1014 1012
1015 1013 reviewers_ids = set(reviewers.keys())
1016 1014 current_reviewers = PullRequestReviewers.query()\
1017 1015 .filter(PullRequestReviewers.pull_request ==
1018 1016 pull_request).all()
1019 1017 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1020 1018
1021 1019 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1022 1020 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1023 1021
1024 1022 log.debug("Adding %s reviewers", ids_to_add)
1025 1023 log.debug("Removing %s reviewers", ids_to_remove)
1026 1024 changed = False
1027 1025 added_audit_reviewers = []
1028 1026 removed_audit_reviewers = []
1029 1027
1030 1028 for uid in ids_to_add:
1031 1029 changed = True
1032 1030 _usr = self._get_user(uid)
1033 1031 reviewer = PullRequestReviewers()
1034 1032 reviewer.user = _usr
1035 1033 reviewer.pull_request = pull_request
1036 1034 reviewer.reasons = reviewers[uid]['reasons']
1037 1035 # NOTE(marcink): mandatory shouldn't be changed now
1038 1036 # reviewer.mandatory = reviewers[uid]['reasons']
1039 1037 Session().add(reviewer)
1040 1038 added_audit_reviewers.append(reviewer.get_dict())
1041 1039
1042 1040 for uid in ids_to_remove:
1043 1041 changed = True
1044 1042 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1045 1043 # that prevents and fixes cases that we added the same reviewer twice.
1046 1044 # this CAN happen due to the lack of DB checks
1047 1045 reviewers = PullRequestReviewers.query()\
1048 1046 .filter(PullRequestReviewers.user_id == uid,
1049 1047 PullRequestReviewers.pull_request == pull_request)\
1050 1048 .all()
1051 1049
1052 1050 for obj in reviewers:
1053 1051 added_audit_reviewers.append(obj.get_dict())
1054 1052 Session().delete(obj)
1055 1053
1056 1054 if changed:
1057 1055 Session().expire_all()
1058 1056 pull_request.updated_on = datetime.datetime.now()
1059 1057 Session().add(pull_request)
1060 1058
1061 1059 # finally store audit logs
1062 1060 for user_data in added_audit_reviewers:
1063 1061 self._log_audit_action(
1064 1062 'repo.pull_request.reviewer.add', {'data': user_data},
1065 1063 user, pull_request)
1066 1064 for user_data in removed_audit_reviewers:
1067 1065 self._log_audit_action(
1068 1066 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1069 1067 user, pull_request)
1070 1068
1071 1069 self.notify_reviewers(pull_request, ids_to_add)
1072 1070 return ids_to_add, ids_to_remove
1073 1071
1074 1072 def get_url(self, pull_request, request=None, permalink=False):
1075 1073 if not request:
1076 1074 request = get_current_request()
1077 1075
1078 1076 if permalink:
1079 1077 return request.route_url(
1080 1078 'pull_requests_global',
1081 1079 pull_request_id=pull_request.pull_request_id,)
1082 1080 else:
1083 1081 return request.route_url('pullrequest_show',
1084 1082 repo_name=safe_str(pull_request.target_repo.repo_name),
1085 1083 pull_request_id=pull_request.pull_request_id,)
1086 1084
1087 1085 def get_shadow_clone_url(self, pull_request, request=None):
1088 1086 """
1089 1087 Returns qualified url pointing to the shadow repository. If this pull
1090 1088 request is closed there is no shadow repository and ``None`` will be
1091 1089 returned.
1092 1090 """
1093 1091 if pull_request.is_closed():
1094 1092 return None
1095 1093 else:
1096 1094 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1097 1095 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1098 1096
1099 1097 def notify_reviewers(self, pull_request, reviewers_ids):
1100 1098 # notification to reviewers
1101 1099 if not reviewers_ids:
1102 1100 return
1103 1101
1104 1102 pull_request_obj = pull_request
1105 1103 # get the current participants of this pull request
1106 1104 recipients = reviewers_ids
1107 1105 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1108 1106
1109 1107 pr_source_repo = pull_request_obj.source_repo
1110 1108 pr_target_repo = pull_request_obj.target_repo
1111 1109
1112 1110 pr_url = h.route_url('pullrequest_show',
1113 1111 repo_name=pr_target_repo.repo_name,
1114 1112 pull_request_id=pull_request_obj.pull_request_id,)
1115 1113
1116 1114 # set some variables for email notification
1117 1115 pr_target_repo_url = h.route_url(
1118 1116 'repo_summary', repo_name=pr_target_repo.repo_name)
1119 1117
1120 1118 pr_source_repo_url = h.route_url(
1121 1119 'repo_summary', repo_name=pr_source_repo.repo_name)
1122 1120
1123 1121 # pull request specifics
1124 1122 pull_request_commits = [
1125 1123 (x.raw_id, x.message)
1126 1124 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1127 1125
1128 1126 kwargs = {
1129 1127 'user': pull_request.author,
1130 1128 'pull_request': pull_request_obj,
1131 1129 'pull_request_commits': pull_request_commits,
1132 1130
1133 1131 'pull_request_target_repo': pr_target_repo,
1134 1132 'pull_request_target_repo_url': pr_target_repo_url,
1135 1133
1136 1134 'pull_request_source_repo': pr_source_repo,
1137 1135 'pull_request_source_repo_url': pr_source_repo_url,
1138 1136
1139 1137 'pull_request_url': pr_url,
1140 1138 }
1141 1139
1142 1140 # pre-generate the subject for notification itself
1143 1141 (subject,
1144 1142 _h, _e, # we don't care about those
1145 1143 body_plaintext) = EmailNotificationModel().render_email(
1146 1144 notification_type, **kwargs)
1147 1145
1148 1146 # create notification objects, and emails
1149 1147 NotificationModel().create(
1150 1148 created_by=pull_request.author,
1151 1149 notification_subject=subject,
1152 1150 notification_body=body_plaintext,
1153 1151 notification_type=notification_type,
1154 1152 recipients=recipients,
1155 1153 email_kwargs=kwargs,
1156 1154 )
1157 1155
1158 1156 def delete(self, pull_request, user):
1159 1157 pull_request = self.__get_pull_request(pull_request)
1160 1158 old_data = pull_request.get_api_data(with_merge_state=False)
1161 1159 self._cleanup_merge_workspace(pull_request)
1162 1160 self._log_audit_action(
1163 1161 'repo.pull_request.delete', {'old_data': old_data},
1164 1162 user, pull_request)
1165 1163 Session().delete(pull_request)
1166 1164
1167 1165 def close_pull_request(self, pull_request, user):
1168 1166 pull_request = self.__get_pull_request(pull_request)
1169 1167 self._cleanup_merge_workspace(pull_request)
1170 1168 pull_request.status = PullRequest.STATUS_CLOSED
1171 1169 pull_request.updated_on = datetime.datetime.now()
1172 1170 Session().add(pull_request)
1173 1171 self.trigger_pull_request_hook(
1174 1172 pull_request, pull_request.author, 'close')
1175 1173
1176 1174 pr_data = pull_request.get_api_data(with_merge_state=False)
1177 1175 self._log_audit_action(
1178 1176 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1179 1177
1180 1178 def close_pull_request_with_comment(
1181 1179 self, pull_request, user, repo, message=None, auth_user=None):
1182 1180
1183 1181 pull_request_review_status = pull_request.calculated_review_status()
1184 1182
1185 1183 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1186 1184 # approved only if we have voting consent
1187 1185 status = ChangesetStatus.STATUS_APPROVED
1188 1186 else:
1189 1187 status = ChangesetStatus.STATUS_REJECTED
1190 1188 status_lbl = ChangesetStatus.get_status_lbl(status)
1191 1189
1192 1190 default_message = (
1193 1191 'Closing with status change {transition_icon} {status}.'
1194 1192 ).format(transition_icon='>', status=status_lbl)
1195 1193 text = message or default_message
1196 1194
1197 1195 # create a comment, and link it to new status
1198 1196 comment = CommentsModel().create(
1199 1197 text=text,
1200 1198 repo=repo.repo_id,
1201 1199 user=user.user_id,
1202 1200 pull_request=pull_request.pull_request_id,
1203 1201 status_change=status_lbl,
1204 1202 status_change_type=status,
1205 1203 closing_pr=True,
1206 1204 auth_user=auth_user,
1207 1205 )
1208 1206
1209 1207 # calculate old status before we change it
1210 1208 old_calculated_status = pull_request.calculated_review_status()
1211 1209 ChangesetStatusModel().set_status(
1212 1210 repo.repo_id,
1213 1211 status,
1214 1212 user.user_id,
1215 1213 comment=comment,
1216 1214 pull_request=pull_request.pull_request_id
1217 1215 )
1218 1216
1219 1217 Session().flush()
1220 1218 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1221 1219 # we now calculate the status of pull request again, and based on that
1222 1220 # calculation trigger status change. This might happen in cases
1223 1221 # that non-reviewer admin closes a pr, which means his vote doesn't
1224 1222 # change the status, while if he's a reviewer this might change it.
1225 1223 calculated_status = pull_request.calculated_review_status()
1226 1224 if old_calculated_status != calculated_status:
1227 1225 self.trigger_pull_request_hook(
1228 1226 pull_request, user, 'review_status_change',
1229 1227 data={'status': calculated_status})
1230 1228
1231 1229 # finally close the PR
1232 1230 PullRequestModel().close_pull_request(
1233 1231 pull_request.pull_request_id, user)
1234 1232
1235 1233 return comment, status
1236 1234
1237 1235 def merge_status(self, pull_request, translator=None,
1238 1236 force_shadow_repo_refresh=False):
1239 1237 _ = translator or get_current_request().translate
1240 1238
1241 1239 if not self._is_merge_enabled(pull_request):
1242 1240 return False, _('Server-side pull request merging is disabled.')
1243 1241 if pull_request.is_closed():
1244 1242 return False, _('This pull request is closed.')
1245 1243 merge_possible, msg = self._check_repo_requirements(
1246 1244 target=pull_request.target_repo, source=pull_request.source_repo,
1247 1245 translator=_)
1248 1246 if not merge_possible:
1249 1247 return merge_possible, msg
1250 1248
1251 1249 try:
1252 1250 resp = self._try_merge(
1253 1251 pull_request,
1254 1252 force_shadow_repo_refresh=force_shadow_repo_refresh)
1255 1253 log.debug("Merge response: %s", resp)
1256 1254 status = resp.possible, resp.merge_status_message
1257 1255 except NotImplementedError:
1258 1256 status = False, _('Pull request merging is not supported.')
1259 1257
1260 1258 return status
1261 1259
1262 1260 def _check_repo_requirements(self, target, source, translator):
1263 1261 """
1264 1262 Check if `target` and `source` have compatible requirements.
1265 1263
1266 1264 Currently this is just checking for largefiles.
1267 1265 """
1268 1266 _ = translator
1269 1267 target_has_largefiles = self._has_largefiles(target)
1270 1268 source_has_largefiles = self._has_largefiles(source)
1271 1269 merge_possible = True
1272 1270 message = u''
1273 1271
1274 1272 if target_has_largefiles != source_has_largefiles:
1275 1273 merge_possible = False
1276 1274 if source_has_largefiles:
1277 1275 message = _(
1278 1276 'Target repository large files support is disabled.')
1279 1277 else:
1280 1278 message = _(
1281 1279 'Source repository large files support is disabled.')
1282 1280
1283 1281 return merge_possible, message
1284 1282
1285 1283 def _has_largefiles(self, repo):
1286 1284 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1287 1285 'extensions', 'largefiles')
1288 1286 return largefiles_ui and largefiles_ui[0].active
1289 1287
1290 1288 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1291 1289 """
1292 1290 Try to merge the pull request and return the merge status.
1293 1291 """
1294 1292 log.debug(
1295 1293 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1296 1294 pull_request.pull_request_id, force_shadow_repo_refresh)
1297 1295 target_vcs = pull_request.target_repo.scm_instance()
1298 1296 # Refresh the target reference.
1299 1297 try:
1300 1298 target_ref = self._refresh_reference(
1301 1299 pull_request.target_ref_parts, target_vcs)
1302 1300 except CommitDoesNotExistError:
1303 1301 merge_state = MergeResponse(
1304 1302 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1305 1303 metadata={'target_ref': pull_request.target_ref_parts})
1306 1304 return merge_state
1307 1305
1308 1306 target_locked = pull_request.target_repo.locked
1309 1307 if target_locked and target_locked[0]:
1310 1308 locked_by = 'user:{}'.format(target_locked[0])
1311 1309 log.debug("The target repository is locked by %s.", locked_by)
1312 1310 merge_state = MergeResponse(
1313 1311 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1314 1312 metadata={'locked_by': locked_by})
1315 1313 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1316 1314 pull_request, target_ref):
1317 1315 log.debug("Refreshing the merge status of the repository.")
1318 1316 merge_state = self._refresh_merge_state(
1319 1317 pull_request, target_vcs, target_ref)
1320 1318 else:
1321 1319 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1322 1320 metadata = {
1323 1321 'target_ref': pull_request.target_ref_parts,
1324 1322 'source_ref': pull_request.source_ref_parts,
1325 1323 }
1326 1324 if not possible and target_ref.type == 'branch':
1327 1325 # NOTE(marcink): case for mercurial multiple heads on branch
1328 1326 heads = target_vcs._heads(target_ref.name)
1329 1327 if len(heads) != 1:
1330 1328 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1331 1329 metadata.update({
1332 1330 'heads': heads
1333 1331 })
1334 1332 merge_state = MergeResponse(
1335 1333 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1336 1334
1337 1335 return merge_state
1338 1336
1339 1337 def _refresh_reference(self, reference, vcs_repository):
1340 1338 if reference.type in self.UPDATABLE_REF_TYPES:
1341 1339 name_or_id = reference.name
1342 1340 else:
1343 1341 name_or_id = reference.commit_id
1344 1342
1345 vcs_repository.count() # cache rebuild
1346 1343 refreshed_commit = vcs_repository.get_commit(name_or_id)
1347 1344 refreshed_reference = Reference(
1348 1345 reference.type, reference.name, refreshed_commit.raw_id)
1349 1346 return refreshed_reference
1350 1347
1351 1348 def _needs_merge_state_refresh(self, pull_request, target_reference):
1352 1349 return not(
1353 1350 pull_request.revisions and
1354 1351 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1355 1352 target_reference.commit_id == pull_request._last_merge_target_rev)
1356 1353
1357 1354 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1358 1355 workspace_id = self._workspace_id(pull_request)
1359 1356 source_vcs = pull_request.source_repo.scm_instance()
1360 1357 repo_id = pull_request.target_repo.repo_id
1361 1358 use_rebase = self._use_rebase_for_merging(pull_request)
1362 1359 close_branch = self._close_branch_before_merging(pull_request)
1363 1360 merge_state = target_vcs.merge(
1364 1361 repo_id, workspace_id,
1365 1362 target_reference, source_vcs, pull_request.source_ref_parts,
1366 1363 dry_run=True, use_rebase=use_rebase,
1367 1364 close_branch=close_branch)
1368 1365
1369 1366 # Do not store the response if there was an unknown error.
1370 1367 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1371 1368 pull_request._last_merge_source_rev = \
1372 1369 pull_request.source_ref_parts.commit_id
1373 1370 pull_request._last_merge_target_rev = target_reference.commit_id
1374 1371 pull_request.last_merge_status = merge_state.failure_reason
1375 1372 pull_request.shadow_merge_ref = merge_state.merge_ref
1376 1373 Session().add(pull_request)
1377 1374 Session().commit()
1378 1375
1379 1376 return merge_state
1380 1377
1381 1378 def _workspace_id(self, pull_request):
1382 1379 workspace_id = 'pr-%s' % pull_request.pull_request_id
1383 1380 return workspace_id
1384 1381
1385 1382 def generate_repo_data(self, repo, commit_id=None, branch=None,
1386 1383 bookmark=None, translator=None):
1387 1384 from rhodecode.model.repo import RepoModel
1388 1385
1389 1386 all_refs, selected_ref = \
1390 1387 self._get_repo_pullrequest_sources(
1391 1388 repo.scm_instance(), commit_id=commit_id,
1392 1389 branch=branch, bookmark=bookmark, translator=translator)
1393 1390
1394 1391 refs_select2 = []
1395 1392 for element in all_refs:
1396 1393 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1397 1394 refs_select2.append({'text': element[1], 'children': children})
1398 1395
1399 1396 return {
1400 1397 'user': {
1401 1398 'user_id': repo.user.user_id,
1402 1399 'username': repo.user.username,
1403 1400 'firstname': repo.user.first_name,
1404 1401 'lastname': repo.user.last_name,
1405 1402 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1406 1403 },
1407 1404 'name': repo.repo_name,
1408 1405 'link': RepoModel().get_url(repo),
1409 1406 'description': h.chop_at_smart(repo.description_safe, '\n'),
1410 1407 'refs': {
1411 1408 'all_refs': all_refs,
1412 1409 'selected_ref': selected_ref,
1413 1410 'select2_refs': refs_select2
1414 1411 }
1415 1412 }
1416 1413
1417 1414 def generate_pullrequest_title(self, source, source_ref, target):
1418 1415 return u'{source}#{at_ref} to {target}'.format(
1419 1416 source=source,
1420 1417 at_ref=source_ref,
1421 1418 target=target,
1422 1419 )
1423 1420
1424 1421 def _cleanup_merge_workspace(self, pull_request):
1425 1422 # Merging related cleanup
1426 1423 repo_id = pull_request.target_repo.repo_id
1427 1424 target_scm = pull_request.target_repo.scm_instance()
1428 1425 workspace_id = self._workspace_id(pull_request)
1429 1426
1430 1427 try:
1431 1428 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1432 1429 except NotImplementedError:
1433 1430 pass
1434 1431
1435 1432 def _get_repo_pullrequest_sources(
1436 1433 self, repo, commit_id=None, branch=None, bookmark=None,
1437 1434 translator=None):
1438 1435 """
1439 1436 Return a structure with repo's interesting commits, suitable for
1440 1437 the selectors in pullrequest controller
1441 1438
1442 1439 :param commit_id: a commit that must be in the list somehow
1443 1440 and selected by default
1444 1441 :param branch: a branch that must be in the list and selected
1445 1442 by default - even if closed
1446 1443 :param bookmark: a bookmark that must be in the list and selected
1447 1444 """
1448 1445 _ = translator or get_current_request().translate
1449 1446
1450 1447 commit_id = safe_str(commit_id) if commit_id else None
1451 1448 branch = safe_unicode(branch) if branch else None
1452 1449 bookmark = safe_unicode(bookmark) if bookmark else None
1453 1450
1454 1451 selected = None
1455 1452
1456 1453 # order matters: first source that has commit_id in it will be selected
1457 1454 sources = []
1458 1455 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1459 1456 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1460 1457
1461 1458 if commit_id:
1462 1459 ref_commit = (h.short_id(commit_id), commit_id)
1463 1460 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1464 1461
1465 1462 sources.append(
1466 1463 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1467 1464 )
1468 1465
1469 1466 groups = []
1470 1467
1471 1468 for group_key, ref_list, group_name, match in sources:
1472 1469 group_refs = []
1473 1470 for ref_name, ref_id in ref_list:
1474 1471 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1475 1472 group_refs.append((ref_key, ref_name))
1476 1473
1477 1474 if not selected:
1478 1475 if set([commit_id, match]) & set([ref_id, ref_name]):
1479 1476 selected = ref_key
1480 1477
1481 1478 if group_refs:
1482 1479 groups.append((group_refs, group_name))
1483 1480
1484 1481 if not selected:
1485 1482 ref = commit_id or branch or bookmark
1486 1483 if ref:
1487 1484 raise CommitDoesNotExistError(
1488 1485 u'No commit refs could be found matching: {}'.format(ref))
1489 1486 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1490 1487 selected = u'branch:{}:{}'.format(
1491 1488 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1492 1489 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1493 1490 )
1494 1491 elif repo.commit_ids:
1495 1492 # make the user select in this case
1496 1493 selected = None
1497 1494 else:
1498 1495 raise EmptyRepositoryError()
1499 1496 return groups, selected
1500 1497
1501 1498 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1502 1499 hide_whitespace_changes, diff_context):
1503 1500
1504 1501 return self._get_diff_from_pr_or_version(
1505 1502 source_repo, source_ref_id, target_ref_id,
1506 1503 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1507 1504
1508 1505 def _get_diff_from_pr_or_version(
1509 1506 self, source_repo, source_ref_id, target_ref_id,
1510 1507 hide_whitespace_changes, diff_context):
1511 1508
1512 1509 target_commit = source_repo.get_commit(
1513 1510 commit_id=safe_str(target_ref_id))
1514 1511 source_commit = source_repo.get_commit(
1515 1512 commit_id=safe_str(source_ref_id))
1516 1513 if isinstance(source_repo, Repository):
1517 1514 vcs_repo = source_repo.scm_instance()
1518 1515 else:
1519 1516 vcs_repo = source_repo
1520 1517
1521 1518 # TODO: johbo: In the context of an update, we cannot reach
1522 1519 # the old commit anymore with our normal mechanisms. It needs
1523 1520 # some sort of special support in the vcs layer to avoid this
1524 1521 # workaround.
1525 1522 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1526 1523 vcs_repo.alias == 'git'):
1527 1524 source_commit.raw_id = safe_str(source_ref_id)
1528 1525
1529 1526 log.debug('calculating diff between '
1530 1527 'source_ref:%s and target_ref:%s for repo `%s`',
1531 1528 target_ref_id, source_ref_id,
1532 1529 safe_unicode(vcs_repo.path))
1533 1530
1534 1531 vcs_diff = vcs_repo.get_diff(
1535 1532 commit1=target_commit, commit2=source_commit,
1536 1533 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1537 1534 return vcs_diff
1538 1535
1539 1536 def _is_merge_enabled(self, pull_request):
1540 1537 return self._get_general_setting(
1541 1538 pull_request, 'rhodecode_pr_merge_enabled')
1542 1539
1543 1540 def _use_rebase_for_merging(self, pull_request):
1544 1541 repo_type = pull_request.target_repo.repo_type
1545 1542 if repo_type == 'hg':
1546 1543 return self._get_general_setting(
1547 1544 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1548 1545 elif repo_type == 'git':
1549 1546 return self._get_general_setting(
1550 1547 pull_request, 'rhodecode_git_use_rebase_for_merging')
1551 1548
1552 1549 return False
1553 1550
1554 1551 def _close_branch_before_merging(self, pull_request):
1555 1552 repo_type = pull_request.target_repo.repo_type
1556 1553 if repo_type == 'hg':
1557 1554 return self._get_general_setting(
1558 1555 pull_request, 'rhodecode_hg_close_branch_before_merging')
1559 1556 elif repo_type == 'git':
1560 1557 return self._get_general_setting(
1561 1558 pull_request, 'rhodecode_git_close_branch_before_merging')
1562 1559
1563 1560 return False
1564 1561
1565 1562 def _get_general_setting(self, pull_request, settings_key, default=False):
1566 1563 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1567 1564 settings = settings_model.get_general_settings()
1568 1565 return settings.get(settings_key, default)
1569 1566
1570 1567 def _log_audit_action(self, action, action_data, user, pull_request):
1571 1568 audit_logger.store(
1572 1569 action=action,
1573 1570 action_data=action_data,
1574 1571 user=user,
1575 1572 repo=pull_request.target_repo)
1576 1573
1577 1574 def get_reviewer_functions(self):
1578 1575 """
1579 1576 Fetches functions for validation and fetching default reviewers.
1580 1577 If available we use the EE package, else we fallback to CE
1581 1578 package functions
1582 1579 """
1583 1580 try:
1584 1581 from rc_reviewers.utils import get_default_reviewers_data
1585 1582 from rc_reviewers.utils import validate_default_reviewers
1586 1583 except ImportError:
1587 1584 from rhodecode.apps.repository.utils import get_default_reviewers_data
1588 1585 from rhodecode.apps.repository.utils import validate_default_reviewers
1589 1586
1590 1587 return get_default_reviewers_data, validate_default_reviewers
1591 1588
1592 1589
1593 1590 class MergeCheck(object):
1594 1591 """
1595 1592 Perform Merge Checks and returns a check object which stores information
1596 1593 about merge errors, and merge conditions
1597 1594 """
1598 1595 TODO_CHECK = 'todo'
1599 1596 PERM_CHECK = 'perm'
1600 1597 REVIEW_CHECK = 'review'
1601 1598 MERGE_CHECK = 'merge'
1602 1599
1603 1600 def __init__(self):
1604 1601 self.review_status = None
1605 1602 self.merge_possible = None
1606 1603 self.merge_msg = ''
1607 1604 self.failed = None
1608 1605 self.errors = []
1609 1606 self.error_details = OrderedDict()
1610 1607
1611 1608 def push_error(self, error_type, message, error_key, details):
1612 1609 self.failed = True
1613 1610 self.errors.append([error_type, message])
1614 1611 self.error_details[error_key] = dict(
1615 1612 details=details,
1616 1613 error_type=error_type,
1617 1614 message=message
1618 1615 )
1619 1616
1620 1617 @classmethod
1621 1618 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1622 1619 force_shadow_repo_refresh=False):
1623 1620 _ = translator
1624 1621 merge_check = cls()
1625 1622
1626 1623 # permissions to merge
1627 1624 user_allowed_to_merge = PullRequestModel().check_user_merge(
1628 1625 pull_request, auth_user)
1629 1626 if not user_allowed_to_merge:
1630 1627 log.debug("MergeCheck: cannot merge, approval is pending.")
1631 1628
1632 1629 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1633 1630 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1634 1631 if fail_early:
1635 1632 return merge_check
1636 1633
1637 1634 # permission to merge into the target branch
1638 1635 target_commit_id = pull_request.target_ref_parts.commit_id
1639 1636 if pull_request.target_ref_parts.type == 'branch':
1640 1637 branch_name = pull_request.target_ref_parts.name
1641 1638 else:
1642 1639 # for mercurial we can always figure out the branch from the commit
1643 1640 # in case of bookmark
1644 1641 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1645 1642 branch_name = target_commit.branch
1646 1643
1647 1644 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1648 1645 pull_request.target_repo.repo_name, branch_name)
1649 1646 if branch_perm and branch_perm == 'branch.none':
1650 1647 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1651 1648 branch_name, rule)
1652 1649 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1653 1650 if fail_early:
1654 1651 return merge_check
1655 1652
1656 1653 # review status, must be always present
1657 1654 review_status = pull_request.calculated_review_status()
1658 1655 merge_check.review_status = review_status
1659 1656
1660 1657 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1661 1658 if not status_approved:
1662 1659 log.debug("MergeCheck: cannot merge, approval is pending.")
1663 1660
1664 1661 msg = _('Pull request reviewer approval is pending.')
1665 1662
1666 1663 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1667 1664
1668 1665 if fail_early:
1669 1666 return merge_check
1670 1667
1671 1668 # left over TODOs
1672 1669 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1673 1670 if todos:
1674 1671 log.debug("MergeCheck: cannot merge, {} "
1675 1672 "unresolved TODOs left.".format(len(todos)))
1676 1673
1677 1674 if len(todos) == 1:
1678 1675 msg = _('Cannot merge, {} TODO still not resolved.').format(
1679 1676 len(todos))
1680 1677 else:
1681 1678 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1682 1679 len(todos))
1683 1680
1684 1681 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1685 1682
1686 1683 if fail_early:
1687 1684 return merge_check
1688 1685
1689 1686 # merge possible, here is the filesystem simulation + shadow repo
1690 1687 merge_status, msg = PullRequestModel().merge_status(
1691 1688 pull_request, translator=translator,
1692 1689 force_shadow_repo_refresh=force_shadow_repo_refresh)
1693 1690 merge_check.merge_possible = merge_status
1694 1691 merge_check.merge_msg = msg
1695 1692 if not merge_status:
1696 1693 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1697 1694 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1698 1695
1699 1696 if fail_early:
1700 1697 return merge_check
1701 1698
1702 1699 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1703 1700 return merge_check
1704 1701
1705 1702 @classmethod
1706 1703 def get_merge_conditions(cls, pull_request, translator):
1707 1704 _ = translator
1708 1705 merge_details = {}
1709 1706
1710 1707 model = PullRequestModel()
1711 1708 use_rebase = model._use_rebase_for_merging(pull_request)
1712 1709
1713 1710 if use_rebase:
1714 1711 merge_details['merge_strategy'] = dict(
1715 1712 details={},
1716 1713 message=_('Merge strategy: rebase')
1717 1714 )
1718 1715 else:
1719 1716 merge_details['merge_strategy'] = dict(
1720 1717 details={},
1721 1718 message=_('Merge strategy: explicit merge commit')
1722 1719 )
1723 1720
1724 1721 close_branch = model._close_branch_before_merging(pull_request)
1725 1722 if close_branch:
1726 1723 repo_type = pull_request.target_repo.repo_type
1727 1724 close_msg = ''
1728 1725 if repo_type == 'hg':
1729 1726 close_msg = _('Source branch will be closed after merge.')
1730 1727 elif repo_type == 'git':
1731 1728 close_msg = _('Source branch will be deleted after merge.')
1732 1729
1733 1730 merge_details['close_branch'] = dict(
1734 1731 details={},
1735 1732 message=close_msg
1736 1733 )
1737 1734
1738 1735 return merge_details
1739 1736
1740 1737
1741 1738 ChangeTuple = collections.namedtuple(
1742 1739 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1743 1740
1744 1741 FileChangeTuple = collections.namedtuple(
1745 1742 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,196 +1,195 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import stat
23 23 import sys
24 24
25 25 import pytest
26 26 from mock import Mock, patch, DEFAULT
27 27
28 28 import rhodecode
29 29 from rhodecode.model import db, scm
30 30 from rhodecode.tests import no_newline_id_generator
31 31
32 32
33 33 def test_scm_instance_config(backend):
34 34 repo = backend.create_repo()
35 35 with patch.multiple('rhodecode.model.db.Repository',
36 36 _get_instance=DEFAULT,
37 37 _get_instance_cached=DEFAULT) as mocks:
38 38 repo.scm_instance()
39 39 mocks['_get_instance'].assert_called_with(
40 40 config=None, cache=False)
41 41
42 42 config = {'some': 'value'}
43 43 repo.scm_instance(config=config)
44 44 mocks['_get_instance'].assert_called_with(
45 45 config=config, cache=False)
46 46
47 47 with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
48 48 repo.scm_instance(config=config)
49 49 mocks['_get_instance_cached'].assert_called()
50 50
51 51
52 52 def test__get_instance_config(backend):
53 53 repo = backend.create_repo()
54 54 vcs_class = Mock()
55 55 with patch.multiple('rhodecode.lib.vcs.backends',
56 56 get_scm=DEFAULT,
57 57 get_backend=DEFAULT) as mocks:
58 58 mocks['get_scm'].return_value = backend.alias
59 59 mocks['get_backend'].return_value = vcs_class
60 60 with patch('rhodecode.model.db.Repository._config') as config_mock:
61 61 repo._get_instance()
62 62 vcs_class.assert_called_with(
63 63 repo_path=repo.repo_full_path, config=config_mock,
64 64 create=False, with_wire={'cache': True})
65 65
66 66 new_config = {'override': 'old_config'}
67 67 repo._get_instance(config=new_config)
68 68 vcs_class.assert_called_with(
69 69 repo_path=repo.repo_full_path, config=new_config, create=False,
70 70 with_wire={'cache': True})
71 71
72 72
73 73 def test_mark_for_invalidation_config(backend):
74 74 repo = backend.create_repo()
75 75 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
76 76 scm.ScmModel().mark_for_invalidation(repo.repo_name)
77 77 _, kwargs = _mock.call_args
78 78 assert kwargs['config'].__dict__ == repo._config.__dict__
79 79
80 80
81 81 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
82 82 commits = [{'message': 'A'}, {'message': 'B'}]
83 83 repo = backend.create_repo(commits=commits)
84 84 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
85 85 assert repo.changeset_cache['revision'] == 1
86 86
87 87
88 88 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
89 89 repo = backend.create_repo()
90 90 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
91 91 assert repo.changeset_cache['revision'] == -1
92 92
93 93
94 94 def test_strip_with_multiple_heads(backend_hg):
95 95 commits = [
96 96 {'message': 'A'},
97 97 {'message': 'a'},
98 98 {'message': 'b'},
99 99 {'message': 'B', 'parents': ['A']},
100 100 {'message': 'a1'},
101 101 ]
102 102 repo = backend_hg.create_repo(commits=commits)
103 103 commit_ids = backend_hg.commit_ids
104 104
105 105 model = scm.ScmModel()
106 106 model.strip(repo, commit_ids['b'], branch=None)
107 107
108 108 vcs_repo = repo.scm_instance()
109 109 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
110 110 assert len(rest_commit_ids) == 4
111 111 assert commit_ids['b'] not in rest_commit_ids
112 112
113 113
114 114 def test_strip_with_single_heads(backend_hg):
115 115 commits = [
116 116 {'message': 'A'},
117 117 {'message': 'a'},
118 118 {'message': 'b'},
119 119 ]
120 120 repo = backend_hg.create_repo(commits=commits)
121 121 commit_ids = backend_hg.commit_ids
122 122
123 123 model = scm.ScmModel()
124 124 model.strip(repo, commit_ids['b'], branch=None)
125 125
126 126 vcs_repo = repo.scm_instance()
127 127 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
128 128 assert len(rest_commit_ids) == 2
129 129 assert commit_ids['b'] not in rest_commit_ids
130 130
131 131
132 def test_get_nodes_returns_unicode_flat(backend_random):
133 repo = backend_random.repo
134 directories, files = scm.ScmModel().get_nodes(
135 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
136 flat=True)
132 def test_get_nodes_returns_unicode_flat(backend):
133 repo = backend.repo
134 commit_id = repo.get_commit(commit_idx=0).raw_id
135 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=True)
137 136 assert_contains_only_unicode(directories)
138 137 assert_contains_only_unicode(files)
139 138
140 139
141 def test_get_nodes_returns_unicode_non_flat(backend_random):
142 repo = backend_random.repo
143 directories, files = scm.ScmModel().get_nodes(
144 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
145 flat=False)
140 def test_get_nodes_returns_unicode_non_flat(backend):
141 repo = backend.repo
142 commit_id = repo.get_commit(commit_idx=0).raw_id
143
144 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=False)
146 145 # johbo: Checking only the names for now, since that is the critical
147 146 # part.
148 147 assert_contains_only_unicode([d['name'] for d in directories])
149 148 assert_contains_only_unicode([f['name'] for f in files])
150 149
151 150
152 151 def test_get_nodes_max_file_bytes(backend_random):
153 152 repo = backend_random.repo
154 153 max_file_bytes = 10
155 154 directories, files = scm.ScmModel().get_nodes(
156 155 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
157 156 extended_info=True, flat=False)
158 157 assert any(file['content'] and len(file['content']) > max_file_bytes
159 158 for file in files)
160 159
161 160 directories, files = scm.ScmModel().get_nodes(
162 161 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
163 162 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
164 163 assert all(
165 164 file['content'] is None if file['size'] > max_file_bytes else True
166 165 for file in files)
167 166
168 167
169 168 def assert_contains_only_unicode(structure):
170 169 assert structure
171 170 for value in structure:
172 171 assert isinstance(value, unicode)
173 172
174 173
175 174 @pytest.mark.backends("hg", "git")
176 175 def test_get_non_unicode_reference(backend):
177 176 model = scm.ScmModel()
178 177 non_unicode_list = ["AdΔ±nΔ±".decode("cp1254")]
179 178
180 179 def scm_instance():
181 180 return Mock(
182 181 branches=non_unicode_list, bookmarks=non_unicode_list,
183 182 tags=non_unicode_list, alias=backend.alias)
184 183
185 184 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
186 185 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
187 186 if backend.alias == 'hg':
188 187 valid_choices = [
189 188 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
190 189 u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
191 190 else:
192 191 valid_choices = [
193 192 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
194 193 u'tag:Ad\xc4\xb1n\xc4\xb1']
195 194
196 195 assert choices == valid_choices
@@ -1,257 +1,256 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import shutil
23 23 import datetime
24 24
25 25 import pytest
26 26
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import Config
29 29 from rhodecode.lib.vcs.nodes import FileNode
30 30 from rhodecode.tests import get_new_dir
31 31 from rhodecode.tests.utils import check_skip_backends, check_xfail_backends
32 32
33 33
@pytest.fixture()
def vcs_repository_support(
        request, backend_alias, baseapp, _vcs_repo_container):
    """
    Provide a test repository for the test run.

    Depending on the value of `recreate_repo_per_test` a new repo for each
    test will be created.

    The parameter `--backends` can be used to limit this fixture to specific
    backend implementations.

    :return: tuple of ``(backend_alias, repo)``.
    """
    cls = request.cls

    check_skip_backends(request.node, backend_alias)
    check_xfail_backends(request.node, backend_alias)

    # Classes opting in via ``recreate_repo_per_test`` get a private,
    # per-test container instead of the class-scoped one injected above.
    if _should_create_repo_per_test(cls):
        _vcs_repo_container = _create_vcs_repo_container(request)

    repo = _vcs_repo_container.get_repo(cls, backend_alias=backend_alias)

    # TODO: johbo: Supporting old test class api, think about removing this
    cls.repo = repo
    cls.repo_path = repo.path
    cls.default_branch = repo.DEFAULT_BRANCH_NAME
    cls.Backend = cls.backend_class = repo.__class__
    cls.imc = repo.in_memory_commit

    return backend_alias, repo
64 64
65 65
@pytest.fixture(scope='class')
def _vcs_repo_container(request):
    """
    Class scoped repo container; exists to allow class based repo reuse
    while ``vcs_repository_support`` can still opt out per test.
    """
    return _create_vcs_repo_container(request)
72 72
73 73
def _create_vcs_repo_container(request):
    """Build a repo container; register cleanup unless tmp paths are kept."""
    container = VcsRepoContainer()
    keep_tmp = request.config.getoption('--keep-tmp-path')
    if not keep_tmp:
        request.addfinalizer(container.cleanup)
    return container
79 79
80 80
class VcsRepoContainer(object):
    """Caches one test repository per backend alias and cleans them up."""

    def __init__(self):
        # Paths are remembered in creation order so cleanup can run in
        # reverse; repos are keyed by backend alias.
        self._cleanup_paths = []
        self._repos = {}

    def get_repo(self, test_class, backend_alias):
        """Return the cached repo for *backend_alias*, creating it lazily."""
        try:
            return self._repos[backend_alias]
        except KeyError:
            repo = _create_empty_repository(test_class, backend_alias)
            self._cleanup_paths.append(repo.path)
            self._repos[backend_alias] = repo
            return repo

    def cleanup(self):
        """Remove all created repositories, newest first."""
        for repo_path in reversed(self._cleanup_paths):
            shutil.rmtree(repo_path)
98 98
99 99
100 100 def _should_create_repo_per_test(cls):
101 101 return getattr(cls, 'recreate_repo_per_test', False)
102 102
103 103
def _create_empty_repository(cls, backend_alias=None):
    """
    Create a new repository for *cls*, committing the class fixture
    commits (``cls._get_commits``) when the class defines them.
    """
    backend_cls = get_backend(backend_alias or cls.backend_alias)
    repo_path = get_new_dir(str(time.time()))
    repo = backend_cls(repo_path, create=True)
    if hasattr(cls, '_get_commits'):
        cls.tip = _add_commits_to_repo(repo, cls._get_commits())

    return repo
113 113
114 114
@pytest.fixture
def config():
    """
    Instance of a repository config.

    Carries exactly one value — section "section-a", key "a-1", value
    "value-a-1" — for tests that need a config object but no particular
    content.
    """
    cfg = Config()
    cfg.set('section-a', 'a-1', 'value-a-1')
    return cfg
132 132
133 133
def _add_commits_to_repo(repo, commits):
    """
    Commit each dict in *commits* to *repo* via its in-memory commit.

    Recognized keys per commit dict: 'added', 'changed', 'removed'
    (lists of FileNode), plus 'message', 'author', 'date' and the
    optional 'branch'. Unknown keys are silently ignored.

    :return: the last created commit, or ``None`` for an empty iterable.
    """
    imc = repo.in_memory_commit
    tip = None

    for commit in commits:
        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # message/author are coerced to unicode; branch may be None,
        # which leaves the backend's default branch in effect.
        tip = imc.commit(
            message=unicode(commit['message']),
            author=unicode(commit['author']),
            date=commit['date'],
            branch=commit.get('branch'))
    return tip
153 152
154 153
@pytest.fixture
def vcs_repo(request, backend_alias):
    """Brand new empty repository, removed again after the test."""
    repo_path = get_new_dir(str(time.time()))
    repo = get_backend(backend_alias)(repo_path, create=True)
    request.addfinalizer(lambda: shutil.rmtree(repo_path))
    return repo
166 165
167 166
@pytest.fixture
def generate_repo_with_commits(vcs_repo):
    """
    Creates a factory to generate N commits with some file nodes on a
    randomly generated repository.
    """

    def commit_generator(num):
        start_date = datetime.datetime(2010, 1, 1, 20)
        for x in xrange(num):
            yield {
                'message': 'Commit %d' % x,
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': start_date + datetime.timedelta(hours=12 * x),
                'added': [
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
                ],
                # NOTE(review): _add_commits_to_repo only reads the keys
                # 'added', 'changed' and 'removed'; this 'modified' entry
                # (with its odd '%(x-1)' content) is currently ignored --
                # confirm whether 'changed' was intended.
                'modified': [
                    FileNode('file_%d.txt' % x,
                             content='Foobar %d modified' % (x-1)),
                ]
            }

    def commit_maker(num=5):
        # Materializes *num* commits onto the fixture repo and returns it.
        _add_commits_to_repo(vcs_repo, commit_generator(num))
        return vcs_repo

    return commit_maker
196 195
197 196
@pytest.fixture
def hg_repo(request, vcs_repo):
    """Repository pre-populated with its test class' fixture commits."""
    _add_commits_to_repo(vcs_repo, vcs_repo._get_commits())
    return vcs_repo
206 205
207 206
@pytest.fixture
def hg_commit(hg_repo):
    """Tip commit of the pre-populated ``hg_repo``."""
    tip = hg_repo.get_commit()
    return tip
211 210
212 211
class BackendTestMixin(object):
    """
    This is a backend independent test case class which should be created
    with ``type`` method.

    It is required to set following attributes at subclass:

    - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
    - ``repo_path``: path to the repository which would be created for set
      of tests
    - ``recreate_repo_per_test``: If set to ``False``, repo would NOT be
      created before every single test. Defaults to ``True``.
    """
    recreate_repo_per_test = True

    @classmethod
    def _get_commits(cls):
        # Default two-commit fixture: an initial add-only commit followed
        # by one that adds, changes and (nominally) removes files.
        commits = [
            {
                'message': u'Initial commit',
                'author': u'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('foobar', content='Foobar'),
                    FileNode('foobar2', content='Foobar II'),
                    FileNode('foo/bar/baz', content='baz here!'),
                ],
            },
            {
                'message': u'Changes...',
                'author': u'Jane Doe <jane.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 21),
                'added': [
                    FileNode('some/new.txt', content='news...'),
                ],
                'changed': [
                    # NOTE(review): content passed positionally here,
                    # unlike the keyword form used everywhere else --
                    # presumably equivalent; confirm FileNode's signature.
                    FileNode('foobar', 'Foobar I'),
                ],
                'removed': [],
            },
        ]
        return commits
256 255
257 256
@@ -1,593 +1,592 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import time
23 23
24 24 import pytest
25 25
26 26 from rhodecode.lib.vcs.backends.base import (
27 27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
28 28 from rhodecode.lib.vcs.exceptions import (
29 29 BranchDoesNotExistError, CommitDoesNotExistError,
30 30 RepositoryError, EmptyRepositoryError)
31 31 from rhodecode.lib.vcs.nodes import (
32 32 FileNode, AddedFileNodesGenerator,
33 33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
34 34 from rhodecode.tests import get_new_dir
35 35 from rhodecode.tests.vcs.conftest import BackendTestMixin
36 36
37 37
class TestBaseChangeset:

    def test_is_deprecated(self):
        """Instantiating the legacy ``BaseChangeset`` must emit a warning."""
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        pytest.deprecated_call(BaseChangeset)
43 43
44 44
class TestEmptyCommit(object):

    def test_branch_without_alias_returns_none(self):
        # Without a backend alias an EmptyCommit cannot resolve any
        # default branch name.
        assert EmptyCommit().branch is None
50 50
51 51
@pytest.mark.usefixtures("vcs_repository_support")
class TestCommitsInNonEmptyRepo(BackendTestMixin):
    """
    Commit/branch behaviour over a fresh 5-commit repository; a new repo
    is created for every test because several tests commit into it.
    """
    recreate_repo_per_test = True

    @classmethod
    def _get_commits(cls):
        # Five add-only commits, 12h apart, each adding file_<n>.txt.
        start_date = datetime.datetime(2010, 1, 1, 20)
        for x in xrange(5):
            yield {
                'message': 'Commit %d' % x,
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': start_date + datetime.timedelta(hours=12 * x),
                'added': [
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
                ],
            }

    def test_walk_returns_empty_list_in_case_of_file(self):
        # walk() on a file path (not a directory) yields nothing.
        result = list(self.tip.walk('file_0.txt'))
        assert result == []

    @pytest.mark.backends("git", "hg")
    def test_new_branch(self):
        self.imc.add(FileNode('docs/index.txt',
                              content='Documentation\n'))
        foobar_tip = self.imc.commit(
            message=u'New branch: foobar',
            author=u'joe',
            branch='foobar',
        )
        assert 'foobar' in self.repo.branches
        assert foobar_tip.branch == 'foobar'
        # 'foobar' should be the only branch that contains the new commit
        branch = self.repo.branches.values()
        assert branch[0] != branch[1]

    @pytest.mark.backends("git", "hg")
    def test_new_head_in_default_branch(self):
        tip = self.repo.get_commit()
        self.imc.add(FileNode('docs/index.txt',
                              content='Documentation\n'))
        foobar_tip = self.imc.commit(
            message=u'New branch: foobar',
            author=u'joe',
            branch='foobar',
            parents=[tip],
        )
        self.imc.change(FileNode('docs/index.txt',
                                 content='Documentation\nand more...\n'))
        newtip = self.imc.commit(
            message=u'At default branch',
            author=u'joe',
            branch=foobar_tip.branch,
            parents=[foobar_tip],
        )

        # Merge commit back on the default branch, with two parents.
        newest_tip = self.imc.commit(
            message=u'Merged with %s' % foobar_tip.raw_id,
            author=u'joe',
            branch=self.backend_class.DEFAULT_BRANCH_NAME,
            parents=[newtip, foobar_tip],
        )

        assert newest_tip.branch == self.backend_class.DEFAULT_BRANCH_NAME

    @pytest.mark.backends("git", "hg")
    def test_get_commits_respects_branch_name(self):
        """
        Builds the history below and checks that ``get_commits`` filtered
        by branch only yields commits on that branch.

        * e1930d0 (HEAD, master) Back in default branch
        | * e1930d0 (docs) New Branch: docs2
        | * dcc14fa New branch: docs
        |/
        * e63c41a Initial commit
        ...
        * 624d3db Commit 0

        :return:
        """
        DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
        TEST_BRANCH = 'docs'
        org_tip = self.repo.get_commit()

        self.imc.add(FileNode('readme.txt', content='Document\n'))
        initial = self.imc.commit(
            message=u'Initial commit',
            author=u'joe',
            parents=[org_tip],
            branch=DEFAULT_BRANCH,)

        self.imc.add(FileNode('newdoc.txt', content='foobar\n'))
        docs_branch_commit1 = self.imc.commit(
            message=u'New branch: docs',
            author=u'joe',
            parents=[initial],
            branch=TEST_BRANCH,)

        self.imc.add(FileNode('newdoc2.txt', content='foobar2\n'))
        docs_branch_commit2 = self.imc.commit(
            message=u'New branch: docs2',
            author=u'joe',
            parents=[docs_branch_commit1],
            branch=TEST_BRANCH,)

        self.imc.add(FileNode('newfile', content='hello world\n'))
        self.imc.commit(
            message=u'Back in default branch',
            author=u'joe',
            parents=[initial],
            branch=DEFAULT_BRANCH,)

        default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
        assert docs_branch_commit1 not in list(default_branch_commits)
        assert docs_branch_commit2 not in list(default_branch_commits)

        docs_branch_commits = self.repo.get_commits(
            start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
            branch_name=TEST_BRANCH)
        assert docs_branch_commit1 in list(docs_branch_commits)
        assert docs_branch_commit2 in list(docs_branch_commits)

    @pytest.mark.backends("svn")
    def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
        # Uses a pre-built svn fixture repo; indexes are fixture-specific.
        repo = vcsbackend_svn['svn-simple-layout']
        commits = repo.get_commits(branch_name='trunk')
        commit_indexes = [c.idx for c in commits]
        assert commit_indexes == [1, 2, 3, 7, 12, 15]

    def test_get_commit_by_branch(self):
        for branch, commit_id in self.repo.branches.iteritems():
            assert commit_id == self.repo.get_commit(branch).raw_id

    def test_get_commit_by_tag(self):
        for tag, commit_id in self.repo.tags.iteritems():
            assert commit_id == self.repo.get_commit(tag).raw_id

    def test_get_commit_parents(self):
        # Linear history: each commit's only parent is its predecessor.
        repo = self.repo
        for test_idx in [1, 2, 3]:
            commit = repo.get_commit(commit_idx=test_idx - 1)
            assert [commit] == repo.get_commit(commit_idx=test_idx).parents

    def test_get_commit_children(self):
        # Linear history: each commit's only child is its successor.
        repo = self.repo
        for test_idx in [1, 2, 3]:
            commit = repo.get_commit(commit_idx=test_idx + 1)
            assert [commit] == repo.get_commit(commit_idx=test_idx).children
199 198
200 199
@pytest.mark.usefixtures("vcs_repository_support")
class TestCommits(BackendTestMixin):
    """
    Read-only commit API tests over a shared repository of five commits
    (see ``_get_commits``): commits 0..4, 12 hours apart starting at
    2010-01-01 20:00, each adding ``file_<n>.txt``.
    """
    recreate_repo_per_test = False

    @classmethod
    def _get_commits(cls):
        start_date = datetime.datetime(2010, 1, 1, 20)
        for x in xrange(5):
            yield {
                'message': u'Commit %d' % x,
                'author': u'Joe Doe <joe.doe@example.com>',
                'date': start_date + datetime.timedelta(hours=12 * x),
                'added': [
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
                ],
            }

    def test_simple(self):
        tip = self.repo.get_commit()
        # Fixed: previously written as ``assert tip.date,
        # datetime.datetime(2010, 1, 3 == 20)`` -- the datetime was a
        # never-evaluated assert *message*, so nothing was checked.
        # Commit 4 is created 4 * 12h after 2010-01-01 20:00.
        assert tip.date == datetime.datetime(2010, 1, 3, 20)

    def test_simple_serialized_commit(self):
        tip = self.repo.get_commit()
        # json.dumps(tip) uses .__json__() method
        data = tip.__json__()
        assert 'branch' in data
        assert data['revision']

    def test_retrieve_tip(self):
        tip = self.repo.get_commit('tip')
        assert tip == self.repo.get_commit()

    def test_invalid(self):
        with pytest.raises(CommitDoesNotExistError):
            self.repo.get_commit(commit_idx=123456789)

    def test_idx(self):
        commit = self.repo[0]
        assert commit.idx == 0

    def test_negative_idx(self):
        commit = self.repo.get_commit(commit_idx=-1)
        assert commit.idx >= 0

    def test_revision_is_deprecated(self):
        def get_revision(commit):
            return commit.revision

        commit = self.repo[0]
        pytest.deprecated_call(get_revision, commit)

    def test_size(self):
        tip = self.repo.get_commit()
        size = 5 * len('Foobar N')  # Size of 5 files
        assert tip.size == size

    def test_size_at_commit(self):
        tip = self.repo.get_commit()
        size = 5 * len('Foobar N')  # Size of 5 files
        assert self.repo.size_at_commit(tip.raw_id) == size

    def test_size_at_first_commit(self):
        commit = self.repo[0]
        size = len('Foobar N')  # Size of 1 file
        assert self.repo.size_at_commit(commit.raw_id) == size

    def test_author(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.author, u'Joe Doe <joe.doe@example.com>')

    def test_author_name(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.author_name, u'Joe Doe')

    def test_author_email(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.author_email, u'joe.doe@example.com')

    def test_message(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.message, u'Commit 4')

    def test_diff(self):
        tip = self.repo.get_commit()
        diff = tip.diff()
        assert "+Foobar 4" in diff.raw

    def test_prev(self):
        tip = self.repo.get_commit()
        prev_commit = tip.prev()
        assert prev_commit.message == 'Commit 3'

    def test_prev_raises_on_first_commit(self):
        commit = self.repo.get_commit(commit_idx=0)
        with pytest.raises(CommitDoesNotExistError):
            commit.prev()

    def test_prev_works_on_second_commit_issue_183(self):
        commit = self.repo.get_commit(commit_idx=1)
        prev_commit = commit.prev()
        assert prev_commit.idx == 0

    def test_next(self):
        commit = self.repo.get_commit(commit_idx=2)
        next_commit = commit.next()
        assert next_commit.message == 'Commit 3'

    def test_next_raises_on_tip(self):
        commit = self.repo.get_commit()
        with pytest.raises(CommitDoesNotExistError):
            commit.next()

    def test_get_path_commit(self):
        commit = self.repo.get_commit()
        # Fixed: the return value was previously discarded and the
        # assertion ran against the tip itself, making the test vacuous.
        last_commit = commit.get_path_commit('file_4.txt')
        assert last_commit.message == 'Commit 4'

    def test_get_filenodes_generator(self):
        tip = self.repo.get_commit()
        filepaths = [node.path for node in tip.get_filenodes_generator()]
        assert filepaths == ['file_%d.txt' % x for x in xrange(5)]

    def test_get_file_annotate(self):
        file_added_commit = self.repo.get_commit(commit_idx=3)
        annotations = list(file_added_commit.get_file_annotate('file_3.txt'))

        line_no, commit_id, commit_loader, line = annotations[0]

        assert line_no == 1
        assert commit_id == file_added_commit.raw_id
        assert commit_loader() == file_added_commit
        assert 'Foobar 3' in line

    def test_get_file_annotate_does_not_exist(self):
        file_added_commit = self.repo.get_commit(commit_idx=2)
        # TODO: Should use a specific exception class here?
        with pytest.raises(Exception):
            list(file_added_commit.get_file_annotate('file_3.txt'))

    def test_get_file_annotate_tip(self):
        tip = self.repo.get_commit()
        commit = self.repo.get_commit(commit_idx=3)
        expected_values = list(commit.get_file_annotate('file_3.txt'))
        annotations = list(tip.get_file_annotate('file_3.txt'))

        # Note: Skip index 2 because the loader function is not the same
        for idx in (0, 1, 3):
            assert annotations[0][idx] == expected_values[0][idx]

    def test_get_commits_is_ordered_by_date(self):
        commits = self.repo.get_commits()
        assert isinstance(commits, CollectionGenerator)
        # Tautology on purpose: forces __len__ evaluation on the lazy
        # collection before it is iterated.
        assert len(commits) == 0 or len(commits) != 0
        commits = list(commits)
        ordered_by_date = sorted(commits, key=lambda commit: commit.date)
        assert commits == ordered_by_date

    def test_get_commits_respects_start(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(start_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert len(commits) == 4

    def test_get_commits_includes_start_commit(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(start_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert commits[0].raw_id == second_id

    def test_get_commits_respects_end(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(end_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert commits[-1].raw_id == second_id
        assert len(commits) == 2

    def test_get_commits_respects_both_start_and_end(self):
        second_id = self.repo.commit_ids[1]
        third_id = self.repo.commit_ids[2]
        commits = self.repo.get_commits(start_id=second_id, end_id=third_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert len(commits) == 2

    def test_get_commits_on_empty_repo_raises_EmptyRepository_error(self):
        repo_path = get_new_dir(str(time.time()))
        repo = self.Backend(repo_path, create=True)

        with pytest.raises(EmptyRepositoryError):
            list(repo.get_commits(start_id='foobar'))

    def test_get_commits_respects_hidden(self):
        commits = self.repo.get_commits(show_hidden=True)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 5

    def test_get_commits_includes_end_commit(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(end_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 2
        commits = list(commits)
        assert commits[-1].raw_id == second_id

    def test_get_commits_respects_start_date(self):
        start_date = datetime.datetime(2010, 1, 2)
        commits = self.repo.get_commits(start_date=start_date)
        assert isinstance(commits, CollectionGenerator)
        # Should be 4 commits after 2010-01-02 00:00:00
        assert len(commits) == 4
        for c in commits:
            assert c.date >= start_date

    def test_get_commits_respects_start_date_with_branch(self):
        start_date = datetime.datetime(2010, 1, 2)
        commits = self.repo.get_commits(
            start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
        assert isinstance(commits, CollectionGenerator)
        # Should be 4 commits after 2010-01-02 00:00:00
        assert len(commits) == 4
        for c in commits:
            assert c.date >= start_date

    def test_get_commits_respects_start_date_and_end_date(self):
        start_date = datetime.datetime(2010, 1, 2)
        end_date = datetime.datetime(2010, 1, 3)
        commits = self.repo.get_commits(start_date=start_date,
                                        end_date=end_date)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 2
        for c in commits:
            assert c.date >= start_date
            assert c.date <= end_date

    def test_get_commits_respects_end_date(self):
        end_date = datetime.datetime(2010, 1, 2)
        commits = self.repo.get_commits(end_date=end_date)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 1
        for c in commits:
            assert c.date <= end_date

    def test_get_commits_respects_reverse(self):
        commits = self.repo.get_commits()  # no longer reverse support
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 5
        commit_ids = reversed([c.raw_id for c in commits])
        assert list(commit_ids) == list(reversed(self.repo.commit_ids))

    def test_get_commits_slice_generator(self):
        commits = self.repo.get_commits(
            branch_name=self.repo.DEFAULT_BRANCH_NAME)
        assert isinstance(commits, CollectionGenerator)
        commit_slice = list(commits[1:3])
        assert len(commit_slice) == 2

    def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
        with pytest.raises(CommitDoesNotExistError):
            list(self.repo.get_commits(start_id='foobar'))

    def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
        with pytest.raises(CommitDoesNotExistError):
            list(self.repo.get_commits(end_id='foobar'))

    def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
        with pytest.raises(BranchDoesNotExistError):
            list(self.repo.get_commits(branch_name='foobar'))

    def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
        start_id = self.repo.commit_ids[-1]
        end_id = self.repo.commit_ids[0]
        with pytest.raises(RepositoryError):
            list(self.repo.get_commits(start_id=start_id, end_id=end_id))

    def test_get_commits_raises_for_numerical_ids(self):
        with pytest.raises(TypeError):
            self.repo.get_commits(start_id=1, end_id=2)

    def test_commit_equality(self):
        commit1 = self.repo.get_commit(self.repo.commit_ids[0])
        commit2 = self.repo.get_commit(self.repo.commit_ids[1])

        assert commit1 == commit1
        assert commit2 == commit2
        assert commit1 != commit2
        assert commit2 != commit1
        # Deliberately uses ``!=`` with None/other types to exercise the
        # __ne__ implementation, not identity.
        assert commit1 != None
        assert None != commit1
        assert 1 != commit1
        assert 'string' != commit1
494 493
495 494
@pytest.mark.parametrize("filename, expected", [
    ("README.rst", False),
    ("README", True),
])
def test_commit_is_link(vcsbackend, filename, expected):
    """``is_link`` reports whether *filename* is a symlink in the tip."""
    tip = vcsbackend.repo.get_commit()
    assert tip.is_link(filename) is expected
504 503
505 504
@pytest.mark.usefixtures("vcs_repository_support")
class TestCommitsChanges(BackendTestMixin):
    """
    Tests for added/changed/removed reporting over a two-commit history:
    an initial add-only commit and a second commit that adds, changes
    and removes files.
    """
    recreate_repo_per_test = False

    @classmethod
    def _get_commits(cls):
        # NOTE(review): 'baΕ‚' looks like mojibake of u'bał' (utf-8 read
        # as latin-1/cp1252) -- confirm the file's on-disk encoding; the
        # literal is kept byte-identical here.
        return [
            {
                'message': u'Initial',
                'author': u'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('foo/bar', content='foo'),
                    FileNode('foo/baΕ‚', content='foo'),
                    FileNode('foobar', content='foo'),
                    FileNode('qwe', content='foo'),
                ],
            },
            {
                'message': u'Massive changes',
                'author': u'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 22),
                'added': [FileNode('fallout', content='War never changes')],
                'changed': [
                    FileNode('foo/bar', content='baz'),
                    FileNode('foobar', content='baz'),
                ],
                'removed': [FileNode('qwe')],
            },
        ]

    def test_initial_commit(self, local_dt_to_utc):
        # The first commit reports every file as added, nothing else.
        commit = self.repo.get_commit(commit_idx=0)
        assert set(commit.added) == set([
            commit.get_node('foo/bar'),
            commit.get_node('foo/baΕ‚'),
            commit.get_node('foobar'),
            commit.get_node('qwe'),
        ])
        assert set(commit.changed) == set()
        assert set(commit.removed) == set()
        assert set(commit.affected_files) == set(
            ['foo/bar', 'foo/baΕ‚', 'foobar', 'qwe'])
        assert commit.date == local_dt_to_utc(
            datetime.datetime(2010, 1, 1, 20, 0))

    def test_head_added(self):
        # The second commit exposes lazy node generators for each bucket.
        commit = self.repo.get_commit()
        assert isinstance(commit.added, AddedFileNodesGenerator)
        assert set(commit.added) == set([commit.get_node('fallout')])
        assert isinstance(commit.changed, ChangedFileNodesGenerator)
        assert set(commit.changed) == set([
            commit.get_node('foo/bar'),
            commit.get_node('foobar'),
        ])
        assert isinstance(commit.removed, RemovedFileNodesGenerator)
        assert len(commit.removed) == 1
        assert list(commit.removed)[0].path == 'qwe'

    def test_get_filemode(self):
        commit = self.repo.get_commit()
        assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')

    def test_get_filemode_non_ascii(self):
        # Both str and unicode spellings of the path must resolve.
        commit = self.repo.get_commit()
        assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ‚')
        assert FILEMODE_DEFAULT == commit.get_file_mode(u'foo/baΕ‚')

    def test_get_path_history(self):
        # foo/bar was touched by both commits.
        commit = self.repo.get_commit()
        history = commit.get_path_history('foo/bar')
        assert len(history) == 2

    def test_get_path_history_with_limit(self):
        commit = self.repo.get_commit()
        history = commit.get_path_history('foo/bar', limit=1)
        assert len(history) == 1

    def test_get_path_history_first_commit(self):
        commit = self.repo[0]
        history = commit.get_path_history('foo/bar')
        assert len(history) == 1
588 587
589 588
def assert_text_equal(expected, given):
    """Equality check that additionally enforces both sides are unicode."""
    assert expected == given
    for value in (expected, given):
        assert isinstance(value, unicode)
@@ -1,1288 +1,1288 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 25 import shutil
26 26
27 27 import pytest
28 28
29 29 from rhodecode.lib.utils import make_db_config
30 30 from rhodecode.lib.vcs.backends.base import Reference
31 31 from rhodecode.lib.vcs.backends.git import (
32 32 GitRepository, GitCommit, discover_git_version)
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 RepositoryError, VCSError, NodeDoesNotExistError)
35 35 from rhodecode.lib.vcs.nodes import (
36 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 39
40 40
41 41 pytestmark = pytest.mark.backends("git")
42 42
43 43
def repo_path_generator():
    """
    Yield an endless sequence of distinct clone target paths, formed by
    suffixing the base clone location with an increasing counter.
    """
    counter = 1
    while True:
        yield '%s-%d' % (TEST_GIT_REPO_CLONE, counter)
        counter += 1
52 52
53 53
54 54 REPO_PATH_GENERATOR = repo_path_generator()
55 55
56 56
57 57 class TestGitRepository:
58 58
59 59 # pylint: disable=protected-access
60 60
61 61 def __check_for_existing_repo(self):
62 62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 63 self.fail('Cannot test git clone repo as location %s already '
64 64 'exists. You should manually remove it first.'
65 65 % TEST_GIT_REPO_CLONE)
66 66
    @pytest.fixture(autouse=True)
    def prepare(self, request, baseapp):
        # Autouse fixture: give every test a handle on the shared bare
        # fixture repository (opened, not created -- no create=True).
        self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70 70
    def get_clone_repo(self):
        """
        Return a non bare clone of the base repo.
        """
        # Each call draws a fresh target path from the shared generator so
        # clones created by different tests never collide on disk.
        clone_path = next(REPO_PATH_GENERATOR)
        repo_clone = GitRepository(
            clone_path, create=True, src_url=self.repo.path, bare=False)

        return repo_clone
80 80
81 81 def get_empty_repo(self, bare=False):
82 82 """
83 83 Return a non bare empty repo.
84 84 """
85 85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86 86
87 87 def test_wrong_repo_path(self):
88 88 wrong_repo_path = '/tmp/errorrepo_git'
89 89 with pytest.raises(RepositoryError):
90 90 GitRepository(wrong_repo_path)
91 91
    def test_repo_clone(self):
        # Guard: the fixed clone target must not be left over from an
        # earlier run, or the clone below would fail.
        self.__check_for_existing_repo()
        repo = GitRepository(TEST_GIT_REPO)
        repo_clone = GitRepository(
            TEST_GIT_REPO_CLONE,
            src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
        assert len(repo.commit_ids) == len(repo_clone.commit_ids)
        # Checking hashes of commits should be enough
        for commit in repo.get_commits():
            raw_id = commit.raw_id
            assert raw_id == repo_clone.get_commit(raw_id).raw_id
103 103
104 104 def test_repo_clone_without_create(self):
105 105 with pytest.raises(RepositoryError):
106 106 GitRepository(
107 107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108 108
109 109 def test_repo_clone_with_update(self):
110 110 repo = GitRepository(TEST_GIT_REPO)
111 111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 112 repo_clone = GitRepository(
113 113 clone_path,
114 114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116
117 117 # check if current workdir was updated
118 118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 119 assert os.path.isfile(fpath)
120 120
121 121 def test_repo_clone_without_update(self):
122 122 repo = GitRepository(TEST_GIT_REPO)
123 123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 124 repo_clone = GitRepository(
125 125 clone_path,
126 126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 128 # check if current workdir was *NOT* updated
129 129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 130 # Make sure it's not bare repo
131 131 assert not repo_clone.bare
132 132 assert not os.path.isfile(fpath)
133 133
134 134 def test_repo_clone_into_bare_repo(self):
135 135 repo = GitRepository(TEST_GIT_REPO)
136 136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 137 repo_clone = GitRepository(
138 138 clone_path, create=True, src_url=repo.path, bare=True)
139 139 assert repo_clone.bare
140 140
141 141 def test_create_repo_is_not_bare_by_default(self):
142 142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 143 assert not repo.bare
144 144
145 145 def test_create_bare_repo(self):
146 146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 147 assert repo.bare
148 148
149 149 def test_update_server_info(self):
150 150 self.repo._update_server_info()
151 151
152 152 def test_fetch(self, vcsbackend_git):
153 153 # Note: This is a git specific part of the API, it's only implemented
154 154 # by the git backend.
155 155 source_repo = vcsbackend_git.repo
156 156 target_repo = vcsbackend_git.create_repo(bare=True)
157 157 target_repo.fetch(source_repo.path)
158 158 # Note: Get a fresh instance, avoids caching trouble
159 159 target_repo = vcsbackend_git.backend(target_repo.path)
160 160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161 161
162 162 def test_commit_ids(self):
163 163 # there are 112 commits (by now)
164 164 # so we can assume they would be available from now on
165 165 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 167 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 168 '102607b09cdd60e2793929c4f90478be29f85a17',
169 169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 170 '2d1028c054665b962fa3d307adfc923ddd528038',
171 171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 174 '8430a588b43b5d6da365400117c89400326e7992',
175 175 'd955cd312c17b02143c04fa1099a352b04368118',
176 176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 187 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
190 190 assert subset.issubset(set(self.repo.commit_ids))
191 191
192 192 def test_slicing(self):
193 193 # 4 1 5 10 95
194 194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 195 (10, 20, 10), (5, 100, 95)]:
196 196 commit_ids = list(self.repo[sfrom:sto])
197 197 assert len(commit_ids) == size
198 198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200 200
201 201 def test_branches(self):
202 202 # TODO: Need more tests here
203 203 # Removed (those are 'remotes' branches for cloned repo)
204 204 # assert 'master' in self.repo.branches
205 205 # assert 'gittree' in self.repo.branches
206 206 # assert 'web-branch' in self.repo.branches
207 207 for __, commit_id in self.repo.branches.items():
208 208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209 209
210 210 def test_tags(self):
211 211 # TODO: Need more tests here
212 212 assert 'v0.1.1' in self.repo.tags
213 213 assert 'v0.1.2' in self.repo.tags
214 214 for __, commit_id in self.repo.tags.items():
215 215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216 216
217 217 def _test_single_commit_cache(self, commit_id):
218 218 commit = self.repo.get_commit(commit_id)
219 219 assert commit_id in self.repo.commits
220 220 assert commit is self.repo.commits[commit_id]
221 221
222 222 def test_initial_commit(self):
223 223 commit_id = self.repo.commit_ids[0]
224 224 init_commit = self.repo.get_commit(commit_id)
225 225 init_author = init_commit.author
226 226
227 227 assert init_commit.message == 'initial import\n'
228 228 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
229 229 assert init_author == init_commit.committer
230 230 for path in ('vcs/__init__.py',
231 231 'vcs/backends/BaseRepository.py',
232 232 'vcs/backends/__init__.py'):
233 233 assert isinstance(init_commit.get_node(path), FileNode)
234 234 for path in ('', 'vcs', 'vcs/backends'):
235 235 assert isinstance(init_commit.get_node(path), DirNode)
236 236
237 237 with pytest.raises(NodeDoesNotExistError):
238 238 init_commit.get_node(path='foobar')
239 239
240 240 node = init_commit.get_node('vcs/')
241 241 assert hasattr(node, 'kind')
242 242 assert node.kind == NodeKind.DIR
243 243
244 244 node = init_commit.get_node('vcs')
245 245 assert hasattr(node, 'kind')
246 246 assert node.kind == NodeKind.DIR
247 247
248 248 node = init_commit.get_node('vcs/__init__.py')
249 249 assert hasattr(node, 'kind')
250 250 assert node.kind == NodeKind.FILE
251 251
252 252 def test_not_existing_commit(self):
253 253 with pytest.raises(RepositoryError):
254 254 self.repo.get_commit('f' * 40)
255 255
256 256 def test_commit10(self):
257 257
258 258 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
259 259 README = """===
260 260 VCS
261 261 ===
262 262
263 263 Various Version Control System management abstraction layer for Python.
264 264
265 265 Introduction
266 266 ------------
267 267
268 268 TODO: To be written...
269 269
270 270 """
271 271 node = commit10.get_node('README.rst')
272 272 assert node.kind == NodeKind.FILE
273 273 assert node.content == README
274 274
275 275 def test_head(self):
276 276 assert self.repo.head == self.repo.get_commit().raw_id
277 277
278 278 def test_checkout_with_create(self):
279 279 repo_clone = self.get_clone_repo()
280 280
281 281 new_branch = 'new_branch'
282 282 assert repo_clone._current_branch() == 'master'
283 283 assert set(repo_clone.branches) == {'master'}
284 284 repo_clone._checkout(new_branch, create=True)
285 285
286 286 # Branches is a lazy property so we need to recrete the Repo object.
287 287 repo_clone = GitRepository(repo_clone.path)
288 288 assert set(repo_clone.branches) == {'master', new_branch}
289 289 assert repo_clone._current_branch() == new_branch
290 290
291 291 def test_checkout(self):
292 292 repo_clone = self.get_clone_repo()
293 293
294 294 repo_clone._checkout('new_branch', create=True)
295 295 repo_clone._checkout('master')
296 296
297 297 assert repo_clone._current_branch() == 'master'
298 298
299 299 def test_checkout_same_branch(self):
300 300 repo_clone = self.get_clone_repo()
301 301
302 302 repo_clone._checkout('master')
303 303 assert repo_clone._current_branch() == 'master'
304 304
305 305 def test_checkout_branch_already_exists(self):
306 306 repo_clone = self.get_clone_repo()
307 307
308 308 with pytest.raises(RepositoryError):
309 309 repo_clone._checkout('master', create=True)
310 310
311 311 def test_checkout_bare_repo(self):
312 312 with pytest.raises(RepositoryError):
313 313 self.repo._checkout('master')
314 314
315 315 def test_current_branch_bare_repo(self):
316 316 with pytest.raises(RepositoryError):
317 317 self.repo._current_branch()
318 318
319 319 def test_current_branch_empty_repo(self):
320 320 repo = self.get_empty_repo()
321 321 assert repo._current_branch() is None
322 322
323 323 def test_local_clone(self):
324 324 clone_path = next(REPO_PATH_GENERATOR)
325 325 self.repo._local_clone(clone_path, 'master')
326 326 repo_clone = GitRepository(clone_path)
327 327
328 328 assert self.repo.commit_ids == repo_clone.commit_ids
329 329
330 330 def test_local_clone_with_specific_branch(self):
331 331 source_repo = self.get_clone_repo()
332 332
333 333 # Create a new branch in source repo
334 334 new_branch_commit = source_repo.commit_ids[-3]
335 335 source_repo._checkout(new_branch_commit)
336 336 source_repo._checkout('new_branch', create=True)
337 337
338 338 clone_path = next(REPO_PATH_GENERATOR)
339 339 source_repo._local_clone(clone_path, 'new_branch')
340 340 repo_clone = GitRepository(clone_path)
341 341
342 342 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
343 343
344 344 clone_path = next(REPO_PATH_GENERATOR)
345 345 source_repo._local_clone(clone_path, 'master')
346 346 repo_clone = GitRepository(clone_path)
347 347
348 348 assert source_repo.commit_ids == repo_clone.commit_ids
349 349
350 350 def test_local_clone_fails_if_target_exists(self):
351 351 with pytest.raises(RepositoryError):
352 352 self.repo._local_clone(self.repo.path, 'master')
353 353
354 354 def test_local_fetch(self):
355 355 target_repo = self.get_empty_repo()
356 356 source_repo = self.get_clone_repo()
357 357
358 358 # Create a new branch in source repo
359 359 master_commit = source_repo.commit_ids[-1]
360 360 new_branch_commit = source_repo.commit_ids[-3]
361 361 source_repo._checkout(new_branch_commit)
362 362 source_repo._checkout('new_branch', create=True)
363 363
364 364 target_repo._local_fetch(source_repo.path, 'new_branch')
365 365 assert target_repo._last_fetch_heads() == [new_branch_commit]
366 366
367 367 target_repo._local_fetch(source_repo.path, 'master')
368 368 assert target_repo._last_fetch_heads() == [master_commit]
369 369
370 370 def test_local_fetch_from_bare_repo(self):
371 371 target_repo = self.get_empty_repo()
372 372 target_repo._local_fetch(self.repo.path, 'master')
373 373
374 374 master_commit = self.repo.commit_ids[-1]
375 375 assert target_repo._last_fetch_heads() == [master_commit]
376 376
377 377 def test_local_fetch_from_same_repo(self):
378 378 with pytest.raises(ValueError):
379 379 self.repo._local_fetch(self.repo.path, 'master')
380 380
381 381 def test_local_fetch_branch_does_not_exist(self):
382 382 target_repo = self.get_empty_repo()
383 383
384 384 with pytest.raises(RepositoryError):
385 385 target_repo._local_fetch(self.repo.path, 'new_branch')
386 386
387 387 def test_local_pull(self):
388 388 target_repo = self.get_empty_repo()
389 389 source_repo = self.get_clone_repo()
390 390
391 391 # Create a new branch in source repo
392 392 master_commit = source_repo.commit_ids[-1]
393 393 new_branch_commit = source_repo.commit_ids[-3]
394 394 source_repo._checkout(new_branch_commit)
395 395 source_repo._checkout('new_branch', create=True)
396 396
397 397 target_repo._local_pull(source_repo.path, 'new_branch')
398 398 target_repo = GitRepository(target_repo.path)
399 399 assert target_repo.head == new_branch_commit
400 400
401 401 target_repo._local_pull(source_repo.path, 'master')
402 402 target_repo = GitRepository(target_repo.path)
403 403 assert target_repo.head == master_commit
404 404
405 405 def test_local_pull_in_bare_repo(self):
406 406 with pytest.raises(RepositoryError):
407 407 self.repo._local_pull(self.repo.path, 'master')
408 408
409 409 def test_local_merge(self):
410 410 target_repo = self.get_empty_repo()
411 411 source_repo = self.get_clone_repo()
412 412
413 413 # Create a new branch in source repo
414 414 master_commit = source_repo.commit_ids[-1]
415 415 new_branch_commit = source_repo.commit_ids[-3]
416 416 source_repo._checkout(new_branch_commit)
417 417 source_repo._checkout('new_branch', create=True)
418 418
419 419 # This is required as one cannot do a -ff-only merge in an empty repo.
420 420 target_repo._local_pull(source_repo.path, 'new_branch')
421 421
422 422 target_repo._local_fetch(source_repo.path, 'master')
423 423 merge_message = 'Merge message\n\nDescription:...'
424 424 user_name = 'Albert Einstein'
425 425 user_email = 'albert@einstein.com'
426 426 target_repo._local_merge(merge_message, user_name, user_email,
427 427 target_repo._last_fetch_heads())
428 428
429 429 target_repo = GitRepository(target_repo.path)
430 430 assert target_repo.commit_ids[-2] == master_commit
431 431 last_commit = target_repo.get_commit(target_repo.head)
432 432 assert last_commit.message.strip() == merge_message
433 433 assert last_commit.author == '%s <%s>' % (user_name, user_email)
434 434
435 435 assert not os.path.exists(
436 436 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437 437
438 438 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
439 439 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
440 440 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
441 441
442 442 target_repo._local_fetch(self.repo.path, 'master')
443 443 with pytest.raises(RepositoryError):
444 444 target_repo._local_merge(
445 445 'merge_message', 'user name', 'user@name.com',
446 446 target_repo._last_fetch_heads())
447 447
448 448 # Check we are not left in an intermediate merge state
449 449 assert not os.path.exists(
450 450 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451 451
452 452 def test_local_merge_into_empty_repo(self):
453 453 target_repo = self.get_empty_repo()
454 454
455 455 # This is required as one cannot do a -ff-only merge in an empty repo.
456 456 target_repo._local_fetch(self.repo.path, 'master')
457 457 with pytest.raises(RepositoryError):
458 458 target_repo._local_merge(
459 459 'merge_message', 'user name', 'user@name.com',
460 460 target_repo._last_fetch_heads())
461 461
462 462 def test_local_merge_in_bare_repo(self):
463 463 with pytest.raises(RepositoryError):
464 464 self.repo._local_merge(
465 465 'merge_message', 'user name', 'user@name.com', None)
466 466
467 467 def test_local_push_non_bare(self):
468 468 target_repo = self.get_empty_repo()
469 469
470 470 pushed_branch = 'pushed_branch'
471 471 self.repo._local_push('master', target_repo.path, pushed_branch)
472 472 # Fix the HEAD of the target repo, or otherwise GitRepository won't
473 473 # report any branches.
474 474 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
475 475 f.write('ref: refs/heads/%s' % pushed_branch)
476 476
477 477 target_repo = GitRepository(target_repo.path)
478 478
479 479 assert (target_repo.branches[pushed_branch] ==
480 480 self.repo.branches['master'])
481 481
482 482 def test_local_push_bare(self):
483 483 target_repo = self.get_empty_repo(bare=True)
484 484
485 485 pushed_branch = 'pushed_branch'
486 486 self.repo._local_push('master', target_repo.path, pushed_branch)
487 487 # Fix the HEAD of the target repo, or otherwise GitRepository won't
488 488 # report any branches.
489 489 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
490 490 f.write('ref: refs/heads/%s' % pushed_branch)
491 491
492 492 target_repo = GitRepository(target_repo.path)
493 493
494 494 assert (target_repo.branches[pushed_branch] ==
495 495 self.repo.branches['master'])
496 496
497 497 def test_local_push_non_bare_target_branch_is_checked_out(self):
498 498 target_repo = self.get_clone_repo()
499 499
500 500 pushed_branch = 'pushed_branch'
501 501 # Create a new branch in source repo
502 502 new_branch_commit = target_repo.commit_ids[-3]
503 503 target_repo._checkout(new_branch_commit)
504 504 target_repo._checkout(pushed_branch, create=True)
505 505
506 506 self.repo._local_push('master', target_repo.path, pushed_branch)
507 507
508 508 target_repo = GitRepository(target_repo.path)
509 509
510 510 assert (target_repo.branches[pushed_branch] ==
511 511 self.repo.branches['master'])
512 512
513 513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 515 with pytest.raises(RepositoryError):
516 516 self.repo._local_push('master', target_repo.path, 'master')
517 517
518 518 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
519 519 target_repo = self.get_empty_repo(bare=True)
520 520
521 521 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
522 522 self.repo._local_push(
523 523 'master', target_repo.path, 'master', enable_hooks=True)
524 524 env = run_mock.call_args[1]['extra_env']
525 525 assert 'RC_SKIP_HOOKS' not in env
526 526
527 527 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 528 path_components = (
529 529 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 530 hook_path = os.path.join(repo_path, *path_components)
531 531 with open(hook_path, 'w') as f:
532 532 script_lines = [
533 533 '#!%s' % sys.executable,
534 534 'import os',
535 535 'import sys',
536 536 'if os.environ.get("RC_SKIP_HOOKS"):',
537 537 ' sys.exit(0)',
538 538 'sys.exit(1)',
539 539 ]
540 540 f.write('\n'.join(script_lines))
541 541 os.chmod(hook_path, 0o755)
542 542
543 543 def test_local_push_does_not_execute_hook(self):
544 544 target_repo = self.get_empty_repo()
545 545
546 546 pushed_branch = 'pushed_branch'
547 547 self._add_failing_hook(target_repo.path, 'pre-receive')
548 548 self.repo._local_push('master', target_repo.path, pushed_branch)
549 549 # Fix the HEAD of the target repo, or otherwise GitRepository won't
550 550 # report any branches.
551 551 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 552 f.write('ref: refs/heads/%s' % pushed_branch)
553 553
554 554 target_repo = GitRepository(target_repo.path)
555 555
556 556 assert (target_repo.branches[pushed_branch] ==
557 557 self.repo.branches['master'])
558 558
559 559 def test_local_push_executes_hook(self):
560 560 target_repo = self.get_empty_repo(bare=True)
561 561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 562 with pytest.raises(RepositoryError):
563 563 self.repo._local_push(
564 564 'master', target_repo.path, 'master', enable_hooks=True)
565 565
566 566 def test_maybe_prepare_merge_workspace(self):
567 567 workspace = self.repo._maybe_prepare_merge_workspace(
568 568 2, 'pr2', Reference('branch', 'master', 'unused'),
569 569 Reference('branch', 'master', 'unused'))
570 570
571 571 assert os.path.isdir(workspace)
572 572 workspace_repo = GitRepository(workspace)
573 573 assert workspace_repo.branches == self.repo.branches
574 574
575 575 # Calling it a second time should also succeed
576 576 workspace = self.repo._maybe_prepare_merge_workspace(
577 577 2, 'pr2', Reference('branch', 'master', 'unused'),
578 578 Reference('branch', 'master', 'unused'))
579 579 assert os.path.isdir(workspace)
580 580
581 581 def test_maybe_prepare_merge_workspace_different_refs(self):
582 582 workspace = self.repo._maybe_prepare_merge_workspace(
583 583 2, 'pr2', Reference('branch', 'master', 'unused'),
584 584 Reference('branch', 'develop', 'unused'))
585 585
586 586 assert os.path.isdir(workspace)
587 587 workspace_repo = GitRepository(workspace)
588 588 assert workspace_repo.branches == self.repo.branches
589 589
590 590 # Calling it a second time should also succeed
591 591 workspace = self.repo._maybe_prepare_merge_workspace(
592 592 2, 'pr2', Reference('branch', 'master', 'unused'),
593 593 Reference('branch', 'develop', 'unused'))
594 594 assert os.path.isdir(workspace)
595 595
596 596 def test_cleanup_merge_workspace(self):
597 597 workspace = self.repo._maybe_prepare_merge_workspace(
598 598 2, 'pr3', Reference('branch', 'master', 'unused'),
599 599 Reference('branch', 'master', 'unused'))
600 600 self.repo.cleanup_merge_workspace(2, 'pr3')
601 601
602 602 assert not os.path.exists(workspace)
603 603
604 604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
605 605 # No assert: because in case of an inexistent workspace this function
606 606 # should still succeed.
607 607 self.repo.cleanup_merge_workspace(1, 'pr4')
608 608
609 609 def test_set_refs(self):
610 610 test_ref = 'refs/test-refs/abcde'
611 611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612 612
613 613 self.repo.set_refs(test_ref, test_commit_id)
614 614 stdout, _ = self.repo.run_git_command(['show-ref'])
615 615 assert test_ref in stdout
616 616 assert test_commit_id in stdout
617 617
618 618 def test_remove_ref(self):
619 619 test_ref = 'refs/test-refs/abcde'
620 620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 621 self.repo.set_refs(test_ref, test_commit_id)
622 622 stdout, _ = self.repo.run_git_command(['show-ref'])
623 623 assert test_ref in stdout
624 624 assert test_commit_id in stdout
625 625
626 626 self.repo.remove_ref(test_ref)
627 627 stdout, _ = self.repo.run_git_command(['show-ref'])
628 628 assert test_ref not in stdout
629 629 assert test_commit_id not in stdout
630 630
631 631
632 632 class TestGitCommit(object):
633 633
634 634 @pytest.fixture(autouse=True)
635 635 def prepare(self):
636 636 self.repo = GitRepository(TEST_GIT_REPO)
637 637
638 638 def test_default_commit(self):
639 639 tip = self.repo.get_commit()
640 640 assert tip == self.repo.get_commit(None)
641 641 assert tip == self.repo.get_commit('tip')
642 642
643 643 def test_root_node(self):
644 644 tip = self.repo.get_commit()
645 645 assert tip.root is tip.get_node('')
646 646
647 647 def test_lazy_fetch(self):
648 648 """
649 649 Test if commit's nodes expands and are cached as we walk through
650 650 the commit. This test is somewhat hard to write as order of tests
651 651 is a key here. Written by running command after command in a shell.
652 652 """
653 653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
654 654 assert commit_id in self.repo.commit_ids
655 655 commit = self.repo.get_commit(commit_id)
656 656 assert len(commit.nodes) == 0
657 657 root = commit.root
658 658 assert len(commit.nodes) == 1
659 659 assert len(root.nodes) == 8
660 660 # accessing root.nodes updates commit.nodes
661 661 assert len(commit.nodes) == 9
662 662
663 663 docs = root.get_node('docs')
664 664 # we haven't yet accessed anything new as docs dir was already cached
665 665 assert len(commit.nodes) == 9
666 666 assert len(docs.nodes) == 8
667 667 # accessing docs.nodes updates commit.nodes
668 668 assert len(commit.nodes) == 17
669 669
670 670 assert docs is commit.get_node('docs')
671 671 assert docs is root.nodes[0]
672 672 assert docs is root.dirs[0]
673 673 assert docs is commit.get_node('docs')
674 674
675 675 def test_nodes_with_commit(self):
676 676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
677 677 commit = self.repo.get_commit(commit_id)
678 678 root = commit.root
679 679 docs = root.get_node('docs')
680 680 assert docs is commit.get_node('docs')
681 681 api = docs.get_node('api')
682 682 assert api is commit.get_node('docs/api')
683 683 index = api.get_node('index.rst')
684 684 assert index is commit.get_node('docs/api/index.rst')
685 685 assert index is commit.get_node('docs')\
686 686 .get_node('api')\
687 687 .get_node('index.rst')
688 688
    def test_branch_and_tags(self):
        """
        rev0 = self.repo.commit_ids[0]
        commit0 = self.repo.get_commit(rev0)
        assert commit0.branch == 'master'
        assert commit0.tags == []

        rev10 = self.repo.commit_ids[10]
        commit10 = self.repo.get_commit(rev10)
        assert commit10.branch == 'master'
        assert commit10.tags == []

        rev44 = self.repo.commit_ids[44]
        commit44 = self.repo.get_commit(rev44)
        assert commit44.branch == 'web-branch'

        tip = self.repo.get_commit('tip')
        assert 'tip' in tip.tags
        """
        # NOTE: deliberately disabled -- the intended assertions are parked
        # in the docstring above because branch handling is being reworked
        # at the main API level to support the git backend; running them
        # would currently fail.
        pass
711 711
712 712 def test_file_size(self):
713 713 to_check = (
714 714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
715 715 'vcs/backends/BaseRepository.py', 502),
716 716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
717 717 'vcs/backends/hg.py', 854),
718 718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
719 719 'setup.py', 1068),
720 720
721 721 ('d955cd312c17b02143c04fa1099a352b04368118',
722 722 'vcs/backends/base.py', 2921),
723 723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
724 724 'vcs/backends/base.py', 3936),
725 725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
726 726 'vcs/backends/base.py', 6189),
727 727 )
728 728 for commit_id, path, size in to_check:
729 729 node = self.repo.get_commit(commit_id).get_node(path)
730 730 assert node.is_file()
731 731 assert node.size == size
732 732
733 733 def test_file_history_from_commits(self):
734 734 node = self.repo[10].get_node('setup.py')
735 735 commit_ids = [commit.raw_id for commit in node.history]
736 736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
737 737
738 738 node = self.repo[20].get_node('setup.py')
739 739 node_ids = [commit.raw_id for commit in node.history]
740 740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
741 741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
742 742
743 743 # special case we check history from commit that has this particular
744 744 # file changed this means we check if it's included as well
745 745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
746 746 .get_node('setup.py')
747 747 node_ids = [commit.raw_id for commit in node.history]
748 748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
749 749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750 750
    def test_file_history(self):
        """Known commit ids appear in each file's history.

        We can only check that those commits are *present* in the history
        (a subset check) because the fixture repository keeps growing and
        this test cannot be updated every time a file is changed.
        """
        # Map of file path -> commit ids known to have touched that file.
        files = {
            'setup.py': [
                '54386793436c938cff89326944d4c2702340037d',
                '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
                '998ed409c795fec2012b1c0ca054d99888b22090',
                '5e0eb4c47f56564395f76333f319d26c79e2fb09',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/nodes.py': [
                '33fa3223355104431402a888fa77a4e9956feb3e',
                'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
                'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
                'ab5721ca0a081f26bf43d9051e615af2cc99952f',
                'c877b68d18e792a66b7f4c529ea02c8f80801542',
                '4313566d2e417cb382948f8d9d7c765330356054',
                '6c2303a793671e807d1cfc70134c9ca0767d98c2',
                '54386793436c938cff89326944d4c2702340037d',
                '54000345d2e78b03a99d561399e8e548de3f3203',
                '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
                '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
                '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
                '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
                'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
                '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
                '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
                '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
                '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
                'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
                'f15c21f97864b4f071cddfbf2750ec2e23859414',
                'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
                'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
                '84dec09632a4458f79f50ddbbd155506c460b4f9',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
                'b8d04012574729d2c29886e53b1a43ef16dd00a1',
                '6970b057cffe4aab0a792aa634c89f4bebf01441',
                'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/backends/git.py': [
                '4cf116ad5a457530381135e2f4c453e68a1b0105',
                '9a751d84d8e9408e736329767387f41b36935153',
                'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
                '428f81bb652bcba8d631bce926e8834ff49bdcc6',
                '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
                '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
                '50e08c506174d8645a4bb517dd122ac946a0f3bf',
                '54000345d2e78b03a99d561399e8e548de3f3203',
            ],
        }
        for path, commit_ids in files.items():
            # Resolve the node at its earliest known commit, then collect
            # the raw ids of every commit in that node's history.
            node = self.repo.get_commit(commit_ids[0]).get_node(path)
            node_ids = [commit.raw_id for commit in node.history]
            assert set(commit_ids).issubset(set(node_ids)), (
                "We assumed that %s is subset of commit_ids for which file %s "
                "has been changed, and history of that node returned: %s"
                % (commit_ids, path, node_ids))
817 817
    def test_file_annotate(self):
        """Per-line annotation of a file matches the expected commit ids.

        Fixture layout: file path -> commit id -> expected annotation,
        where ``commits`` lists, line by line, the commit that last touched
        each line of the file at that commit.

        NOTE: ``lines_no`` records the expected line count for reference
        only; the loop below asserts only against the ``commits`` list
        (whose length equals ``lines_no``).
        """
        files = {
            'vcs/backends/__init__.py': {
                'c1214f7e79e02fc37156ff215cd71275450cffc3': {
                    'lines_no': 1,
                    'commits': [
                        'c1214f7e79e02fc37156ff215cd71275450cffc3',
                    ],
                },
                '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
                    'lines_no': 21,
                    'commits': [
                        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
                'e29b67bd158580fc90fc5e9111240b90e6e86064': {
                    'lines_no': 32,
                    'commits': [
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '54000345d2e78b03a99d561399e8e548de3f3203',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
            },
        }

        for fname, commit_dict in files.items():
            for commit_id, __ in commit_dict.items():
                commit = self.repo.get_commit(commit_id)

                # get_file_annotate yields (line_no, commit_id, lazy_commit,
                # line) tuples; compare the plain id against the id obtained
                # by resolving the lazy commit callable.
                l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
                l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
                assert l1_1 == l1_2
                l1 = l1_1
                l2 = files[fname][commit_id]['commits']
                assert l1 == l2, (
                    "The lists of commit_ids for %s@commit_id %s"
                    "from annotation list should match each other, "
                    "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906 906
907 907 def test_files_state(self):
908 908 """
909 909 Tests state of FileNodes.
910 910 """
911 911 node = self.repo\
912 912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 913 .get_node('vcs/utils/diffs.py')
914 914 assert node.state, NodeState.ADDED
915 915 assert node.added
916 916 assert not node.changed
917 917 assert not node.not_changed
918 918 assert not node.removed
919 919
920 920 node = self.repo\
921 921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 922 .get_node('.hgignore')
923 923 assert node.state, NodeState.CHANGED
924 924 assert not node.added
925 925 assert node.changed
926 926 assert not node.not_changed
927 927 assert not node.removed
928 928
929 929 node = self.repo\
930 930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 931 .get_node('setup.py')
932 932 assert node.state, NodeState.NOT_CHANGED
933 933 assert not node.added
934 934 assert not node.changed
935 935 assert node.not_changed
936 936 assert not node.removed
937 937
938 938 # If node has REMOVED state then trying to fetch it would raise
939 939 # CommitError exception
940 940 commit = self.repo.get_commit(
941 941 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 942 path = 'vcs/backends/BaseRepository.py'
943 943 with pytest.raises(NodeDoesNotExistError):
944 944 commit.get_node(path)
945 945 # but it would be one of ``removed`` (commit's attribute)
946 946 assert path in [rf.path for rf in commit.removed]
947 947
948 948 commit = self.repo.get_commit(
949 949 '54386793436c938cff89326944d4c2702340037d')
950 950 changed = [
951 951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 952 'vcs/nodes.py']
953 953 assert set(changed) == set([f.path for f in commit.changed])
954 954
955 955 def test_unicode_branch_refs(self):
956 956 unicode_branches = {
957 957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
958 958 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
959 959 }
960 960 with mock.patch(
961 961 ("rhodecode.lib.vcs.backends.git.repository"
962 962 ".GitRepository._refs"),
963 963 unicode_branches):
964 964 branches = self.repo.branches
965 965
966 966 assert 'unicode' in branches
967 967 assert u'uniΓ§ΓΆβˆ‚e' in branches
968 968
969 969 def test_unicode_tag_refs(self):
970 970 unicode_tags = {
971 971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 972 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 973 }
974 974 with mock.patch(
975 975 ("rhodecode.lib.vcs.backends.git.repository"
976 976 ".GitRepository._refs"),
977 977 unicode_tags):
978 978 tags = self.repo.tags
979 979
980 980 assert 'unicode' in tags
981 981 assert u'uniΓ§ΓΆβˆ‚e' in tags
982 982
983 983 def test_commit_message_is_unicode(self):
984 984 for commit in self.repo:
985 985 assert type(commit.message) == unicode
986 986
987 987 def test_commit_author_is_unicode(self):
988 988 for commit in self.repo:
989 989 assert type(commit.author) == unicode
990 990
991 991 def test_repo_files_content_is_unicode(self):
992 992 commit = self.repo.get_commit()
993 993 for node in commit.get_node('/'):
994 994 if node.is_file():
995 995 assert type(node.content) == unicode
996 996
997 997 def test_wrong_path(self):
998 998 # There is 'setup.py' in the root dir but not there:
999 999 path = 'foo/bar/setup.py'
1000 1000 tip = self.repo.get_commit()
1001 1001 with pytest.raises(VCSError):
1002 1002 tip.get_node(path)
1003 1003
1004 1004 @pytest.mark.parametrize("author_email, commit_id", [
1005 1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1006 1006 ('lukasz.balcerzak@python-center.pl',
1007 1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1008 1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1009 1009 ])
1010 1010 def test_author_email(self, author_email, commit_id):
1011 1011 commit = self.repo.get_commit(commit_id)
1012 1012 assert author_email == commit.author_email
1013 1013
1014 1014 @pytest.mark.parametrize("author, commit_id", [
1015 1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1016 1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1017 1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1018 1018 ])
1019 1019 def test_author_username(self, author, commit_id):
1020 1020 commit = self.repo.get_commit(commit_id)
1021 1021 assert author == commit.author_name
1022 1022
1023 1023
class TestLargeFileRepo(object):
    """Resolution of git-lfs pointer files to their stored blobs."""

    def test_large_file(self, backend_git):
        config = make_db_config()
        repo = backend_git.create_test_repo('largefiles', config)

        tip = repo.scm_instance().get_commit()

        # Copy the stored LF object out of the repo's own lfs_store into
        # the configured origin cache so get_largefile_node can find it.
        oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
        lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
        source_path = os.path.join(lfs_store, oid)
        target_path = os.path.join(
            config.get('vcs_git_lfs', 'store_location'), oid)
        shutil.copy(source_path, target_path)

        pointer_node = tip.get_node('1MB.zip')
        lf_node = pointer_node.get_largefile_node()

        assert lf_node.is_largefile() is True
        assert lf_node.size == 1024000
        assert lf_node.name == '1MB.zip'
1048 1048
1049 1049
@pytest.mark.usefixtures("vcs_repository_support")
class TestGitSpecificWithRepo(BackendTestMixin):
    """Git-only behaviour exercised against a small generated repository."""

    @classmethod
    def _get_commits(cls):
        # Fixture: two commits; the first contains a nested path and a
        # symlink entry (mode 0o120000) to exercise tree traversal.
        return [
            {
                'message': 'Initial',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('foobar/static/js/admin/base.js', content='base'),
                    FileNode(
                        'foobar/static/admin', content='admin',
                        mode=0o120000),  # this is a link
                    FileNode('foo', content='foo'),
                ],
            },
            {
                'message': 'Second',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 22),
                'added': [
                    FileNode('foo2', content='foo2'),
                ],
            },
        ]

    def test_paths_slow_traversing(self):
        # Walk the tree one directory level at a time.
        commit = self.repo.get_commit()
        assert commit.get_node('foobar').get_node('static').get_node('js')\
            .get_node('admin').get_node('base.js').content == 'base'

    def test_paths_fast_traversing(self):
        # Resolve the full nested path in a single get_node call.
        commit = self.repo.get_commit()
        assert (
            commit.get_node('foobar/static/js/admin/base.js').content ==
            'base')

    def test_get_diff_runs_git_command_with_hashes(self):
        # run_git_command is replaced with a Mock so we can assert the
        # exact argv get_diff builds when diffing two real commits.
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
        self.repo.get_diff(self.repo[0], self.repo[1])
        self.repo.run_git_command.assert_called_once_with(
            ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
             '--abbrev=40', self.repo._lookup_commit(0),
             self.repo._lookup_commit(1)])

    def test_get_diff_runs_git_command_with_str_hashes(self):
        # Diffing against EMPTY_COMMIT falls back to `git show` of the
        # single commit instead of `git diff` between two.
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
        self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
        self.repo.run_git_command.assert_called_once_with(
            ['show', '-U3', '--full-index', '--binary', '-p', '-M',
             '--abbrev=40', self.repo._lookup_commit(1)])

    def test_get_diff_runs_git_command_with_path_if_its_given(self):
        # A path argument is appended after the `--` separator.
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
        self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
        self.repo.run_git_command.assert_called_once_with(
            ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
             '--abbrev=40', self.repo._lookup_commit(0),
             self.repo._lookup_commit(1), '--', 'foo'])
1111 1111
1112 1112
@pytest.mark.usefixtures("vcs_repository_support")
class TestGitRegression(BackendTestMixin):
    """Regression: listing nodes of a dir whose name is a prefix of siblings.

    The fixture deliberately creates paths where one directory name is a
    string prefix of another (``bot/templates`` vs ``bot/build/templates``,
    ``f.html`` vs ``f1.html``) so that naive prefix matching would leak
    entries from the sibling trees.
    """

    @classmethod
    def _get_commits(cls):
        return [
            {
                'message': 'Initial',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('bot/__init__.py', content='base'),
                    FileNode('bot/templates/404.html', content='base'),
                    FileNode('bot/templates/500.html', content='base'),
                ],
            },
            {
                'message': 'Second',
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 22),
                'added': [
                    FileNode('bot/build/migrations/1.py', content='foo2'),
                    FileNode('bot/build/migrations/2.py', content='foo2'),
                    FileNode(
                        'bot/build/static/templates/f.html', content='foo2'),
                    FileNode(
                        'bot/build/static/templates/f1.html', content='foo2'),
                    FileNode('bot/build/templates/err.html', content='foo2'),
                    FileNode('bot/build/templates/err2.html', content='foo2'),
                ],
            },
        ]

    @pytest.mark.parametrize("path, expected_paths", [
        ('bot', [
            'bot/build',
            'bot/templates',
            'bot/__init__.py']),
        ('bot/build', [
            'bot/build/migrations',
            'bot/build/static',
            'bot/build/templates']),
        ('bot/build/static', [
            'bot/build/static/templates']),
        ('bot/build/static/templates', [
            'bot/build/static/templates/f.html',
            'bot/build/static/templates/f1.html']),
        ('bot/build/templates', [
            'bot/build/templates/err.html',
            'bot/build/templates/err2.html']),
        ('bot/templates/', [
            'bot/templates/404.html',
            'bot/templates/500.html']),
    ])
    def test_similar_paths(self, path, expected_paths):
        # Only the direct children of ``path`` may be returned.
        commit = self.repo.get_commit()
        paths = [n.path for n in commit.get_nodes(path)]
        assert paths == expected_paths
1171 1171
1172 1172
class TestDiscoverGitVersion(object):
    """Behaviour of discover_git_version with and without a vcsserver."""

    def test_returns_git_version(self, baseapp):
        # With a live vcsserver (baseapp fixture) a non-empty version
        # string must be reported.
        assert discover_git_version()

    def test_returns_empty_string_without_vcsserver(self):
        # Any error while talking to the vcsserver degrades to ''.
        broken_connection = mock.Mock()
        broken_connection.discover_git_version = mock.Mock(
            side_effect=Exception)
        with mock.patch('rhodecode.lib.vcs.connection.Git',
                        broken_connection):
            assert discover_git_version() == ''
1186 1186
1187 1187
class TestGetSubmoduleUrl(object):
    """Resolution of submodule urls from a commit's .gitmodules file."""

    def test_submodules_file_found(self):
        commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
        gitmodules = mock.Mock()
        gitmodules.content = (
            '[submodule "subrepo1"]\n'
            '\tpath = subrepo1\n'
            '\turl = https://code.rhodecode.com/dulwich\n'
        )
        with mock.patch.object(
                commit, 'get_node', return_value=gitmodules) as node_getter:
            url = commit._get_submodule_url('subrepo1')
        node_getter.assert_called_once_with('.gitmodules')
        assert url == 'https://code.rhodecode.com/dulwich'

    def test_complex_submodule_path(self):
        # Multi-segment submodule paths must resolve as well.
        commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
        gitmodules = mock.Mock()
        gitmodules.content = (
            '[submodule "complex/subrepo/path"]\n'
            '\tpath = complex/subrepo/path\n'
            '\turl = https://code.rhodecode.com/dulwich\n'
        )
        with mock.patch.object(
                commit, 'get_node', return_value=gitmodules) as node_getter:
            url = commit._get_submodule_url('complex/subrepo/path')
        node_getter.assert_called_once_with('.gitmodules')
        assert url == 'https://code.rhodecode.com/dulwich'

    def test_submodules_file_not_found(self):
        # No .gitmodules file -> no url, no exception.
        commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
        with mock.patch.object(
                commit, 'get_node', side_effect=NodeDoesNotExistError):
            url = commit._get_submodule_url('complex/subrepo/path')
        assert url is None

    def test_path_not_found(self):
        # A path missing from .gitmodules yields None.
        commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
        gitmodules = mock.Mock()
        gitmodules.content = (
            '[submodule "subrepo1"]\n'
            '\tpath = subrepo1\n'
            '\turl = https://code.rhodecode.com/dulwich\n'
        )
        with mock.patch.object(
                commit, 'get_node', return_value=gitmodules) as node_getter:
            url = commit._get_submodule_url('subrepo2')
        node_getter.assert_called_once_with('.gitmodules')
        assert url is None

    def test_returns_cached_values(self):
        # Repeated lookups must parse .gitmodules only once.
        commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
        gitmodules = mock.Mock()
        gitmodules.content = (
            '[submodule "subrepo1"]\n'
            '\tpath = subrepo1\n'
            '\turl = https://code.rhodecode.com/dulwich\n'
        )
        with mock.patch.object(
                commit, 'get_node', return_value=gitmodules) as node_getter:
            for _ in range(3):
                commit._get_submodule_url('subrepo1')
            node_getter.assert_called_once_with('.gitmodules')

    def test_get_node_returns_a_link(self):
        # A tree entry of type 'link' is surfaced as a SubModuleNode.
        repository = mock.Mock()
        repository.alias = 'git'
        commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
        submodule_url = 'https://code.rhodecode.com/dulwich'
        with mock.patch.object(
                commit, '_get_id_for_path', return_value=(1, 'link')), \
                mock.patch.object(
                    commit, '_get_submodule_url',
                    return_value=submodule_url) as submodule_mock:
            node = commit.get_node('/abcde')

        submodule_mock.assert_called_once_with('/abcde')
        assert type(node) == SubModuleNode
        assert node.url == submodule_url

    def test_get_nodes_returns_links(self):
        # Link entries inside a tree listing become SubModuleNodes too.
        repository = mock.MagicMock()
        repository.alias = 'git'
        repository._remote.tree_items.return_value = [
            ('subrepo', 'stat', 1, 'link')
        ]
        commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
        submodule_url = 'https://code.rhodecode.com/dulwich'
        with mock.patch.object(
                commit, '_get_id_for_path', return_value=(1, 'tree')), \
                mock.patch.object(
                    commit, '_get_submodule_url',
                    return_value=submodule_url) as submodule_mock:
            nodes = commit.get_nodes('/abcde')

        submodule_mock.assert_called_once_with('/abcde/subrepo')
        assert len(nodes) == 1
        assert type(nodes[0]) == SubModuleNode
        assert nodes[0].url == submodule_url
General Comments 0
You need to be logged in to leave comments. Login now