##// END OF EJS Templates
commits: re-implemented fetching a single commit for git case....
marcink -
r3740:dcd8fbea new-ui
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,502 +1,501 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23 import collections
24 24
25 25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 26 from pyramid.view import view_config
27 27 from pyramid.renderers import render
28 28 from pyramid.response import Response
29 29
30 30 from rhodecode.apps._base import RepoAppView
31 31
32 32 from rhodecode.lib import diffs, codeblocks
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35 35
36 36 from rhodecode.lib.compat import OrderedDict
37 37 from rhodecode.lib.diffs import (
38 38 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
39 39 get_diff_whitespace_flag)
40 40 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
41 41 import rhodecode.lib.helpers as h
42 42 from rhodecode.lib.utils2 import safe_unicode, str2bool
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 RepositoryError, CommitDoesNotExistError)
46 46 from rhodecode.model.db import ChangesetComment, ChangesetStatus
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import CommentsModel
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 def _update_with_GET(params, request):
56 56 for k in ['diff1', 'diff2', 'diff']:
57 57 params[k] += request.GET.getall(k)
58 58
59 59
60 60
61 61
62 62
class RepoCommitsView(RepoAppView):
    """
    Views for a single commit (or commit range) of a repository:
    HTML display, raw/patch/diff downloads, and commit comments.
    """

    def load_default_context(self):
        # Base template context shared by all views in this class.
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo

        return c

    def _is_diff_cache_enabled(self, target_repo):
        # Per-repo setting controlling whether rendered diffs are cached.
        caching_enabled = self._get_general_setting(
            target_repo, 'rhodecode_diff_cache')
        log.debug('Diff caching enabled: %s', caching_enabled)
        return caching_enabled

    def _commit(self, commit_id_range, method):
        """
        Shared implementation behind show/raw/patch/download views.

        :param commit_id_range: single commit id, or two ids joined by '...'
        :param method: one of 'show', 'raw', 'patch', 'download'
        :raises HTTPNotFound: when the commit(s) cannot be resolved
        :raises HTTPBadRequest: when *method* is not recognized
        """
        _ = self.request.translate
        c = self.load_default_context()
        c.fulldiff = self.request.GET.get('fulldiff')

        # fetch global flags of ignore ws or context lines
        diff_context = get_diff_context(self.request)
        hide_whitespace_changes = get_diff_whitespace_flag(self.request)

        # diff_limit will cut off the whole diff if the limit is applied
        # otherwise it will just hide the big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        # get ranges of commit ids if preset
        commit_range = commit_id_range.split('...')[:2]

        try:
            pre_load = ['affected_files', 'author', 'branch', 'date',
                        'message', 'parents']

            if len(commit_range) == 2:
                commits = self.rhodecode_vcs_repo.get_commits(
                    start_id=commit_range[0], end_id=commit_range[1],
                    pre_load=pre_load, translate_tags=False)
                commits = list(commits)
            else:
                commits = [self.rhodecode_vcs_repo.get_commit(
                    commit_id=commit_id_range, pre_load=pre_load)]

            c.commit_ranges = commits
            if not c.commit_ranges:
                raise RepositoryError('The commit range returned an empty result')
        except CommitDoesNotExistError as e:
            msg = _('No such commit exists. Org exception: `{}`').format(e)
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except Exception:
            log.exception("General failure")
            raise HTTPNotFound()

        c.changes = OrderedDict()
        c.lines_added = 0
        c.lines_deleted = 0

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit

        c.commit_statuses = ChangesetStatus.STATUSES
        c.inline_comments = []
        c.files = []

        c.statuses = []
        c.comments = []
        c.unresolved_comments = []
        if len(c.commit_ranges) == 1:
            # Comments/statuses are only gathered for single-commit views.
            commit = c.commit_ranges[0]
            c.comments = CommentsModel().get_comments(
                self.db_repo.repo_id,
                revision=commit.raw_id)
            c.statuses.append(ChangesetStatusModel().get_status(
                self.db_repo.repo_id, commit.raw_id))
            # comments from PR
            statuses = ChangesetStatusModel().get_statuses(
                self.db_repo.repo_id, commit.raw_id,
                with_revisions=True)
            prs = set(st.pull_request for st in statuses
                      if st.pull_request is not None)
            # from associated statuses, check the pull requests, and
            # show comments from them
            for pr in prs:
                c.comments.extend(pr.comments)

            c.unresolved_comments = CommentsModel()\
                .get_commit_unresolved_todos(commit.raw_id)

        diff = None
        # Iterate over ranges (default commit view is always one commit)
        for commit in c.commit_ranges:
            c.changes[commit.raw_id] = []

            commit2 = commit
            commit1 = commit.first_parent

            if method == 'show':
                inline_comments = CommentsModel().get_inline_comments(
                    self.db_repo.repo_id, revision=commit.raw_id)
                c.inline_cnt = CommentsModel().get_inline_comments_count(
                    inline_comments)
                c.inline_comments = inline_comments

                cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
                    self.db_repo)
                cache_file_path = diff_cache_exist(
                    cache_path, 'diff', commit.raw_id,
                    hide_whitespace_changes, diff_context, c.fulldiff)

                caching_enabled = self._is_diff_cache_enabled(self.db_repo)
                force_recache = str2bool(self.request.GET.get('force_recache'))

                cached_diff = None
                if caching_enabled:
                    cached_diff = load_cached_diff(cache_file_path)

                has_proper_diff_cache = cached_diff and cached_diff.get('diff')
                if not force_recache and has_proper_diff_cache:
                    diffset = cached_diff['diff']
                else:
                    vcs_diff = self.rhodecode_vcs_repo.get_diff(
                        commit1, commit2,
                        ignore_whitespace=hide_whitespace_changes,
                        context=diff_context)

                    diff_processor = diffs.DiffProcessor(
                        vcs_diff, format='newdiff', diff_limit=diff_limit,
                        file_limit=file_limit, show_full_diff=c.fulldiff)

                    _parsed = diff_processor.prepare()

                    diffset = codeblocks.DiffSet(
                        repo_name=self.db_repo_name,
                        source_node_getter=codeblocks.diffset_node_getter(commit1),
                        target_node_getter=codeblocks.diffset_node_getter(commit2))

                    diffset = self.path_filter.render_patchset_filtered(
                        diffset, _parsed, commit1.raw_id, commit2.raw_id)

                    # save cached diff
                    if caching_enabled:
                        cache_diff(cache_file_path, diffset, None)

                c.limited_diff = diffset.limited_diff
                c.changes[commit.raw_id] = diffset
            else:
                # TODO(marcink): no cache usage here...
                _diff = self.rhodecode_vcs_repo.get_diff(
                    commit1, commit2,
                    ignore_whitespace=hide_whitespace_changes, context=diff_context)
                diff_processor = diffs.DiffProcessor(
                    _diff, format='newdiff', diff_limit=diff_limit,
                    file_limit=file_limit, show_full_diff=c.fulldiff)
                # downloads/raw we only need RAW diff nothing else
                diff = self.path_filter.get_raw_patch(diff_processor)
                c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]

        # sort comments by how they were generated
        c.comments = sorted(c.comments, key=lambda x: x.comment_id)

        if len(c.commit_ranges) == 1:
            c.commit = c.commit_ranges[0]
            c.parent_tmpl = ''.join(
                '# Parent %s\n' % x.raw_id for x in c.commit.parents)

        if method == 'download':
            response = Response(diff)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s.diff' % commit_id_range[:12])
            return response
        elif method == 'patch':
            c.diff = safe_unicode(diff)
            patch = render(
                'rhodecode:templates/changeset/patch_changeset.mako',
                self._get_template_context(c), self.request)
            response = Response(patch)
            response.content_type = 'text/plain'
            return response
        elif method == 'raw':
            response = Response(diff)
            response.content_type = 'text/plain'
            return response
        elif method == 'show':
            if len(c.commit_ranges) == 1:
                html = render(
                    'rhodecode:templates/changeset/changeset.mako',
                    self._get_template_context(c), self.request)
                return Response(html)
            else:
                c.ancestor = None
                c.target_repo = self.db_repo
                html = render(
                    'rhodecode:templates/changeset/changeset_range.mako',
                    self._get_template_context(c), self.request)
                return Response(html)

        raise HTTPBadRequest()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit', request_method='GET',
        renderer=None)
    def repo_commit_show(self):
        # HTML view of a single commit (or range).
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='show')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_raw', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_commit_raw_deprecated', request_method='GET',
        renderer=None)
    def repo_commit_raw(self):
        # Plain-text raw diff of the commit.
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='raw')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_patch', request_method='GET',
        renderer=None)
    def repo_commit_patch(self):
        # Patch-formatted view rendered through the patch template.
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='patch')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_download', request_method='GET',
        renderer=None)
    def repo_commit_download(self):
        # Diff served as an attachment download.
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='download')

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_create', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_create(self):
        """
        Create a comment (optionally with a status change) on one or
        many commits; returns JSON data with the rendered comment.
        """
        _ = self.request.translate
        commit_id = self.request.matchdict['commit_id']

        c = self.load_default_context()
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)

        if status:
            # a pure status change gets a default comment text
            text = text or (_('Status change %(transition_icon)s %(status)s')
                            % {'transition_icon': '>',
                               'status': ChangesetStatus.get_status_lbl(status)})

        multi_commit_ids = []
        for _commit_id in self.request.POST.get('commit_ids', '').split(','):
            if _commit_id not in ['', None, EmptyCommit.raw_id]:
                if _commit_id not in multi_commit_ids:
                    multi_commit_ids.append(_commit_id)

        commit_ids = multi_commit_ids or [commit_id]

        comment = None
        for current_id in filter(None, commit_ids):
            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_db_user.user_id,
                commit_id=current_id,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status else None),
                status_change_type=status,
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            # get status if set !
            if status:
                # if latest status was from pull request and it's closed
                # disallow changing status !
                # dont_allow_on_closed_pull_request = True !

                try:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_db_user.user_id,
                        comment,
                        revision=current_id,
                        dont_allow_on_closed_pull_request=True
                    )
                except StatusChangeOnClosedPullRequestError:
                    msg = _('Changing the status of a commit associated with '
                            'a closed pull request is not allowed')
                    log.exception(msg)
                    h.flash(msg, category='warning')
                    raise HTTPFound(h.route_path(
                        'repo_commit', repo_name=self.db_repo_name,
                        commit_id=current_id))

        # finalize, commit and redirect
        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_preview', request_method='POST',
        renderer='string', xhr=True)
    def repo_commit_comment_preview(self):
        # Technically a CSRF token is not needed as no state changes with this
        # call. However, as this is a POST is better to have it, so automated
        # tools don't flag it as potential CSRF.
        # Post is required because the payload could be bigger than the maximum
        # allowed by GET.

        text = self.request.POST.get('text')
        renderer = self.request.POST.get('renderer') or 'rst'
        if text:
            return h.render(text, renderer=renderer, mentions=True)
        return ''

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_delete', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_delete(self):
        """
        Delete a commit comment; allowed for the comment owner, repo
        admins (on their repo's comments) and super admins.
        """
        commit_id = self.request.matchdict['commit_id']
        comment_id = self.request.matchdict['comment_id']

        comment = ChangesetComment.get_or_404(comment_id)
        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # comment already deleted in another call probably
            return True

        is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
        super_admin = h.HasPermissionAny('hg.admin')()
        comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
        is_repo_comment = comment.repo.repo_name == self.db_repo_name
        comment_repo_admin = is_repo_admin and is_repo_comment

        if super_admin or comment_owner or comment_repo_admin:
            CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
            Session().commit()
            return True
        else:
            log.warning('No permissions for user %s to delete comment_id: %s',
                        self._rhodecode_db_user, comment_id)
            raise HTTPNotFound()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_commit_data(self):
        # JSON data for a single commit; an EmptyCommit carrying the error
        # message is returned instead of a 404 for missing commits.
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
        except CommitDoesNotExistError as e:
            return EmptyCommit(message=str(e))

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_children', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_commit_children(self):
        # JSON list of child commits; empty list for unknown commits.
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            children = commit.children
        except CommitDoesNotExistError:
            children = []

        result = {"results": children}
        return result

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_parents', request_method='GET',
        renderer='json_ext')
    def repo_commit_parents(self):
        # JSON list of parent commits; empty list for unknown commits.
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            parents = commit.parents
        except CommitDoesNotExistError:
            parents = []
        result = {"results": parents}
        return result
@@ -1,105 +1,106 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT inmemory module
23 23 """
24 24
25 25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 26 from rhodecode.lib.utils import safe_str
27 27 from rhodecode.lib.vcs.backends import base
28 28
29 29
class GitInMemoryCommit(base.BaseInMemoryCommit):

    def commit(self, message, author, parents=None, branch=None, date=None,
               **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created `GitCommit`. Updates repository's
        `commit_ids`.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: `datetime.datetime` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises `CommitError`: if any error occurs while committing
        """
        self.check_integrity(parents)
        if branch is None:
            branch = self.repository.DEFAULT_BRANCH_NAME

        ENCODING = "UTF-8"

        # parent tree is the base for the new commit's tree, if we have one
        commit_tree = None
        if self.parents[0]:
            commit_tree = self.parents[0]._commit['tree']

        updated = []
        for node in self.added + self.changed:
            if not node.is_binary:
                content = node.content.encode(ENCODING)
            else:
                content = node.content
            updated.append({
                'path': node.path,
                'node_path': node.name.encode(ENCODING),
                'content': content,
                'mode': node.mode,
            })

        removed = [node.path for node in self.removed]

        date, tz = date_to_timestamp_plus_offset(date)

        # TODO: johbo: Make kwargs explicit and check if this is needed.
        author_time = kwargs.pop('author_time', date)
        author_tz = kwargs.pop('author_timezone', tz)

        commit_data = {
            'parents': [p._commit['id'] for p in self.parents if p],
            'author': safe_str(author),
            'committer': safe_str(author),
            'encoding': ENCODING,
            'message': safe_str(message),
            'commit_time': int(date),
            'author_time': int(author_time),
            'commit_timezone': tz,
            'author_timezone': author_tz,
        }

        commit_id = self.repository._remote.commit(
            commit_data, branch, commit_tree, updated, removed)

        # Update vcs repository object; skip when the backend returned an
        # already-known id (e.g. re-committing identical content).
        # NOTE(review): nesting of the two lines under the guard was inferred
        # from the diff — confirm against upstream.
        if commit_id not in self.repository.commit_ids:
            self.repository.commit_ids.append(commit_id)
            self.repository._rebuild_cache(self.repository.commit_ids)

        # invalidate parsed refs after commit
        self.repository._refs = self.repository._get_refs()
        self.repository.branches = self.repository._get_branches()
        tip = self.repository.get_commit()
        self.reset()
        return tip
@@ -1,1012 +1,1031 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference)
39 39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError,
44 44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 45
46 46
# Matches a short (12 hex chars) or full (40 hex chars) git SHA, and nothing
# else. The previous pattern r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$' placed the
# brackets inside the character classes and left the first alternative
# unanchored, so any string *starting* with 12 hex chars matched.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 class GitRepository(BaseRepository):
53 53 """
54 54 Git repository backend.
55 55 """
56 56 DEFAULT_BRANCH_NAME = 'master'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62
63 63 self.path = safe_str(os.path.abspath(repo_path))
64 64 self.config = config if config else self.get_default_config()
65 65 self.with_wire = with_wire
66 66
67 67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 68
69 69 # caches
70 70 self._commit_ids = {}
71 71
72 72 @LazyProperty
73 73 def _remote(self):
74 74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75 75
76 76 @LazyProperty
77 77 def bare(self):
78 78 return self._remote.bare()
79 79
80 80 @LazyProperty
81 81 def head(self):
82 82 return self._remote.head()
83 83
84 84 @LazyProperty
85 85 def commit_ids(self):
86 86 """
87 87 Returns list of commit ids, in ascending order. Being lazy
88 88 attribute allows external tools to inject commit ids from cache.
89 89 """
90 90 commit_ids = self._get_all_commit_ids()
91 91 self._rebuild_cache(commit_ids)
92 92 return commit_ids
93 93
94 94 def _rebuild_cache(self, commit_ids):
95 95 self._commit_ids = dict((commit_id, index)
96 96 for index, commit_id in enumerate(commit_ids))
97 97
98 98 def run_git_command(self, cmd, **opts):
99 99 """
100 100 Runs given ``cmd`` as git command and returns tuple
101 101 (stdout, stderr).
102 102
103 103 :param cmd: git command to be executed
104 104 :param opts: env options to pass into Subprocess command
105 105 """
106 106 if not isinstance(cmd, list):
107 107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108 108
109 109 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 110 out, err = self._remote.run_git_command(cmd, **opts)
111 111 if err and not skip_stderr_log:
112 112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 113 return out, err
114 114
115 115 @staticmethod
116 116 def check_url(url, config):
117 117 """
118 118 Function will check given url and try to verify if it's a valid
119 119 link. Sometimes it may happened that git will issue basic
120 120 auth request that can cause whole API to hang when used from python
121 121 or other external calls.
122 122
123 123 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 124 when the return code is non 200
125 125 """
126 126 # check first if it's not an url
127 127 if os.path.isdir(url) or url.startswith('file:'):
128 128 return True
129 129
130 130 if '+' in url.split('://', 1)[0]:
131 131 url = url.split('+', 1)[1]
132 132
133 133 # Request the _remote to verify the url
134 134 return connection.Git.check_url(url, config.serialize())
135 135
136 136 @staticmethod
137 137 def is_valid_repository(path):
138 138 if os.path.isdir(os.path.join(path, '.git')):
139 139 return True
140 140 # check case of bare repository
141 141 try:
142 142 GitRepository(path)
143 143 return True
144 144 except VCSError:
145 145 pass
146 146 return False
147 147
148 148 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 149 bare=False):
150 150 if create and os.path.exists(self.path):
151 151 raise RepositoryError(
152 152 "Cannot create repository at %s, location already exist"
153 153 % self.path)
154 154
155 155 if bare and do_workspace_checkout:
156 156 raise RepositoryError("Cannot update a bare repository")
157 157 try:
158 158
159 159 if src_url:
160 160 # check URL before any actions
161 161 GitRepository.check_url(src_url, self.config)
162 162
163 163 if create:
164 164 os.makedirs(self.path, mode=0o755)
165 165
166 166 if bare:
167 167 self._remote.init_bare()
168 168 else:
169 169 self._remote.init()
170 170
171 171 if src_url and bare:
172 172 # bare repository only allows a fetch and checkout is not allowed
173 173 self.fetch(src_url, commit_ids=None)
174 174 elif src_url:
175 175 self.pull(src_url, commit_ids=None,
176 176 update_after=do_workspace_checkout)
177 177
178 178 else:
179 179 if not self._remote.assert_correct_path():
180 180 raise RepositoryError(
181 181 'Path "%s" does not contain a Git repository' %
182 182 (self.path,))
183 183
184 184 # TODO: johbo: check if we have to translate the OSError here
185 185 except OSError as err:
186 186 raise RepositoryError(err)
187 187
188 188 def _get_all_commit_ids(self, filters=None):
189 189 # we must check if this repo is not empty, since later command
190 190 # fails if it is. And it's cheaper to ask than throw the subprocess
191 191 # errors
192 192
193 193 head = self._remote.head(show_exc=False)
194 194 if not head:
195 195 return []
196 196
197 197 rev_filter = ['--branches', '--tags']
198 198 extra_filter = []
199 199
200 200 if filters:
201 201 if filters.get('since'):
202 202 extra_filter.append('--since=%s' % (filters['since']))
203 203 if filters.get('until'):
204 204 extra_filter.append('--until=%s' % (filters['until']))
205 205 if filters.get('branch_name'):
206 206 rev_filter = ['--tags']
207 207 extra_filter.append(filters['branch_name'])
208 208 rev_filter.extend(extra_filter)
209 209
210 210 # if filters.get('start') or filters.get('end'):
211 211 # # skip is offset, max-count is limit
212 212 # if filters.get('start'):
213 213 # extra_filter += ' --skip=%s' % filters['start']
214 214 # if filters.get('end'):
215 215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216 216
217 217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 218 try:
219 219 output, __ = self.run_git_command(cmd)
220 220 except RepositoryError:
221 221 # Can be raised for empty repositories
222 222 return []
223 223 return output.splitlines()
224 224
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
        """
        Resolve a commit reference into a full commit id string.

        Accepted inputs: a head alias (``None``, ``''``, ``'tip'``,
        ``'HEAD'``, ``'head'``, ``-1``), a numeric index into
        ``commit_ids``, a full ref path, a branch or tag name, or a sha.

        :param commit_id_or_idx: the reference to resolve
        :param translate_tag: accepted for interface symmetry; not used in
            this method (tag translation happens in ``get_commit``)
        :raises CommitDoesNotExistError: if nothing matches
        """
        def is_null(value):
            # heuristic for the all-zero "null" sha
            return len(value) == commit_id_or_idx.count('0')

        # head aliases resolve to the most recent commit
        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
            or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            # short digit strings / ints are treated as an index into the
            # cached commit id list
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
                raise CommitDoesNotExistError(msg)

        elif is_bstr:
            # check full path ref, eg. refs/heads/master
            ref_id = self._refs.get(commit_id_or_idx)
            if ref_id:
                return ref_id

            # check branch name
            branch_ids = self.branches.values()
            ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            # check tag name
            ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
            if ref_id:
                return ref_id

            if (not SHA_PATTERN.match(commit_id_or_idx) or
                    commit_id_or_idx not in self.commit_ids):
                msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
                raise CommitDoesNotExistError(msg)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
273 268
274 269 def get_hook_location(self):
275 270 """
276 271 returns absolute path to location where hooks are stored
277 272 """
278 273 loc = os.path.join(self.path, 'hooks')
279 274 if not self.bare:
280 275 loc = os.path.join(self.path, '.git', 'hooks')
281 276 return loc
282 277
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no commit available (e.g. empty repo) - fall back to the
            # modification time of the on-disk control files
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294 289
295 290 def _get_fs_mtime(self):
296 291 idx_loc = '' if self.bare else '.git'
297 292 # fallback to filesystem
298 293 in_path = os.path.join(self.path, idx_loc, "index")
299 294 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 295 if os.path.exists(in_path):
301 296 return os.stat(in_path).st_mtime
302 297 else:
303 298 return os.stat(he_path).st_mtime
304 299
    @LazyProperty
    def description(self):
        # remote-provided description; falls back to the class default
        # when unset or empty
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309 304
310 305 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 306 if self.is_empty():
312 307 return OrderedDict()
313 308
314 309 result = []
315 310 for ref, sha in self._refs.iteritems():
316 311 if ref.startswith(prefix):
317 312 ref_name = ref
318 313 if strip_prefix:
319 314 ref_name = ref[len(prefix):]
320 315 result.append((safe_unicode(ref_name), sha))
321 316
322 317 def get_name(entry):
323 318 return entry[0]
324 319
325 320 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326 321
    def _get_branches(self):
        # branches are the refs under refs/heads/, keyed by short name
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329 324
    @LazyProperty
    def branches(self):
        # cached mapping of branch name -> sha
        return self._get_branches()
333 328
    @LazyProperty
    def branches_closed(self):
        # git has no closed branches (mercurial concept); kept for
        # backend interface parity
        return {}
337 332
    @LazyProperty
    def bookmarks(self):
        # git has no bookmarks (mercurial concept); kept for
        # backend interface parity
        return {}
341 336
342 337 @LazyProperty
343 338 def branches_all(self):
344 339 all_branches = {}
345 340 all_branches.update(self.branches)
346 341 all_branches.update(self.branches_closed)
347 342 return all_branches
348 343
    @LazyProperty
    def tags(self):
        # cached mapping of tag name -> sha
        return self._get_tags()
352 347
    def _get_tags(self):
        # tags are the refs under refs/tags/, sorted descending by name
        return self._get_refs_entries(
            prefix='refs/tags/', strip_prefix=True, reverse=True)
356 351
357 352 def tag(self, name, user, commit_id=None, message=None, date=None,
358 353 **kwargs):
359 354 # TODO: fix this method to apply annotated tags correct with message
360 355 """
361 356 Creates and returns a tag for the given ``commit_id``.
362 357
363 358 :param name: name for new tag
364 359 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
365 360 :param commit_id: commit id for which new tag would be created
366 361 :param message: message of the tag's commit
367 362 :param date: date of tag's commit
368 363
369 364 :raises TagAlreadyExistError: if tag with same name already exists
370 365 """
371 366 if name in self.tags:
372 367 raise TagAlreadyExistError("Tag %s already exists" % name)
373 368 commit = self.get_commit(commit_id=commit_id)
374 369 message = message or "Added tag %s for commit %s" % (
375 370 name, commit.raw_id)
376 371 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
377 372
378 373 self._refs = self._get_refs()
379 374 self.tags = self._get_tags()
380 375 return commit
381 376
382 377 def remove_tag(self, name, user, message=None, date=None):
383 378 """
384 379 Removes tag with the given ``name``.
385 380
386 381 :param name: name of the tag to be removed
387 382 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 383 :param message: message of the tag's removal commit
389 384 :param date: date of tag's removal commit
390 385
391 386 :raises TagDoesNotExistError: if tag with given name does not exists
392 387 """
393 388 if name not in self.tags:
394 389 raise TagDoesNotExistError("Tag %s does not exist" % name)
395 390 tagpath = vcspath.join(
396 391 self._remote.get_refs_path(), 'refs', 'tags', name)
397 392 try:
398 393 os.remove(tagpath)
399 394 self._refs = self._get_refs()
400 395 self.tags = self._get_tags()
401 396 except OSError as e:
402 397 raise RepositoryError(e.strerror)
403 398
    def _get_refs(self):
        # full ref mapping (ref path -> sha) as reported by the remote
        return self._remote.get_refs()
406 401
    @LazyProperty
    def _refs(self):
        # cached ref mapping; mutators (tag/remove_tag/...) refresh it
        # manually after changing refs
        return self._get_refs()
410 405
411 406 @property
412 407 def _ref_tree(self):
413 408 node = tree = {}
414 409 for ref, sha in self._refs.iteritems():
415 410 path = ref.split('/')
416 411 for bit in path[:-1]:
417 412 node = node.setdefault(bit, {})
418 413 node[path[-1]] = sha
419 414 node = tree
420 415 return tree
421 416
422 417 def get_remote_ref(self, ref_name):
423 418 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 419 try:
425 420 return self._refs[ref_key]
426 421 except Exception:
427 422 return
428 423
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: commit reference (sha, branch/tag name, alias)
        :param commit_idx: numeric index into ``commit_ids``; used only
            when ``commit_id`` is not given
        :param pre_load: optional list of attributes to pre-load on the
            returned commit object
        :param translate_tag: when True, ask the remote to resolve the id
            (needed when the id points at an annotated tag object)
        :raises EmptyRepositoryError: if the repository has no commits
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indexes to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        # slow path: resolve the reference (branch/tag/sha/alias) locally
        commit_id = self._lookup_commit(commit_id)
        remote_idx = None
        if translate_tag:
            # Need to call remote to translate id for tagging scenario
            remote_data = self._remote.get_object(commit_id)
            commit_id = remote_data["commit_id"]
            remote_idx = remote_data["idx"]

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            # not in the local cache - fall back to the remote-provided idx
            idx = remote_idx or 0

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
449 467
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # resolve the boundary ids into positions in the cached commit list
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end position inclusive
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            # filtered request: ask git (rev-list) for the matching ids
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            # slice the id list to the requested window
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
534 553
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param path: optional file path to restrict the diff to
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: must equal ``path`` if given; diffing two different
            paths is not supported
        :raises ValueError: if ``path1`` differs from ``path``
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        # shared flags: full index lines, binary-safe, rename detection,
        # untruncated shas
        flags = [
            '-U%s' % context, '--full-index', '--binary', '-p',
            '-M', '--abbrev=40']
        if ignore_whitespace:
            flags.append('-w')

        if commit1 == self.EMPTY_COMMIT:
            # no parent to diff against: 'git show' renders the whole commit
            cmd = ['show'] + flags + [commit2.raw_id]
        else:
            cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]

        if path:
            cmd.extend(['--', path])

        stdout, __ = self.run_git_command(cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit1 == self.EMPTY_COMMIT:
            lines = stdout.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            stdout = '\n'.join(lines[x:]) + '\n'
        return GitDiff(stdout)
582 601
583 602 def strip(self, commit_id, branch_name):
584 603 commit = self.get_commit(commit_id=commit_id)
585 604 if commit.merge:
586 605 raise Exception('Cannot reset to merge commit')
587 606
588 607 # parent is going to be the new head now
589 608 commit = commit.parents[0]
590 609 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
591 610
592 611 self.commit_ids = self._get_all_commit_ids()
593 612 self._rebuild_cache(self.commit_ids)
594 613
595 614 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
596 615 if commit_id1 == commit_id2:
597 616 return commit_id1
598 617
599 618 if self != repo2:
600 619 commits = self._remote.get_missing_revs(
601 620 commit_id1, commit_id2, repo2.path)
602 621 if commits:
603 622 commit = repo2.get_commit(commits[-1])
604 623 if commit.parents:
605 624 ancestor_id = commit.parents[0].raw_id
606 625 else:
607 626 ancestor_id = None
608 627 else:
609 628 # no commits from other repo, ancestor_id is the commit_id2
610 629 ancestor_id = commit_id2
611 630 else:
612 631 output, __ = self.run_git_command(
613 632 ['merge-base', commit_id1, commit_id2])
614 633 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
615 634
616 635 return ancestor_id
617 636
618 637 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
619 638 repo1 = self
620 639 ancestor_id = None
621 640
622 641 if commit_id1 == commit_id2:
623 642 commits = []
624 643 elif repo1 != repo2:
625 644 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
626 645 repo2.path)
627 646 commits = [
628 647 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
629 648 for commit_id in reversed(missing_ids)]
630 649 else:
631 650 output, __ = repo1.run_git_command(
632 651 ['log', '--reverse', '--pretty=format: %H', '-s',
633 652 '%s..%s' % (commit_id1, commit_id2)])
634 653 commits = [
635 654 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
636 655 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
637 656
638 657 return commits
639 658
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        # fresh in-memory commit bound to this repository instance
        return GitInMemoryCommit(self)
646 665
647 666 def pull(self, url, commit_ids=None, update_after=False):
648 667 """
649 668 Pull changes from external location. Pull is different in GIT
650 669 that fetch since it's doing a checkout
651 670
652 671 :param commit_ids: Optional. Can be set to a list of commit ids
653 672 which shall be pulled from the other repository.
654 673 """
655 674 refs = None
656 675 if commit_ids is not None:
657 676 remote_refs = self._remote.get_remote_refs(url)
658 677 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
659 678 self._remote.pull(url, refs=refs, update_after=update_after)
660 679 self._remote.invalidate_vcs_cache()
661 680
    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.

        :param commit_ids: optional refs/commit ids to restrict the fetch to
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        # new objects arrived - drop the remote-side vcs cache
        self._remote.invalidate_vcs_cache()
668 687
    def push(self, url):
        """Push to the external location at ``url``; ``refs=None`` means
        no ref restriction."""
        refs = None
        self._remote.sync_push(url, refs=refs)
672 691
    def set_refs(self, ref_name, commit_id):
        # point ref_name (e.g. refs/heads/master) at commit_id
        self._remote.set_refs(ref_name, commit_id)
675 694
    def remove_ref(self, ref_name):
        # delete the given ref (e.g. refs/heads/some-branch)
        self._remote.remove_ref(ref_name)
678 697
    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        # NOTE(review): presumably needed when serving over dumb
        # protocols - confirm before relying on it
        self._remote.update_server_info()
684 703
685 704 def _current_branch(self):
686 705 """
687 706 Return the name of the current branch.
688 707
689 708 It only works for non bare repositories (i.e. repositories with a
690 709 working copy)
691 710 """
692 711 if self.bare:
693 712 raise RepositoryError('Bare git repos do not have active branches')
694 713
695 714 if self.is_empty():
696 715 return None
697 716
698 717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
699 718 return stdout.strip()
700 719
701 720 def _checkout(self, branch_name, create=False, force=False):
702 721 """
703 722 Checkout a branch in the working directory.
704 723
705 724 It tries to create the branch if create is True, failing if the branch
706 725 already exists.
707 726
708 727 It only works for non bare repositories (i.e. repositories with a
709 728 working copy)
710 729 """
711 730 if self.bare:
712 731 raise RepositoryError('Cannot checkout branches in a bare git repo')
713 732
714 733 cmd = ['checkout']
715 734 if force:
716 735 cmd.append('-f')
717 736 if create:
718 737 cmd.append('-b')
719 738 cmd.append(branch_name)
720 739 self.run_git_command(cmd, fail_on_stderr=False)
721 740
722 741 def _identify(self):
723 742 """
724 743 Return the current state of the working directory.
725 744 """
726 745 if self.bare:
727 746 raise RepositoryError('Bare git repos do not have active branches')
728 747
729 748 if self.is_empty():
730 749 return None
731 750
732 751 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
733 752 return stdout.strip()
734 753
735 754 def _local_clone(self, clone_path, branch_name, source_branch=None):
736 755 """
737 756 Create a local clone of the current repo.
738 757 """
739 758 # N.B.(skreft): the --branch option is required as otherwise the shallow
740 759 # clone will only fetch the active branch.
741 760 cmd = ['clone', '--branch', branch_name,
742 761 self.path, os.path.abspath(clone_path)]
743 762
744 763 self.run_git_command(cmd, fail_on_stderr=False)
745 764
746 765 # if we get the different source branch, make sure we also fetch it for
747 766 # merge conditions
748 767 if source_branch and source_branch != branch_name:
749 768 # check if the ref exists.
750 769 shadow_repo = GitRepository(os.path.abspath(clone_path))
751 770 if shadow_repo.get_remote_ref(source_branch):
752 771 cmd = ['fetch', self.path, source_branch]
753 772 self.run_git_command(cmd, fail_on_stderr=False)
754 773
755 774 def _local_fetch(self, repository_path, branch_name, use_origin=False):
756 775 """
757 776 Fetch a branch from a local repository.
758 777 """
759 778 repository_path = os.path.abspath(repository_path)
760 779 if repository_path == self.path:
761 780 raise ValueError('Cannot fetch from the same repository')
762 781
763 782 if use_origin:
764 783 branch_name = '+{branch}:refs/heads/{branch}'.format(
765 784 branch=branch_name)
766 785
767 786 cmd = ['fetch', '--no-tags', '--update-head-ok',
768 787 repository_path, branch_name]
769 788 self.run_git_command(cmd, fail_on_stderr=False)
770 789
771 790 def _local_reset(self, branch_name):
772 791 branch_name = '{}'.format(branch_name)
773 792 cmd = ['reset', '--hard', branch_name, '--']
774 793 self.run_git_command(cmd, fail_on_stderr=False)
775 794
776 795 def _last_fetch_heads(self):
777 796 """
778 797 Return the last fetched heads that need merging.
779 798
780 799 The algorithm is defined at
781 800 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
782 801 """
783 802 if not self.bare:
784 803 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
785 804 else:
786 805 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
787 806
788 807 heads = []
789 808 with open(fetch_heads_path) as f:
790 809 for line in f:
791 810 if ' not-for-merge ' in line:
792 811 continue
793 812 line = re.sub('\t.*', '', line, flags=re.DOTALL)
794 813 heads.append(line)
795 814
796 815 return heads
797 816
    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        # NOTE(review): enable_hooks is accepted but not used here
        return GitRepository(shadow_repository_path)
800 819
801 820 def _local_pull(self, repository_path, branch_name, ff_only=True):
802 821 """
803 822 Pull a branch from a local repository.
804 823 """
805 824 if self.bare:
806 825 raise RepositoryError('Cannot pull into a bare git repository')
807 826 # N.B.(skreft): The --ff-only option is to make sure this is a
808 827 # fast-forward (i.e., we are only pulling new changes and there are no
809 828 # conflicts with our current branch)
810 829 # Additionally, that option needs to go before --no-tags, otherwise git
811 830 # pull complains about it being an unknown flag.
812 831 cmd = ['pull']
813 832 if ff_only:
814 833 cmd.append('--ff-only')
815 834 cmd.extend(['--no-tags', repository_path, branch_name])
816 835 self.run_git_command(cmd, fail_on_stderr=False)
817 836
818 837 def _local_merge(self, merge_message, user_name, user_email, heads):
819 838 """
820 839 Merge the given head into the checked out branch.
821 840
822 841 It will force a merge commit.
823 842
824 843 Currently it raises an error if the repo is empty, as it is not possible
825 844 to create a merge commit in an empty repo.
826 845
827 846 :param merge_message: The message to use for the merge commit.
828 847 :param heads: the heads to merge.
829 848 """
830 849 if self.bare:
831 850 raise RepositoryError('Cannot merge into a bare git repository')
832 851
833 852 if not heads:
834 853 return
835 854
836 855 if self.is_empty():
837 856 # TODO(skreft): do somehting more robust in this case.
838 857 raise RepositoryError(
839 858 'Do not know how to merge into empty repositories yet')
840 859
841 860 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
842 861 # commit message. We also specify the user who is doing the merge.
843 862 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
844 863 '-c', 'user.email=%s' % safe_str(user_email),
845 864 'merge', '--no-ff', '-m', safe_str(merge_message)]
846 865 cmd.extend(heads)
847 866 try:
848 867 output = self.run_git_command(cmd, fail_on_stderr=False)
849 868 except RepositoryError:
850 869 # Cleanup any merge leftovers
851 870 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
852 871 raise
853 872
854 873 def _local_push(
855 874 self, source_branch, repository_path, target_branch,
856 875 enable_hooks=False, rc_scm_data=None):
857 876 """
858 877 Push the source_branch to the given repository and target_branch.
859 878
860 879 Currently it if the target_branch is not master and the target repo is
861 880 empty, the push will work, but then GitRepository won't be able to find
862 881 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
863 882 pointing to master, which does not exist).
864 883
865 884 It does not run the hooks in the target repo.
866 885 """
867 886 # TODO(skreft): deal with the case in which the target repo is empty,
868 887 # and the target_branch is not master.
869 888 target_repo = GitRepository(repository_path)
870 889 if (not target_repo.bare and
871 890 target_repo._current_branch() == target_branch):
872 891 # Git prevents pushing to the checked out branch, so simulate it by
873 892 # pulling into the target repository.
874 893 target_repo._local_pull(self.path, source_branch)
875 894 else:
876 895 cmd = ['push', os.path.abspath(repository_path),
877 896 '%s:%s' % (source_branch, target_branch)]
878 897 gitenv = {}
879 898 if rc_scm_data:
880 899 gitenv.update({'RC_SCM_DATA': rc_scm_data})
881 900
882 901 if not enable_hooks:
883 902 gitenv['RC_SKIP_HOOKS'] = '1'
884 903 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
885 904
886 905 def _get_new_pr_branch(self, source_branch, target_branch):
887 906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
888 907 pr_branches = []
889 908 for branch in self.branches:
890 909 if branch.startswith(prefix):
891 910 pr_branches.append(int(branch[len(prefix):]))
892 911
893 912 if not pr_branches:
894 913 branch_id = 0
895 914 else:
896 915 branch_id = max(pr_branches) + 1
897 916
898 917 return '%s%d' % (prefix, branch_id)
899 918
900 919 def _maybe_prepare_merge_workspace(
901 920 self, repo_id, workspace_id, target_ref, source_ref):
902 921 shadow_repository_path = self._get_shadow_repository_path(
903 922 repo_id, workspace_id)
904 923 if not os.path.exists(shadow_repository_path):
905 924 self._local_clone(
906 925 shadow_repository_path, target_ref.name, source_ref.name)
907 926 log.debug(
908 927 'Prepared shadow repository in %s', shadow_repository_path)
909 928
910 929 return shadow_repository_path
911 930
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository using a shadow repository as the working area, and
        return a ``MergeResponse``.

        :param dry_run: when True, the merge is tested but never pushed
            back to this repository
        :param use_rebase: only influences the log message here; the merge
            itself always uses ``git merge --no-ff``
        :param close_branch: accepted for backend interface parity; git
            has no closed branches
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # refuse to merge if the target branch moved since the caller read it
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self._get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to reload repo to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = GitRepository(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError:
            log.exception('Failure when doing local merge on git shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                # NOTE(review): 'merge_commit' is set to the pr branch name
                # here rather than a commit id - confirm this is intentional
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
@@ -1,97 +1,98 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG inmemory module
23 23 """
24 24
25 25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 26 from rhodecode.lib.utils import safe_str
27 27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
28 28 from rhodecode.lib.vcs.exceptions import RepositoryError
29 29
30 30
class MercurialInMemoryCommit(BaseInMemoryCommit):

    def commit(self, message, author, parents=None, branch=None, date=None,
               **kwargs):
        """
        Create a commit from the in-memory state (the working directory is
        not consulted in any way) and return the new tip as a
        `MercurialCommit`. Refreshes the repository's `commit_ids` cache.

        :param message: commit message, must be an `unicode` instance
        :param author: full author line, i.e. "Joe Doe <joe.doe@example.com>",
            must be an `unicode` instance
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param branch: optional branch name as unicode; falls back to the
            backend's default branch when not given
        :param date: `datetime.datetime` instance, defaults to now

        :raises `RepositoryError`: if any error occurs while committing
        """
        self.check_integrity(parents)

        if not isinstance(message, unicode) or not isinstance(author, unicode):
            # TODO: johbo: Should be a TypeError
            raise RepositoryError('Given message and author needs to be '
                                  'an <unicode> instance got %r & %r instead'
                                  % (type(message), type(author)))

        if branch is None:
            branch = self.repository.DEFAULT_BRANCH_NAME
        kwargs['branch'] = safe_str(branch)

        message = safe_str(message)
        author = safe_str(author)

        parent_ids = [parent.raw_id if parent else None
                      for parent in self.parents]

        ENCODING = "UTF-8"

        # serialize added/changed nodes; text content goes over the wire
        # encoded, binary content is passed through untouched
        updated = []
        for node in self.added + self.changed:
            if node.is_binary:
                node_content = node.content
            else:
                node_content = node.content.encode(ENCODING)
            updated.append({
                'path': node.path,
                'content': node_content,
                'mode': node.mode,
            })

        removed = [node.path for node in self.removed]

        date, tz = date_to_timestamp_plus_offset(date)

        commit_id = self.repository._remote.commitctx(
            message=message, parents=parent_ids,
            commit_time=date, commit_timezone=tz, user=author,
            files=self.get_paths(), extra=kwargs, removed=removed,
            updated=updated)

        # guard against registering the same sha twice in the cache
        if commit_id not in self.repository.commit_ids:
            self.repository.commit_ids.append(commit_id)
            self.repository._rebuild_cache(self.repository.commit_ids)

        self.repository.branches = self.repository._get_branches()
        tip = self.repository.get_commit()
        self.reset()
        return tip
@@ -1,941 +1,942 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
hexlify = binascii.hexlify  # short alias: binary sha -> hex string
nullid = "\0" * 20  # 20 zero bytes; mercurial's null revision id

log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
59 59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 60 do_workspace_checkout=False, with_wire=None, bare=False):
61 61 """
62 62 Raises RepositoryError if repository could not be find at the given
63 63 ``repo_path``.
64 64
65 65 :param repo_path: local path of the repository
66 66 :param config: config object containing the repo configuration
67 67 :param create=False: if set to True, would try to create repository if
68 68 it does not exist rather than raising exception
69 69 :param src_url=None: would try to clone repository from given location
70 70 :param do_workspace_checkout=False: sets update of working copy after
71 71 making a clone
72 72 :param bare: not used, compatible with other VCS
73 73 """
74 74
75 75 self.path = safe_str(os.path.abspath(repo_path))
76 76 # mercurial since 4.4.X requires certain configuration to be present
77 77 # because sometimes we init the repos with config we need to meet
78 78 # special requirements
79 79 self.config = config if config else self.get_default_config(
80 80 default=[('extensions', 'largefiles', '1')])
81 81 self.with_wire = with_wire
82 82
83 83 self._init_repo(create, src_url, do_workspace_checkout)
84 84
85 85 # caches
86 86 self._commit_ids = {}
87 87
88 88 @LazyProperty
89 89 def _remote(self):
90 90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 91
92 92 @LazyProperty
93 93 def commit_ids(self):
94 94 """
95 95 Returns list of commit ids, in ascending order. Being lazy
96 96 attribute allows external tools to inject shas from cache.
97 97 """
98 98 commit_ids = self._get_all_commit_ids()
99 99 self._rebuild_cache(commit_ids)
100 100 return commit_ids
101 101
102 102 def _rebuild_cache(self, commit_ids):
103 103 self._commit_ids = dict((commit_id, index)
104 104 for index, commit_id in enumerate(commit_ids))
105 105
106 106 @LazyProperty
107 107 def branches(self):
108 108 return self._get_branches()
109 109
110 110 @LazyProperty
111 111 def branches_closed(self):
112 112 return self._get_branches(active=False, closed=True)
113 113
114 114 @LazyProperty
115 115 def branches_all(self):
116 116 all_branches = {}
117 117 all_branches.update(self.branches)
118 118 all_branches.update(self.branches_closed)
119 119 return all_branches
120 120
121 121 def _get_branches(self, active=True, closed=False):
122 122 """
123 123 Gets branches for this repository
124 124 Returns only not closed active branches by default
125 125
126 126 :param active: return also active branches
127 127 :param closed: return also closed branches
128 128
129 129 """
130 130 if self.is_empty():
131 131 return {}
132 132
133 133 def get_name(ctx):
134 134 return ctx[0]
135 135
136 136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 137 self._remote.branches(active, closed).items()]
138 138
139 139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 140
141 141 @LazyProperty
142 142 def tags(self):
143 143 """
144 144 Gets tags for this repository
145 145 """
146 146 return self._get_tags()
147 147
148 148 def _get_tags(self):
149 149 if self.is_empty():
150 150 return {}
151 151
152 152 def get_name(ctx):
153 153 return ctx[0]
154 154
155 155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 156 self._remote.tags().items()]
157 157
158 158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 159
160 160 def tag(self, name, user, commit_id=None, message=None, date=None,
161 161 **kwargs):
162 162 """
163 163 Creates and returns a tag for the given ``commit_id``.
164 164
165 165 :param name: name for new tag
166 166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 167 :param commit_id: commit id for which new tag would be created
168 168 :param message: message of the tag's commit
169 169 :param date: date of tag's commit
170 170
171 171 :raises TagAlreadyExistError: if tag with same name already exists
172 172 """
173 173 if name in self.tags:
174 174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 175 commit = self.get_commit(commit_id=commit_id)
176 176 local = kwargs.setdefault('local', False)
177 177
178 178 if message is None:
179 179 message = "Added tag %s for commit %s" % (name, commit.short_id)
180 180
181 181 date, tz = date_to_timestamp_plus_offset(date)
182 182
183 183 self._remote.tag(
184 184 name, commit.raw_id, message, local, user, date, tz)
185 185 self._remote.invalidate_vcs_cache()
186 186
187 187 # Reinitialize tags
188 188 self.tags = self._get_tags()
189 189 tag_id = self.tags[name]
190 190
191 191 return self.get_commit(commit_id=tag_id)
192 192
193 193 def remove_tag(self, name, user, message=None, date=None):
194 194 """
195 195 Removes tag with the given `name`.
196 196
197 197 :param name: name of the tag to be removed
198 198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 199 :param message: message of the tag's removal commit
200 200 :param date: date of tag's removal commit
201 201
202 202 :raises TagDoesNotExistError: if tag with given name does not exists
203 203 """
204 204 if name not in self.tags:
205 205 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 206 if message is None:
207 207 message = "Removed tag %s" % name
208 208 local = False
209 209
210 210 date, tz = date_to_timestamp_plus_offset(date)
211 211
212 212 self._remote.tag(name, nullid, message, local, user, date, tz)
213 213 self._remote.invalidate_vcs_cache()
214 214 self.tags = self._get_tags()
215 215
216 216 @LazyProperty
217 217 def bookmarks(self):
218 218 """
219 219 Gets bookmarks for this repository
220 220 """
221 221 return self._get_bookmarks()
222 222
223 223 def _get_bookmarks(self):
224 224 if self.is_empty():
225 225 return {}
226 226
227 227 def get_name(ctx):
228 228 return ctx[0]
229 229
230 230 _bookmarks = [
231 231 (safe_unicode(n), hexlify(h)) for n, h in
232 232 self._remote.bookmarks().items()]
233 233
234 234 return OrderedDict(sorted(_bookmarks, key=get_name))
235 235
236 236 def _get_all_commit_ids(self):
237 237 return self._remote.get_all_commit_ids('visible')
238 238
239 239 def get_diff(
240 240 self, commit1, commit2, path='', ignore_whitespace=False,
241 241 context=3, path1=None):
242 242 """
243 243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 244 `commit2` since `commit1`.
245 245
246 246 :param commit1: Entry point from which diff is shown. Can be
247 247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 248 the changes since empty state of the repository until `commit2`
249 249 :param commit2: Until which commit changes should be shown.
250 250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 251 changes. Defaults to ``False``.
252 252 :param context: How many lines before/after changed lines should be
253 253 shown. Defaults to ``3``.
254 254 """
255 255 self._validate_diff_commits(commit1, commit2)
256 256 if path1 is not None and path1 != path:
257 257 raise ValueError("Diff of two different paths not supported.")
258 258
259 259 if path:
260 260 file_filter = [self.path, path]
261 261 else:
262 262 file_filter = None
263 263
264 264 diff = self._remote.diff(
265 265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 266 opt_git=True, opt_ignorews=ignore_whitespace,
267 267 context=context)
268 268 return MercurialDiff(diff)
269 269
270 270 def strip(self, commit_id, branch=None):
271 271 self._remote.strip(commit_id, update=False, backup="none")
272 272
273 273 self._remote.invalidate_vcs_cache()
274 274 self.commit_ids = self._get_all_commit_ids()
275 275 self._rebuild_cache(self.commit_ids)
276 276
277 277 def verify(self):
278 278 verify = self._remote.verify()
279 279
280 280 self._remote.invalidate_vcs_cache()
281 281 return verify
282 282
283 283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 284 if commit_id1 == commit_id2:
285 285 return commit_id1
286 286
287 287 ancestors = self._remote.revs_from_revspec(
288 288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 289 other_path=repo2.path)
290 290 return repo2[ancestors[0]].raw_id if ancestors else None
291 291
292 292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 293 if commit_id1 == commit_id2:
294 294 commits = []
295 295 else:
296 296 if merge:
297 297 indexes = self._remote.revs_from_revspec(
298 298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 300 else:
301 301 indexes = self._remote.revs_from_revspec(
302 302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 303 commit_id1, other_path=repo2.path)
304 304
305 305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 306 for idx in indexes]
307 307
308 308 return commits
309 309
310 310 @staticmethod
311 311 def check_url(url, config):
312 312 """
313 313 Function will check given url and try to verify if it's a valid
314 314 link. Sometimes it may happened that mercurial will issue basic
315 315 auth request that can cause whole API to hang when used from python
316 316 or other external calls.
317 317
318 318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 319 when the return code is non 200
320 320 """
321 321 # check first if it's not an local url
322 322 if os.path.isdir(url) or url.startswith('file:'):
323 323 return True
324 324
325 325 # Request the _remote to verify the url
326 326 return connection.Hg.check_url(url, config.serialize())
327 327
328 328 @staticmethod
329 329 def is_valid_repository(path):
330 330 return os.path.isdir(os.path.join(path, '.hg'))
331 331
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :param create: create the repository (or directory for a clone)
        :param src_url: optional remote location to clone from
        :param do_workspace_checkout: update the working copy after cloning
        :raises RepositoryError: when ``create`` is set but ``self.path``
            already exists
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            # validate reachability of the source before cloning
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            # make the directory first; the remote call below initializes it
            os.makedirs(self.path, mode=0o755)

        self._remote.localrepository(create)
361 361
362 362 @LazyProperty
363 363 def in_memory_commit(self):
364 364 return MercurialInMemoryCommit(self)
365 365
366 366 @LazyProperty
367 367 def description(self):
368 368 description = self._remote.get_config_value(
369 369 'web', 'description', untrusted=True)
370 370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371 371
372 372 @LazyProperty
373 373 def contact(self):
374 374 contact = (
375 375 self._remote.get_config_value("web", "contact") or
376 376 self._remote.get_config_value("ui", "username"))
377 377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378 378
379 379 @LazyProperty
380 380 def last_change(self):
381 381 """
382 382 Returns last change made on this repository as
383 383 `datetime.datetime` object.
384 384 """
385 385 try:
386 386 return self.get_commit().date
387 387 except RepositoryError:
388 388 tzoffset = makedate()[1]
389 389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390 390
391 391 def _get_fs_mtime(self):
392 392 # fallback to filesystem
393 393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 394 st_path = os.path.join(self.path, '.hg', "store")
395 395 if os.path.exists(cl_path):
396 396 return os.stat(cl_path).st_mtime
397 397 else:
398 398 return os.stat(st_path).st_mtime
399 399
400 400 def _get_url(self, url):
401 401 """
402 402 Returns normalized url. If schema is not given, would fall
403 403 to filesystem
404 404 (``file:///``) schema.
405 405 """
406 406 url = url.encode('utf8')
407 407 if url != 'default' and '://' not in url:
408 408 url = "file:" + urllib.pathname2url(url)
409 409 return url
410 410
411 411 def get_hook_location(self):
412 412 """
413 413 returns absolute path to location where hooks are stored
414 414 """
415 415 return os.path.join(self.path, '.hg', '.hgrc')
416 416
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or symbolic name the backend can look up
        :param commit_idx: numeric index into ``commit_ids``; may be negative
        :param pre_load: optional attributes to pre-load on the commit
        :param translate_tag: unused here; kept for signature compatibility
            with other backends
        :raises EmptyRepositoryError: on a repository with no commits
        :raises CommitDoesNotExistError: when nothing resolves
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass
                # cache miss - fall through to the remote lookup below

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # out-of-range index: let the remote try to resolve it
                commit_id = commit_idx
        else:
            # no id and no idx given - resolve the repository tip
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit %s does not exist for %s" % (commit_id, self.name)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456 457
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :param translate_tags: unused here; kept for signature compatibility
            with other backends
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # translate the boundary commit ids into positions in commit_ids
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        # make the end position inclusive for the slice below
        if end_pos is not None:
            end_pos += 1

        # build a mercurial revset; the individual filters are ANDed together
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            # no filter: iterate the plain cached commit id list
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
539 540
540 541 def pull(self, url, commit_ids=None):
541 542 """
542 543 Pull changes from external location.
543 544
544 545 :param commit_ids: Optional. Can be set to a list of commit ids
545 546 which shall be pulled from the other repository.
546 547 """
547 548 url = self._get_url(url)
548 549 self._remote.pull(url, commit_ids=commit_ids)
549 550 self._remote.invalidate_vcs_cache()
550 551
551 552 def fetch(self, url, commit_ids=None):
552 553 """
553 554 Backward compatibility with GIT fetch==pull
554 555 """
555 556 return self.pull(url, commit_ids=commit_ids)
556 557
557 558 def push(self, url):
558 559 url = self._get_url(url)
559 560 self._remote.sync_push(url)
560 561
561 562 def _local_clone(self, clone_path):
562 563 """
563 564 Create a local clone of the current repo.
564 565 """
565 566 self._remote.clone(self.path, clone_path, update_after_clone=True,
566 567 hooks=False)
567 568
568 569 def _update(self, revision, clean=False):
569 570 """
570 571 Update the working copy to the specified revision.
571 572 """
572 573 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
573 574 self._remote.update(revision, clean=clean)
574 575
575 576 def _identify(self):
576 577 """
577 578 Return the current state of the working directory.
578 579 """
579 580 return self._remote.identify().strip().rstrip('+')
580 581
581 582 def _heads(self, branch=None):
582 583 """
583 584 Return the commit ids of the repository heads.
584 585 """
585 586 return self._remote.heads(branch=branch).strip().split(' ')
586 587
587 588 def _ancestor(self, revision1, revision2):
588 589 """
589 590 Return the common ancestor of the two revisions.
590 591 """
591 592 return self._remote.ancestor(revision1, revision2)
592 593
593 594 def _local_push(
594 595 self, revision, repository_path, push_branches=False,
595 596 enable_hooks=False):
596 597 """
597 598 Push the given revision to the specified repository.
598 599
599 600 :param push_branches: allow to create branches in the target repo.
600 601 """
601 602 self._remote.push(
602 603 [revision], repository_path, hooks=enable_hooks,
603 604 push_branches=push_branches)
604 605
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: reference to merge into (checked out first)
        :param source_ref: reference being merged
        :param use_rebase: rebase the source onto the target instead of a
            merge commit
        :param dry_run: accepted for interface parity; not consulted in this
            method's body
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # bookmark marks the rebased head so we can update to it below
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
660 661
661 662 def _local_close(self, target_ref, user_name, user_email,
662 663 source_ref, close_message=''):
663 664 """
664 665 Close the branch of the given source_revision
665 666
666 667 Returns the commit id of the close and a boolean indicating if the
667 668 commit needs to be pushed.
668 669 """
669 670 self._update(source_ref.commit_id)
670 671 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 672 try:
672 673 self._remote.commit(
673 674 message=safe_str(message),
674 675 username=safe_str('%s <%s>' % (user_name, user_email)),
675 676 close_branch=True)
676 677 self._remote.invalidate_vcs_cache()
677 678 return self._identify(), True
678 679 except RepositoryError:
679 680 # Cleanup any commit leftovers
680 681 self._remote.update(clean=True)
681 682 raise
682 683
683 684 def _is_the_same_branch(self, target_ref, source_ref):
684 685 return (
685 686 self._get_branch_name(target_ref) ==
686 687 self._get_branch_name(source_ref))
687 688
688 689 def _get_branch_name(self, ref):
689 690 if ref.type == 'branch':
690 691 return ref.name
691 692 return self._remote.ctx_branch(ref.commit_id)
692 693
693 694 def _maybe_prepare_merge_workspace(
694 695 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 696 shadow_repository_path = self._get_shadow_repository_path(
696 697 repo_id, workspace_id)
697 698 if not os.path.exists(shadow_repository_path):
698 699 self._local_clone(shadow_repository_path)
699 700 log.debug(
700 701 'Prepared shadow repository in %s', shadow_repository_path)
701 702
702 703 return shadow_repository_path
703 704
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge (or rebase) ``source_ref`` of ``source_repo`` into
        ``target_ref`` of this repository, working entirely inside a shadow
        repository and only pushing back to the origin on success.

        Returns a :class:`MergeResponse` describing whether the merge was
        possible, whether it was executed, the resulting reference and a
        failure reason plus metadata on error.  With ``dry_run=True`` the
        merge is simulated in the shadow repo but never pushed.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # the target commit must currently be a head, otherwise merging onto
        # it would create an unwanted extra head in the target repo
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # Mercurial branches may have multiple heads; refuse to merge in
            # that ambiguous situation and report all heads back to the caller
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        # all work happens in a per-workspace shadow clone, hooks disabled
        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    # a second shadow instance, this time with hooks enabled,
                    # so the push back to origin triggers the usual hook chain
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
851 852
852 853 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
853 854 config = self.config.copy()
854 855 if not enable_hooks:
855 856 config.clear_section('hooks')
856 857 return MercurialRepository(shadow_repository_path, config)
857 858
858 859 def _validate_pull_reference(self, reference):
859 860 if not (reference.name in self.bookmarks or
860 861 reference.name in self.branches or
861 862 self.get_commit(reference.commit_id)):
862 863 raise CommitDoesNotExistError(
863 864 'Unknown branch, bookmark or commit id')
864 865
865 866 def _local_pull(self, repository_path, reference):
866 867 """
867 868 Fetch a branch, bookmark or commit from a local repository.
868 869 """
869 870 repository_path = os.path.abspath(repository_path)
870 871 if repository_path == self.path:
871 872 raise ValueError('Cannot pull from the same repository')
872 873
873 874 reference_type_to_option_name = {
874 875 'book': 'bookmark',
875 876 'branch': 'branch',
876 877 }
877 878 option_name = reference_type_to_option_name.get(
878 879 reference.type, 'revision')
879 880
880 881 if option_name == 'revision':
881 882 ref = reference.commit_id
882 883 else:
883 884 ref = reference.name
884 885
885 886 options = {option_name: [ref]}
886 887 self._remote.pull_cmd(repository_path, hooks=False, **options)
887 888 self._remote.invalidate_vcs_cache()
888 889
    def bookmark(self, bookmark, revision=None):
        """Create or move *bookmark* to point at *revision*.

        Python 2: a ``unicode`` name is coerced to a byte string before it
        is handed to the remote layer; the vcs cache is invalidated after
        the bookmark changes.
        """
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()
894 895
    def get_path_permissions(self, username):
        """
        Build a path permission checker for *username* from the repository's
        ``.hg/hgacl`` file, or return ``None`` when no such file exists.

        The file is parsed with ``configparser``; include/exclude glob
        patterns are looked up for the user (falling back to 'default') in
        the 'narrowacl' and 'narrowhgacl' sections.  Any parse/read error is
        re-raised as ``RepositoryRequirementError``.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # first matching (section, option) wins; user-specific entries
            # take precedence over the 'default' fallback
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # a literal (non-glob) pattern also matches everything below it
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
935 936
936 937
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection generator that resolves items by numeric commit
    index (Mercurial revision numbers) instead of commit hash."""

    def _commit_factory(self, commit_id):
        """Materialize the commit at index *commit_id*."""
        return self.repo.get_commit(
            pre_load=self.pre_load, commit_idx=commit_id)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,1745 +1,1742 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
# Data structure to hold the response data when updating commits during a pull
# request update.
UpdateResponse = collections.namedtuple(
    'UpdateResponse',
    'executed reason new old changes source_changed target_changed')
71 71
72 72
class PullRequestModel(BaseModel):
    """Business logic for creating, querying, merging and updating
    pull requests."""

    # db model this BaseModel subclass operates on
    cls = PullRequest

    # default diff context (number of unchanged lines around a hunk)
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # user-facing messages keyed by UpdateFailureReason, shown when updating
    # a pull request's commits succeeds/fails
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # all reference types a pull request can be sourced from
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # subset of REF_TYPES that supports updating an open pull request
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
    def get(self, pull_request):
        """Return the PullRequest (or PullRequestVersion) instance for
        *pull_request*, which may be an id or an instance already."""
        return self.__get_pull_request(pull_request)
139 139
    def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
                               opened_by=None, order_by=None,
                               order_dir='desc', only_created=True):
        """
        Build (without executing) the base query shared by ``count_all`` and
        ``get_all``.

        :param repo_name: repo to filter on; treated as the PR *source* repo
            when ``source`` is true, otherwise as the *target* repo
        :param statuses: optional list of PR statuses to include
        :param opened_by: optional list of author user ids
        :param order_by: key into the internal order map, or falsy for no
            ordering
        :param order_dir: 'asc' or 'desc'
        :param only_created: restrict to PRs in the "created" state
        :returns: an unexecuted SQLAlchemy query
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
223 223
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
331 331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 332 order_by=None, order_dir='desc'):
333 333 q = PullRequest.query()
334 334 if user_id:
335 335 reviewers_subquery = Session().query(
336 336 PullRequestReviewers.pull_request_id).filter(
337 337 PullRequestReviewers.user_id == user_id).subquery()
338 338 user_filter = or_(
339 339 PullRequest.user_id == user_id,
340 340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 341 )
342 342 q = PullRequest.query().filter(user_filter)
343 343
344 344 # closed,opened
345 345 if statuses:
346 346 q = q.filter(PullRequest.status.in_(statuses))
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'title': PullRequest.title,
352 352 'updated_on_raw': PullRequest.updated_on,
353 353 'target_repo': PullRequest.target_repo_id
354 354 }
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_im_participating_in(self, user_id=None, statuses=None):
363 363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 364 return q.count()
365 365
366 366 def get_im_participating_in(
367 367 self, user_id=None, statuses=None, offset=0,
368 368 length=None, order_by=None, order_dir='desc'):
369 369 """
370 370 Get all Pull requests that i'm participating in, or i have opened
371 371 """
372 372
373 373 q = self._prepare_participating_query(
374 374 user_id, statuses=statuses, order_by=order_by,
375 375 order_dir=order_dir)
376 376
377 377 if length:
378 378 pull_requests = q.limit(length).offset(offset).all()
379 379 else:
380 380 pull_requests = q.all()
381 381
382 382 return pull_requests
383 383
    def get_versions(self, pull_request):
        """
        Return all saved versions of *pull_request*, ordered by version id
        ascending (oldest first).

        NOTE(review): an earlier docstring said "descending", but the query
        orders by ``pull_request_version_id.asc()``.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
392 392
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request together with an optional historic version.

        :param pull_request_id: id of the pull request
        :param version: ``'latest'`` for the live PR, a version id for a
            stored snapshot, or falsy for the live PR without version info
        :returns: 4-tuple ``(original_pr, version_obj, display_obj,
            at_version)`` where *display_obj* is the object suited for
            rendering and *at_version* is ``'latest'``, a version id or None
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' means: use the live PR object itself as the "version"
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # a concrete stored snapshot; 404 when it doesn't exist
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
415 415
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request, attach reviewers, set initial review
        status, run an initial merge simulation, and fire notifications,
        hooks and an audit log entry.

        :param created_by: user (id or object) creating the PR
        :param source_repo/source_ref: where the changes come from
        :param target_repo/target_ref: where the changes should land
        :param revisions: commit ids included in the PR
        :param reviewers: iterable of ``(user_id, reasons, mandatory, rules)``
        :param title: PR title
        :param description: optional PR description
        :param description_renderer: renderer name used for the description
        :param reviewer_data: raw reviewer-rule payload stored on the PR
        :returns: the persisted :class:`PullRequest`
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED):
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
527 527
    def trigger_pull_request_hook(self, pull_request, user, action, data=None):
        """
        Dispatch the pull-request lifecycle hook matching *action*
        ('create', 'merge', 'close', 'review_status_change', 'update',
        'comment'); unknown actions are silently ignored.

        For 'comment' no log-hook exists, so a no-op hook is substituted and
        a ``PullRequestCommentEvent`` is triggered instead; ``data`` must
        then contain a 'comment' key.
        """
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
        elif action == 'comment':
            # dummy hook ! for comment. We want this function to handle all cases
            def trigger_hook(*args, **kwargs):
                pass
            comment = data['comment']
            events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        else:
            return

        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_alias=target_scm.alias,
            pull_request=pull_request,
            data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method is not dealing correctly yet with the lack of autoupdates
562 562 nor with the implicit target updates.
563 563 For example: if a commit in the source repo is already in the target it
564 564 will be reported anyways.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
    def merge_repo(self, pull_request, user, extras):
        """
        Merge the pull request and, on success, close it with a comment and
        write an audit-log entry.

        :param extras: hook environment dict; 'user_agent' is forced to
            'internal-merge' so hooks can tell this apart from a push
        :returns: the merge state returned by :meth:`_merge_pull_request`
        """
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        extras['user_agent'] = 'internal-merge'
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug("Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

            self._log_audit_action(
                'repo.pull_request.merge',
                {'merge_state': merge_state.__dict__},
                user, pull_request)

        else:
            log.warn("Merge failed, not updating the pull request.")
        return merge_state
592 592
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual vcs-level merge of the pull request.

        Builds the merge commit message from ``merge_msg`` or the configured
        template, refreshes the target reference, and runs the backend merge
        inside a hooks callback daemon so vcs hooks can call back into the
        application.

        :returns: the backend's merge response object
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
633 633
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge revision, post a closing
        comment, invalidate repo caches and fire the 'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action.

        Returns an `UpdateResponse`; `executed` is False (with a
        `UpdateFailureReason`) when the ref type is not updatable, a ref is
        missing, or nothing changed. On a source change a new
        `PullRequestVersion` snapshot is created first.
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        # source repo
        source_repo = pull_request.source_repo.scm_instance()

        try:
            source_commit = source_repo.get_commit(commit_id=source_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        # source moved if the named ref now resolves to a different commit
        source_changed = source_ref_id != source_commit.raw_id

        # target repo
        target_repo = pull_request.target_repo.scm_instance()

        try:
            target_commit = target_repo.get_commit(commit_id=target_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): source/target flags look swapped below, but both
            # are False on this branch so it is harmless — confirm intent.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        try:
            # updatable ref types are resolved by name to follow moved heads
            if target_ref_type in self.REF_TYPES:
                target_commit = target_repo.get_commit(target_ref_name)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
                source_changed=source_changed, target_changed=target_changed)

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = source_repo.get_common_ancestor(
            source_commit.raw_id, target_commit.raw_id, target_repo)

        # refs are stored as 'type:name:commit_id' strings
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        commit_changes = (changes.added or changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version, changes=changes,
            source_changed=source_changed, target_changed=target_changed)
833 831
    def _create_version_from_snapshot(self, pull_request):
        """
        Persist an immutable `PullRequestVersion` snapshot of the current
        pull request state (refs, revisions, merge state, reviewer data) so
        older diffs and comments stay addressable after an update.

        :return: the flushed `PullRequestVersion` instance (pk assigned)
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        # flush so the new version row gets its primary key assigned
        Session().flush()

        return version
861 859
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Produce parsed diffs for the previous version and the updated pull
        request; used to detect file changes and to outdate comments.

        :return: tuple of (old `DiffProcessor`, new `DiffProcessor`), both
            already prepared
        """
        # extra context lines so inline comments can be matched/re-anchored
        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())
        hide_whitespace_changes = False
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
889 887
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        Only comments with no version set (``pull_request_version == None``)
        are picked up, so comments already bound to an older version are
        left untouched.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.
        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
915 913
916 914 def _calculate_commit_id_changes(self, old_ids, new_ids):
917 915 added = [x for x in new_ids if x not in old_ids]
918 916 common = [x for x in new_ids if x in old_ids]
919 917 removed = [x for x in old_ids if x not in new_ids]
920 918 total = new_ids
921 919 return ChangeTuple(added, common, removed, total)
922 920
923 921 def _calculate_file_changes(self, old_diff_data, new_diff_data):
924 922
925 923 old_files = OrderedDict()
926 924 for diff_data in old_diff_data.parsed_diff:
927 925 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
928 926
929 927 added_files = []
930 928 modified_files = []
931 929 removed_files = []
932 930 for diff_data in new_diff_data.parsed_diff:
933 931 new_filename = diff_data['filename']
934 932 new_hash = md5_safe(diff_data['raw_diff'])
935 933
936 934 old_hash = old_files.get(new_filename)
937 935 if not old_hash:
938 936 # file is not present in old diff, means it's added
939 937 added_files.append(new_filename)
940 938 else:
941 939 if new_hash != old_hash:
942 940 modified_files.append(new_filename)
943 941 # now remove a file from old, since we have seen it already
944 942 del old_files[new_filename]
945 943
946 944 # removed files is when there are present in old, but not in NEW,
947 945 # since we remove old files that are present in new diff, left-overs
948 946 # if any should be the removed files
949 947 removed_files.extend(old_files.keys())
950 948
951 949 return FileChangeTuple(added_files, modified_files, removed_files)
952 950
953 951 def _render_update_message(self, changes, file_changes):
954 952 """
955 953 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
956 954 so it's always looking the same disregarding on which default
957 955 renderer system is using.
958 956
959 957 :param changes: changes named tuple
960 958 :param file_changes: file changes named tuple
961 959
962 960 """
963 961 new_status = ChangesetStatus.get_status_lbl(
964 962 ChangesetStatus.STATUS_UNDER_REVIEW)
965 963
966 964 changed_files = (
967 965 file_changes.added + file_changes.modified + file_changes.removed)
968 966
969 967 params = {
970 968 'under_review_label': new_status,
971 969 'added_commits': changes.added,
972 970 'removed_commits': changes.removed,
973 971 'changed_files': changed_files,
974 972 'added_files': file_changes.added,
975 973 'modified_files': file_changes.modified,
976 974 'removed_files': file_changes.removed,
977 975 }
978 976 renderer = RstTemplateRenderer()
979 977 return renderer.render('pull_request_update.mako', **params)
980 978
981 979 def edit(self, pull_request, title, description, description_renderer, user):
982 980 pull_request = self.__get_pull_request(pull_request)
983 981 old_data = pull_request.get_api_data(with_merge_state=False)
984 982 if pull_request.is_closed():
985 983 raise ValueError('This pull request is closed')
986 984 if title:
987 985 pull_request.title = title
988 986 pull_request.description = description
989 987 pull_request.updated_on = datetime.datetime.now()
990 988 pull_request.description_renderer = description_renderer
991 989 Session().add(pull_request)
992 990 self._log_audit_action(
993 991 'repo.pull_request.edit', {'old_data': old_data},
994 992 user, pull_request)
995 993
996 994 def update_reviewers(self, pull_request, reviewer_data, user):
997 995 """
998 996 Update the reviewers in the pull request
999 997
1000 998 :param pull_request: the pr to update
1001 999 :param reviewer_data: list of tuples
1002 1000 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1003 1001 """
1004 1002 pull_request = self.__get_pull_request(pull_request)
1005 1003 if pull_request.is_closed():
1006 1004 raise ValueError('This pull request is closed')
1007 1005
1008 1006 reviewers = {}
1009 1007 for user_id, reasons, mandatory, rules in reviewer_data:
1010 1008 if isinstance(user_id, (int, compat.string_types)):
1011 1009 user_id = self._get_user(user_id).user_id
1012 1010 reviewers[user_id] = {
1013 1011 'reasons': reasons, 'mandatory': mandatory}
1014 1012
1015 1013 reviewers_ids = set(reviewers.keys())
1016 1014 current_reviewers = PullRequestReviewers.query()\
1017 1015 .filter(PullRequestReviewers.pull_request ==
1018 1016 pull_request).all()
1019 1017 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1020 1018
1021 1019 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1022 1020 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1023 1021
1024 1022 log.debug("Adding %s reviewers", ids_to_add)
1025 1023 log.debug("Removing %s reviewers", ids_to_remove)
1026 1024 changed = False
1027 1025 added_audit_reviewers = []
1028 1026 removed_audit_reviewers = []
1029 1027
1030 1028 for uid in ids_to_add:
1031 1029 changed = True
1032 1030 _usr = self._get_user(uid)
1033 1031 reviewer = PullRequestReviewers()
1034 1032 reviewer.user = _usr
1035 1033 reviewer.pull_request = pull_request
1036 1034 reviewer.reasons = reviewers[uid]['reasons']
1037 1035 # NOTE(marcink): mandatory shouldn't be changed now
1038 1036 # reviewer.mandatory = reviewers[uid]['reasons']
1039 1037 Session().add(reviewer)
1040 1038 added_audit_reviewers.append(reviewer.get_dict())
1041 1039
1042 1040 for uid in ids_to_remove:
1043 1041 changed = True
1044 1042 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1045 1043 # that prevents and fixes cases that we added the same reviewer twice.
1046 1044 # this CAN happen due to the lack of DB checks
1047 1045 reviewers = PullRequestReviewers.query()\
1048 1046 .filter(PullRequestReviewers.user_id == uid,
1049 1047 PullRequestReviewers.pull_request == pull_request)\
1050 1048 .all()
1051 1049
1052 1050 for obj in reviewers:
1053 1051 added_audit_reviewers.append(obj.get_dict())
1054 1052 Session().delete(obj)
1055 1053
1056 1054 if changed:
1057 1055 Session().expire_all()
1058 1056 pull_request.updated_on = datetime.datetime.now()
1059 1057 Session().add(pull_request)
1060 1058
1061 1059 # finally store audit logs
1062 1060 for user_data in added_audit_reviewers:
1063 1061 self._log_audit_action(
1064 1062 'repo.pull_request.reviewer.add', {'data': user_data},
1065 1063 user, pull_request)
1066 1064 for user_data in removed_audit_reviewers:
1067 1065 self._log_audit_action(
1068 1066 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1069 1067 user, pull_request)
1070 1068
1071 1069 self.notify_reviewers(pull_request, ids_to_add)
1072 1070 return ids_to_add, ids_to_remove
1073 1071
1074 1072 def get_url(self, pull_request, request=None, permalink=False):
1075 1073 if not request:
1076 1074 request = get_current_request()
1077 1075
1078 1076 if permalink:
1079 1077 return request.route_url(
1080 1078 'pull_requests_global',
1081 1079 pull_request_id=pull_request.pull_request_id,)
1082 1080 else:
1083 1081 return request.route_url('pullrequest_show',
1084 1082 repo_name=safe_str(pull_request.target_repo.repo_name),
1085 1083 pull_request_id=pull_request.pull_request_id,)
1086 1084
1087 1085 def get_shadow_clone_url(self, pull_request, request=None):
1088 1086 """
1089 1087 Returns qualified url pointing to the shadow repository. If this pull
1090 1088 request is closed there is no shadow repository and ``None`` will be
1091 1089 returned.
1092 1090 """
1093 1091 if pull_request.is_closed():
1094 1092 return None
1095 1093 else:
1096 1094 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1097 1095 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1098 1096
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create the pull-request notification (and email) for the given
        reviewer user ids. No-op when the id list is empty.
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1157 1155
    def delete(self, pull_request, user):
        """
        Delete a pull request: snapshot its API data for the audit log,
        clean up the merge workspace, then remove the DB record.

        :param pull_request: pull request instance or id
        :param user: user performing the deletion (for audit logging)
        """
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1166 1164
    def close_pull_request(self, pull_request, user):
        """
        Close a pull request without merging: clean up the merge workspace,
        flip the status to CLOSED, fire the 'close' hook and record an
        audit log entry.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1179 1177
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request leaving a status-changing comment.

        The recorded status is APPROVED when the calculated review status
        approves, REJECTED otherwise. If the closing vote changed the
        calculated review status, a 'review_status_change' hook fires.

        :param message: optional closing comment text; a default
            status-transition message is used when not given
        :return: tuple of (created comment, resulting status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1236 1234
    def merge_status(self, pull_request, translator=None,
                     force_shadow_repo_refresh=False):
        """
        Check whether the pull request can currently be merged.

        :param translator: optional translation function; falls back to the
            current request's translator
        :param force_shadow_repo_refresh: re-run the shadow merge instead of
            trusting the cached merge state
        :return: tuple of (merge_possible, status message)
        """
        _ = translator or get_current_request().translate

        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(
                pull_request,
                force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", resp)
            status = resp.possible, resp.merge_status_message
        except NotImplementedError:
            # backend without server-side merge support
            status = False, _('Pull request merging is not supported.')

        return status
1261 1259
1262 1260 def _check_repo_requirements(self, target, source, translator):
1263 1261 """
1264 1262 Check if `target` and `source` have compatible requirements.
1265 1263
1266 1264 Currently this is just checking for largefiles.
1267 1265 """
1268 1266 _ = translator
1269 1267 target_has_largefiles = self._has_largefiles(target)
1270 1268 source_has_largefiles = self._has_largefiles(source)
1271 1269 merge_possible = True
1272 1270 message = u''
1273 1271
1274 1272 if target_has_largefiles != source_has_largefiles:
1275 1273 merge_possible = False
1276 1274 if source_has_largefiles:
1277 1275 message = _(
1278 1276 'Target repository large files support is disabled.')
1279 1277 else:
1280 1278 message = _(
1281 1279 'Source repository large files support is disabled.')
1282 1280
1283 1281 return merge_possible, message
1284 1282
1285 1283 def _has_largefiles(self, repo):
1286 1284 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1287 1285 'extensions', 'largefiles')
1288 1286 return largefiles_ui and largefiles_ui[0].active
1289 1287
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        :param force_shadow_repo_refresh: skip the cached merge state and
            always re-run the shadow merge
        :return: a `MergeResponse`
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached merge state is still valid; build the response from it
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1338 1336
1339 1337 def _refresh_reference(self, reference, vcs_repository):
1340 1338 if reference.type in self.UPDATABLE_REF_TYPES:
1341 1339 name_or_id = reference.name
1342 1340 else:
1343 1341 name_or_id = reference.commit_id
1344 1342
1345 vcs_repository.count() # cache rebuild
1346 1343 refreshed_commit = vcs_repository.get_commit(name_or_id)
1347 1344 refreshed_reference = Reference(
1348 1345 reference.type, reference.name, refreshed_commit.raw_id)
1349 1346 return refreshed_reference
1350 1347
1351 1348 def _needs_merge_state_refresh(self, pull_request, target_reference):
1352 1349 return not(
1353 1350 pull_request.revisions and
1354 1351 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1355 1352 target_reference.commit_id == pull_request._last_merge_target_rev)
1356 1353
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow repository and persist the
        resulting merge state (last checked revs, status, shadow merge ref)
        on the pull request.

        :return: merge state of the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1380 1377
1381 1378 def _workspace_id(self, pull_request):
1382 1379 workspace_id = 'pr-%s' % pull_request.pull_request_id
1383 1380 return workspace_id
1384 1381
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        """
        Build a JSON-serializable summary of a repository for the pull
        request source/target selectors: owner info, repo name/link,
        description first-line, and all refs both raw and in select2 shape.

        :param commit_id: pre-select this commit in the refs, if given
        :param branch: pre-select this branch, if given
        :param bookmark: pre-select this bookmark, if given
        """
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        # re-shape the grouped refs into select2's {text, children} format
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            # only the first line of the description is shown
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1416 1413
1417 1414 def generate_pullrequest_title(self, source, source_ref, target):
1418 1415 return u'{source}#{at_ref} to {target}'.format(
1419 1416 source=source,
1420 1417 at_ref=source_ref,
1421 1418 target=target,
1422 1419 )
1423 1420
1424 1421 def _cleanup_merge_workspace(self, pull_request):
1425 1422 # Merging related cleanup
1426 1423 repo_id = pull_request.target_repo.repo_id
1427 1424 target_scm = pull_request.target_repo.scm_instance()
1428 1425 workspace_id = self._workspace_id(pull_request)
1429 1426
1430 1427 try:
1431 1428 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1432 1429 except NotImplementedError:
1433 1430 pass
1434 1431
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :return: tuple ``(groups, selected)`` where *groups* is a list of
            ``([(ref_key, ref_name), ...], group_label)`` and *selected* is
            the pre-selected ref_key or None
        :raises CommitDoesNotExistError: a requested ref was not found
        :raises EmptyRepositoryError: the repository has no commits at all
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select on either the commit id or the requested name
                    # matching this ref's id or name
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # fall back to the default branch when nothing was requested
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1500 1497
    def get_diff(self, source_repo, source_ref_id, target_ref_id,
                 hide_whitespace_changes, diff_context):
        """
        Public wrapper around :meth:`_get_diff_from_pr_or_version`; returns
        the raw vcs diff between two refs of *source_repo*.
        """
        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1507 1504
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the raw vcs diff between *target_ref_id* and *source_ref_id*.

        :param source_repo: either a db ``Repository`` or a vcs instance
        :param hide_whitespace_changes: ignore whitespace-only changes
        :param diff_context: number of context lines around each hunk
        """
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id))
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
1538 1535
    def _is_merge_enabled(self, pull_request):
        """Return whether server-side PR merging is enabled for the target repo."""
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')
1542 1539
1543 1540 def _use_rebase_for_merging(self, pull_request):
1544 1541 repo_type = pull_request.target_repo.repo_type
1545 1542 if repo_type == 'hg':
1546 1543 return self._get_general_setting(
1547 1544 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1548 1545 elif repo_type == 'git':
1549 1546 return self._get_general_setting(
1550 1547 pull_request, 'rhodecode_git_use_rebase_for_merging')
1551 1548
1552 1549 return False
1553 1550
1554 1551 def _close_branch_before_merging(self, pull_request):
1555 1552 repo_type = pull_request.target_repo.repo_type
1556 1553 if repo_type == 'hg':
1557 1554 return self._get_general_setting(
1558 1555 pull_request, 'rhodecode_hg_close_branch_before_merging')
1559 1556 elif repo_type == 'git':
1560 1557 return self._get_general_setting(
1561 1558 pull_request, 'rhodecode_git_close_branch_before_merging')
1562 1559
1563 1560 return False
1564 1561
    def _get_general_setting(self, pull_request, settings_key, default=False):
        """Read one general vcs setting for the PR's target repository."""
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)
1569 1566
    def _log_audit_action(self, action, action_data, user, pull_request):
        """Store an audit-log entry scoped to the PR's target repository."""
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)
1576 1573
    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions

        :return: tuple ``(get_default_reviewers_data, validate_default_reviewers)``
        """
        try:
            # prefer the Enterprise Edition implementation when installed
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers
1591 1588
1592 1589
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys used to categorize errors in ``error_details``
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """
        Record one failed check and mark the whole merge-check as failed.

        :param error_type: severity, 'error' or 'warning'
        :param message: translated, user-facing message
        :param error_key: one of the *_CHECK class constants
        :param details: extra context stored next to the message
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks (permission, target branch rules, review
        status, unresolved TODOs, filesystem merge simulation) and return
        the populated :class:`MergeCheck`.

        :param fail_early: stop and return at the first failing check
        :param force_shadow_repo_refresh: force re-running the shadow-repo
            merge simulation instead of using the cached state
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            # NOTE: previous message wrongly said "approval is pending",
            # copy-pasted from the review-status check below.
            log.debug("MergeCheck: cannot merge, user not allowed to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return user-facing descriptions of how the merge will be performed:
        merge strategy (rebase vs. explicit merge commit) and whether the
        source branch will be closed/deleted afterwards.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1739 1736
1740 1737
# Commit-level statistics for a pull request update: commits added,
# kept in common, removed, and the total count.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# File-level change sets between two pull request versions.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,196 +1,195 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import stat
23 23 import sys
24 24
25 25 import pytest
26 26 from mock import Mock, patch, DEFAULT
27 27
28 28 import rhodecode
29 29 from rhodecode.model import db, scm
30 30 from rhodecode.tests import no_newline_id_generator
31 31
32 32
def test_scm_instance_config(backend):
    """scm_instance() must forward config/cache flags to _get_instance and
    switch to the cached variant when ``vcs_full_cache`` is enabled."""
    repo = backend.create_repo()
    with patch.multiple('rhodecode.model.db.Repository',
                        _get_instance=DEFAULT,
                        _get_instance_cached=DEFAULT) as mocks:
        repo.scm_instance()
        mocks['_get_instance'].assert_called_with(
            config=None, cache=False)

        config = {'some': 'value'}
        repo.scm_instance(config=config)
        mocks['_get_instance'].assert_called_with(
            config=config, cache=False)

        with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
            repo.scm_instance(config=config)
            mocks['_get_instance_cached'].assert_called()
50 50
51 51
def test__get_instance_config(backend):
    """_get_instance() must resolve the backend class from the detected scm
    and pass either the repo config or an explicit override to it."""
    repo = backend.create_repo()
    vcs_class = Mock()
    with patch.multiple('rhodecode.lib.vcs.backends',
                        get_scm=DEFAULT,
                        get_backend=DEFAULT) as mocks:
        mocks['get_scm'].return_value = backend.alias
        mocks['get_backend'].return_value = vcs_class
        with patch('rhodecode.model.db.Repository._config') as config_mock:
            repo._get_instance()
            vcs_class.assert_called_with(
                repo_path=repo.repo_full_path, config=config_mock,
                create=False, with_wire={'cache': True})

        new_config = {'override': 'old_config'}
        repo._get_instance(config=new_config)
        vcs_class.assert_called_with(
            repo_path=repo.repo_full_path, config=new_config, create=False,
            with_wire={'cache': True})
71 71
72 72
def test_mark_for_invalidation_config(backend):
    """mark_for_invalidation() must pass the repo's own config to the
    commit-cache update."""
    repo = backend.create_repo()
    with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
        scm.ScmModel().mark_for_invalidation(repo.repo_name)
        _, kwargs = _mock.call_args
        assert kwargs['config'].__dict__ == repo._config.__dict__
79 79
80 80
def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
    """With delete=True the changeset cache must point at the tip revision."""
    commits = [{'message': 'A'}, {'message': 'B'}]
    repo = backend.create_repo(commits=commits)
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    assert repo.changeset_cache['revision'] == 1
86 86
87 87
def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
    """An empty repository must cache -1 as its last revision."""
    repo = backend.create_repo()
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    assert repo.changeset_cache['revision'] == -1
92 92
93 93
def test_strip_with_multiple_heads(backend_hg):
    """Stripping one head's commit must leave the other head's history intact."""
    commits = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
        {'message': 'B', 'parents': ['A']},
        {'message': 'a1'},
    ]
    repo = backend_hg.create_repo(commits=commits)
    commit_ids = backend_hg.commit_ids

    model = scm.ScmModel()
    model.strip(repo, commit_ids['b'], branch=None)

    # 5 commits minus the stripped 'b' leaves 4
    vcs_repo = repo.scm_instance()
    rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
    assert len(rest_commit_ids) == 4
    assert commit_ids['b'] not in rest_commit_ids
112 112
113 113
def test_strip_with_single_heads(backend_hg):
    """Stripping the tip of a single-head repo must drop only that commit."""
    commits = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
    ]
    repo = backend_hg.create_repo(commits=commits)
    commit_ids = backend_hg.commit_ids

    model = scm.ScmModel()
    model.strip(repo, commit_ids['b'], branch=None)

    vcs_repo = repo.scm_instance()
    rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
    assert len(rest_commit_ids) == 2
    assert commit_ids['b'] not in rest_commit_ids
129 129 assert commit_ids['b'] not in rest_commit_ids
130 130
131 131
def test_get_nodes_returns_unicode_flat(backend):
    """get_nodes(flat=True) must return unicode paths for dirs and files."""
    repo = backend.repo
    commit_id = repo.get_commit(commit_idx=0).raw_id
    directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=True)
    assert_contains_only_unicode(directories)
    assert_contains_only_unicode(files)
139 138
140 139
def test_get_nodes_returns_unicode_non_flat(backend):
    """get_nodes(flat=False) must return dicts whose names are unicode."""
    repo = backend.repo
    commit_id = repo.get_commit(commit_idx=0).raw_id

    directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=False)
    # johbo: Checking only the names for now, since that is the critical
    # part.
    assert_contains_only_unicode([d['name'] for d in directories])
    assert_contains_only_unicode([f['name'] for f in files])
150 149
151 150
def test_get_nodes_max_file_bytes(backend_random):
    """File content above ``max_file_bytes`` must be omitted (set to None)."""
    repo = backend_random.repo
    max_file_bytes = 10
    # without a limit at least one file must exceed the threshold,
    # otherwise the second half of the test would be vacuous
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
        extended_info=True, flat=False)
    assert any(file['content'] and len(file['content']) > max_file_bytes
               for file in files)

    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
        extended_info=True, flat=False, max_file_bytes=max_file_bytes)
    assert all(
        file['content'] is None if file['size'] > max_file_bytes else True
        for file in files)
167 166
168 167
def assert_contains_only_unicode(structure):
    """Assert that *structure* is non-empty and every element is unicode."""
    assert structure
    for item in structure:
        assert isinstance(item, unicode)
173 172
174 173
@pytest.mark.backends("hg", "git")
def test_get_non_unicode_reference(backend):
    """Landing-rev choices must handle non-ascii branch/bookmark/tag names."""
    model = scm.ScmModel()
    non_unicode_list = ["AdΔ±nΔ±".decode("cp1254")]

    def scm_instance():
        return Mock(
            branches=non_unicode_list, bookmarks=non_unicode_list,
            tags=non_unicode_list, alias=backend.alias)

    repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
    choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
    # bookmarks are a mercurial-only concept, so git lacks the 'book:' entry
    if backend.alias == 'hg':
        valid_choices = [
            'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
            u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
    else:
        valid_choices = [
            'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
            u'tag:Ad\xc4\xb1n\xc4\xb1']

    assert choices == valid_choices
@@ -1,257 +1,256 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import shutil
23 23 import datetime
24 24
25 25 import pytest
26 26
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import Config
29 29 from rhodecode.lib.vcs.nodes import FileNode
30 30 from rhodecode.tests import get_new_dir
31 31 from rhodecode.tests.utils import check_skip_backends, check_xfail_backends
32 32
33 33
@pytest.fixture()
def vcs_repository_support(
        request, backend_alias, baseapp, _vcs_repo_container):
    """
    Provide a test repository for the test run.

    Depending on the value of `recreate_repo_per_test` a new repo for each
    test will be created.

    The parameter `--backends` can be used to limit this fixture to specific
    backend implementations.

    :return: tuple ``(backend_alias, repo)``
    """
    cls = request.cls

    check_skip_backends(request.node, backend_alias)
    check_xfail_backends(request.node, backend_alias)

    # a fresh container means a fresh repo for this single test
    if _should_create_repo_per_test(cls):
        _vcs_repo_container = _create_vcs_repo_container(request)

    repo = _vcs_repo_container.get_repo(cls, backend_alias=backend_alias)

    # TODO: johbo: Supporting old test class api, think about removing this
    cls.repo = repo
    cls.repo_path = repo.path
    cls.default_branch = repo.DEFAULT_BRANCH_NAME
    cls.Backend = cls.backend_class = repo.__class__
    cls.imc = repo.in_memory_commit

    return backend_alias, repo
64 64
65 65
@pytest.fixture(scope='class')
def _vcs_repo_container(request):
    """
    Internal fixture intended to help support class based scoping on demand.
    """
    return _create_vcs_repo_container(request)
72 72
73 73
def _create_vcs_repo_container(request):
    """Create a repo container; register its cleanup unless --keep-tmp-path."""
    repo_container = VcsRepoContainer()
    if not request.config.getoption('--keep-tmp-path'):
        request.addfinalizer(repo_container.cleanup)
    return repo_container
79 79
80 80
class VcsRepoContainer(object):
    """Per-backend cache of test repositories plus their cleanup paths."""

    def __init__(self):
        # paths to remove on cleanup, in creation order
        self._cleanup_paths = []
        # backend_alias -> repository instance
        self._repos = {}

    def get_repo(self, test_class, backend_alias):
        """Return (creating on first use) the repo for *backend_alias*."""
        try:
            return self._repos[backend_alias]
        except KeyError:
            repo = _create_empty_repository(test_class, backend_alias)

            self._cleanup_paths.append(repo.path)
            self._repos[backend_alias] = repo
            return repo

    def cleanup(self):
        """Remove all created repository directories, newest first."""
        for repo_path in reversed(self._cleanup_paths):
            shutil.rmtree(repo_path)
98 98
99 99
100 100 def _should_create_repo_per_test(cls):
101 101 return getattr(cls, 'recreate_repo_per_test', False)
102 102
103 103
def _create_empty_repository(cls, backend_alias=None):
    """
    Create a fresh repository for *backend_alias* in a new temp directory.

    If the test class defines ``_get_commits``, those commits are added and
    the resulting tip is stored on the class.
    """
    Backend = get_backend(backend_alias or cls.backend_alias)
    repo_path = get_new_dir(str(time.time()))
    repo = Backend(repo_path, create=True)
    if hasattr(cls, '_get_commits'):
        commits = cls._get_commits()
        cls.tip = _add_commits_to_repo(repo, commits)

    return repo
113 113
114 114
@pytest.fixture
def config():
    """
    Instance of a repository config.

    The instance contains only one value:

    - Section: "section-a"
    - Key: "a-1"
    - Value: "value-a-1"

    The intended usage is for cases where a config instance is needed but no
    specific content is required.
    """
    config = Config()
    config.set('section-a', 'a-1', 'value-a-1')
    return config
132 132
133 133
def _add_commits_to_repo(repo, commits):
    """
    Apply a sequence of commit descriptions to *repo* via its in-memory
    commit API and return the final tip commit (None for an empty list).

    Each description is a dict with 'message', 'author', 'date', optional
    'branch', and optional 'added'/'changed'/'removed' FileNode lists.
    """
    imc = repo.in_memory_commit
    tip = None

    for commit in commits:
        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        tip = imc.commit(
            message=unicode(commit['message']),
            author=unicode(commit['author']),
            date=commit['date'],
            branch=commit.get('branch'))
    return tip
153 152
154 153
@pytest.fixture
def vcs_repo(request, backend_alias):
    """Create an empty repository for the current backend; removed on teardown."""
    Backend = get_backend(backend_alias)
    repo_path = get_new_dir(str(time.time()))
    repo = Backend(repo_path, create=True)

    @request.addfinalizer
    def cleanup():
        shutil.rmtree(repo_path)

    return repo
166 165
167 166
@pytest.fixture
def generate_repo_with_commits(vcs_repo):
    """
    Creates a fabric to generate N comits with some file nodes on a randomly
    generated repository
    """

    def commit_generator(num):
        # one commit every 12 hours starting 2010-01-01 20:00
        start_date = datetime.datetime(2010, 1, 1, 20)
        for x in xrange(num):
            yield {
                'message': 'Commit %d' % x,
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': start_date + datetime.timedelta(hours=12 * x),
                'added': [
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
                ],
                'modified': [
                    FileNode('file_%d.txt' % x,
                             content='Foobar %d modified' % (x-1)),
                ]
            }

    def commit_maker(num=5):
        # materialize *num* commits into the fixture repository
        _add_commits_to_repo(vcs_repo, commit_generator(num))
        return vcs_repo

    return commit_maker
196 195
197 196
@pytest.fixture
def hg_repo(request, vcs_repo):
    """Repository pre-populated with the test class's ``_get_commits`` data."""
    repo = vcs_repo

    commits = repo._get_commits()
    _add_commits_to_repo(repo, commits)

    return repo
206 205
207 206
@pytest.fixture
def hg_commit(hg_repo):
    """Tip commit of the pre-populated hg repository."""
    return hg_repo.get_commit()
211 210
212 211
class BackendTestMixin(object):
    """
    This is a backend independent test case class which should be created
    with ``type`` method.

    It is required to set following attributes at subclass:

    - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
    - ``repo_path``: path to the repository which would be created for set of
      tests
    - ``recreate_repo_per_test``: If set to ``False``, repo would NOT be
      created
      before every single test. Defaults to ``True``.
    """
    recreate_repo_per_test = True

    @classmethod
    def _get_commits(cls):
        """Return the default two-commit history used to seed test repos."""
        commits = [
            {
                'message': u'Initial commit',
                'author': u'Joe Doe <joe.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 20),
                'added': [
                    FileNode('foobar', content='Foobar'),
                    FileNode('foobar2', content='Foobar II'),
                    FileNode('foo/bar/baz', content='baz here!'),
                ],
            },
            {
                'message': u'Changes...',
                'author': u'Jane Doe <jane.doe@example.com>',
                'date': datetime.datetime(2010, 1, 1, 21),
                'added': [
                    FileNode('some/new.txt', content='news...'),
                ],
                'changed': [
                    FileNode('foobar', 'Foobar I'),
                ],
                'removed': [],
            },
        ]
        return commits
256 255
257 256
@@ -1,593 +1,592 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import time
23 23
24 24 import pytest
25 25
26 26 from rhodecode.lib.vcs.backends.base import (
27 27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
28 28 from rhodecode.lib.vcs.exceptions import (
29 29 BranchDoesNotExistError, CommitDoesNotExistError,
30 30 RepositoryError, EmptyRepositoryError)
31 31 from rhodecode.lib.vcs.nodes import (
32 32 FileNode, AddedFileNodesGenerator,
33 33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
34 34 from rhodecode.tests import get_new_dir
35 35 from rhodecode.tests.vcs.conftest import BackendTestMixin
36 36
37 37
class TestBaseChangeset:
    """Checks around the legacy ``BaseChangeset`` alias."""

    def test_is_deprecated(self):
        # The old changeset class is kept only for backward compatibility;
        # instantiating it must emit a DeprecationWarning.
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        pytest.deprecated_call(BaseChangeset)
43 43
44 44
class TestEmptyCommit(object):
    """Behaviour of the ``EmptyCommit`` placeholder object."""

    def test_branch_without_alias_returns_none(self):
        # Without a repository alias the placeholder cannot know its
        # branch, so the attribute must be None.
        empty = EmptyCommit()
        assert empty.branch is None
50 50
51 51
@pytest.mark.usefixtures("vcs_repository_support")
class TestCommitsInNonEmptyRepo(BackendTestMixin):
    """Commit navigation and branch-aware lookups on a repo seeded with
    five linear commits (one file added per commit)."""

    recreate_repo_per_test = True

    @classmethod
    def _get_commits(cls):
        base_date = datetime.datetime(2010, 1, 1, 20)
        for idx in xrange(5):
            yield {
                'message': 'Commit %d' % idx,
                'author': 'Joe Doe <joe.doe@example.com>',
                'date': base_date + datetime.timedelta(hours=12 * idx),
                'added': [
                    FileNode('file_%d.txt' % idx, content='Foobar %d' % idx),
                ],
            }

    def test_walk_returns_empty_list_in_case_of_file(self):
        # Walking a path that is a file (not a directory) yields nothing.
        assert list(self.tip.walk('file_0.txt')) == []

    @pytest.mark.backends("git", "hg")
    def test_new_branch(self):
        self.imc.add(
            FileNode('docs/index.txt', content='Documentation\n'))
        foobar_tip = self.imc.commit(
            message=u'New branch: foobar', author=u'joe', branch='foobar')
        assert 'foobar' in self.repo.branches
        assert foobar_tip.branch == 'foobar'
        # 'foobar' should be the only branch that contains the new commit
        branch_heads = self.repo.branches.values()
        assert branch_heads[0] != branch_heads[1]

    @pytest.mark.backends("git", "hg")
    def test_new_head_in_default_branch(self):
        tip = self.repo.get_commit()
        self.imc.add(
            FileNode('docs/index.txt', content='Documentation\n'))
        foobar_tip = self.imc.commit(
            message=u'New branch: foobar', author=u'joe',
            branch='foobar', parents=[tip])
        self.imc.change(
            FileNode('docs/index.txt',
                     content='Documentation\nand more...\n'))
        newtip = self.imc.commit(
            message=u'At default branch', author=u'joe',
            branch=foobar_tip.branch, parents=[foobar_tip])

        # Merge both heads back into the default branch; the merge commit
        # must land on the default branch.
        newest_tip = self.imc.commit(
            message=u'Merged with %s' % foobar_tip.raw_id, author=u'joe',
            branch=self.backend_class.DEFAULT_BRANCH_NAME,
            parents=[newtip, foobar_tip])

        assert newest_tip.branch == self.backend_class.DEFAULT_BRANCH_NAME

    @pytest.mark.backends("git", "hg")
    def test_get_commits_respects_branch_name(self):
        """
        * e1930d0 (HEAD, master) Back in default branch
        | * e1930d0 (docs) New Branch: docs2
        | * dcc14fa New branch: docs
        |/
        * e63c41a Initial commit
        ...
        * 624d3db Commit 0

        :return:
        """
        DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
        TEST_BRANCH = 'docs'
        org_tip = self.repo.get_commit()

        self.imc.add(FileNode('readme.txt', content='Document\n'))
        initial = self.imc.commit(
            message=u'Initial commit', author=u'joe',
            parents=[org_tip], branch=DEFAULT_BRANCH)

        self.imc.add(FileNode('newdoc.txt', content='foobar\n'))
        docs_branch_commit1 = self.imc.commit(
            message=u'New branch: docs', author=u'joe',
            parents=[initial], branch=TEST_BRANCH)

        self.imc.add(FileNode('newdoc2.txt', content='foobar2\n'))
        docs_branch_commit2 = self.imc.commit(
            message=u'New branch: docs2', author=u'joe',
            parents=[docs_branch_commit1], branch=TEST_BRANCH)

        self.imc.add(FileNode('newfile', content='hello world\n'))
        self.imc.commit(
            message=u'Back in default branch', author=u'joe',
            parents=[initial], branch=DEFAULT_BRANCH)

        # Filtering by the default branch must not leak the 'docs' commits.
        default_branch_commits = list(
            self.repo.get_commits(branch_name=DEFAULT_BRANCH))
        assert docs_branch_commit1 not in default_branch_commits
        assert docs_branch_commit2 not in default_branch_commits

        docs_branch_commits = list(self.repo.get_commits(
            start_id=self.repo.commit_ids[0],
            end_id=self.repo.commit_ids[-1],
            branch_name=TEST_BRANCH))
        assert docs_branch_commit1 in docs_branch_commits
        assert docs_branch_commit2 in docs_branch_commits

    @pytest.mark.backends("svn")
    def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
        repo = vcsbackend_svn['svn-simple-layout']
        trunk_commits = repo.get_commits(branch_name='trunk')
        assert [c.idx for c in trunk_commits] == [1, 2, 3, 7, 12, 15]

    def test_get_commit_by_branch(self):
        # A branch name must resolve to the branch head commit.
        for branch_name, commit_id in self.repo.branches.iteritems():
            assert commit_id == self.repo.get_commit(branch_name).raw_id

    def test_get_commit_by_tag(self):
        # A tag name must resolve to the tagged commit.
        for tag_name, commit_id in self.repo.tags.iteritems():
            assert commit_id == self.repo.get_commit(tag_name).raw_id

    def test_get_commit_parents(self):
        for idx in (1, 2, 3):
            parent = self.repo.get_commit(commit_idx=idx - 1)
            assert [parent] == self.repo.get_commit(commit_idx=idx).parents

    def test_get_commit_children(self):
        for idx in (1, 2, 3):
            child = self.repo.get_commit(commit_idx=idx + 1)
            assert [child] == self.repo.get_commit(commit_idx=idx).children
199 198
200 199
@pytest.mark.usefixtures("vcs_repository_support")
class TestCommits(BackendTestMixin):
    """Generic commit API tests over a repository with five linear commits.

    The repo is built once for the whole class (``recreate_repo_per_test``
    is False), so no test here may mutate it.
    """
    recreate_repo_per_test = False

    @classmethod
    def _get_commits(cls):
        start_date = datetime.datetime(2010, 1, 1, 20)
        for x in xrange(5):
            yield {
                'message': u'Commit %d' % x,
                'author': u'Joe Doe <joe.doe@example.com>',
                'date': start_date + datetime.timedelta(hours=12 * x),
                'added': [
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
                ],
            }

    def test_simple(self):
        tip = self.repo.get_commit()
        # FIX: was `assert tip.date, datetime.datetime(2010, 1, 3 == 20)`,
        # an always-true assert whose second operand was parsed as the
        # assertion message. Commit 4 is dated start + 4*12h = 2010-01-03 20:00.
        assert tip.date == datetime.datetime(2010, 1, 3, 20)

    def test_simple_serialized_commit(self):
        tip = self.repo.get_commit()
        # json.dumps(tip) uses .__json__() method
        data = tip.__json__()
        assert 'branch' in data
        assert data['revision']

    def test_retrieve_tip(self):
        tip = self.repo.get_commit('tip')
        assert tip == self.repo.get_commit()

    def test_invalid(self):
        # An index far beyond the history must raise, not return junk.
        with pytest.raises(CommitDoesNotExistError):
            self.repo.get_commit(commit_idx=123456789)

    def test_idx(self):
        commit = self.repo[0]
        assert commit.idx == 0

    def test_negative_idx(self):
        # Negative indexes are resolved from the end of the history.
        commit = self.repo.get_commit(commit_idx=-1)
        assert commit.idx >= 0

    def test_revision_is_deprecated(self):
        def get_revision(commit):
            return commit.revision

        commit = self.repo[0]
        pytest.deprecated_call(get_revision, commit)

    def test_size(self):
        tip = self.repo.get_commit()
        size = 5 * len('Foobar N')  # Size of 5 files
        assert tip.size == size

    def test_size_at_commit(self):
        tip = self.repo.get_commit()
        size = 5 * len('Foobar N')  # Size of 5 files
        assert self.repo.size_at_commit(tip.raw_id) == size

    def test_size_at_first_commit(self):
        commit = self.repo[0]
        size = len('Foobar N')  # Size of 1 file
        assert self.repo.size_at_commit(commit.raw_id) == size

    def test_author(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.author, u'Joe Doe <joe.doe@example.com>')

    def test_author_name(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.author_name, u'Joe Doe')

    def test_author_email(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.author_email, u'joe.doe@example.com')

    def test_message(self):
        tip = self.repo.get_commit()
        assert_text_equal(tip.message, u'Commit 4')

    def test_diff(self):
        tip = self.repo.get_commit()
        diff = tip.diff()
        assert "+Foobar 4" in diff.raw

    def test_prev(self):
        tip = self.repo.get_commit()
        prev_commit = tip.prev()
        assert prev_commit.message == 'Commit 3'

    def test_prev_raises_on_first_commit(self):
        commit = self.repo.get_commit(commit_idx=0)
        with pytest.raises(CommitDoesNotExistError):
            commit.prev()

    def test_prev_works_on_second_commit_issue_183(self):
        commit = self.repo.get_commit(commit_idx=1)
        prev_commit = commit.prev()
        assert prev_commit.idx == 0

    def test_next(self):
        commit = self.repo.get_commit(commit_idx=2)
        next_commit = commit.next()
        assert next_commit.message == 'Commit 3'

    def test_next_raises_on_tip(self):
        commit = self.repo.get_commit()
        with pytest.raises(CommitDoesNotExistError):
            commit.next()

    def test_get_path_commit(self):
        commit = self.repo.get_commit()
        # FIX: the return value was previously discarded, which made the
        # following assertion vacuous (the tip's message is 'Commit 4'
        # regardless). 'file_4.txt' was added in the tip commit, so the
        # last commit touching it is the tip itself.
        commit = commit.get_path_commit('file_4.txt')
        assert commit.message == 'Commit 4'

    def test_get_filenodes_generator(self):
        tip = self.repo.get_commit()
        filepaths = [node.path for node in tip.get_filenodes_generator()]
        assert filepaths == ['file_%d.txt' % x for x in xrange(5)]

    def test_get_file_annotate(self):
        file_added_commit = self.repo.get_commit(commit_idx=3)
        annotations = list(file_added_commit.get_file_annotate('file_3.txt'))

        line_no, commit_id, commit_loader, line = annotations[0]

        assert line_no == 1
        assert commit_id == file_added_commit.raw_id
        assert commit_loader() == file_added_commit
        assert 'Foobar 3' in line

    def test_get_file_annotate_does_not_exist(self):
        file_added_commit = self.repo.get_commit(commit_idx=2)
        # TODO: Should use a specific exception class here?
        with pytest.raises(Exception):
            list(file_added_commit.get_file_annotate('file_3.txt'))

    def test_get_file_annotate_tip(self):
        tip = self.repo.get_commit()
        commit = self.repo.get_commit(commit_idx=3)
        expected_values = list(commit.get_file_annotate('file_3.txt'))
        annotations = list(tip.get_file_annotate('file_3.txt'))

        # Note: Skip index 2 because the loader function is not the same
        for idx in (0, 1, 3):
            assert annotations[0][idx] == expected_values[0][idx]

    def test_get_commits_is_ordered_by_date(self):
        commits = self.repo.get_commits()
        assert isinstance(commits, CollectionGenerator)
        # FIX: was the tautology `len(commits) == 0 or len(commits) != 0`;
        # the fixture creates exactly five commits.
        assert len(commits) == 5
        commits = list(commits)
        ordered_by_date = sorted(commits, key=lambda commit: commit.date)
        assert commits == ordered_by_date

    def test_get_commits_respects_start(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(start_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert len(commits) == 4

    def test_get_commits_includes_start_commit(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(start_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert commits[0].raw_id == second_id

    def test_get_commits_respects_end(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(end_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert commits[-1].raw_id == second_id
        assert len(commits) == 2

    def test_get_commits_respects_both_start_and_end(self):
        second_id = self.repo.commit_ids[1]
        third_id = self.repo.commit_ids[2]
        commits = self.repo.get_commits(start_id=second_id, end_id=third_id)
        assert isinstance(commits, CollectionGenerator)
        commits = list(commits)
        assert len(commits) == 2

    def test_get_commits_on_empty_repo_raises_EmptyRepository_error(self):
        repo_path = get_new_dir(str(time.time()))
        repo = self.Backend(repo_path, create=True)

        with pytest.raises(EmptyRepositoryError):
            list(repo.get_commits(start_id='foobar'))

    def test_get_commits_respects_hidden(self):
        commits = self.repo.get_commits(show_hidden=True)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 5

    def test_get_commits_includes_end_commit(self):
        second_id = self.repo.commit_ids[1]
        commits = self.repo.get_commits(end_id=second_id)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 2
        commits = list(commits)
        assert commits[-1].raw_id == second_id

    def test_get_commits_respects_start_date(self):
        start_date = datetime.datetime(2010, 1, 2)
        commits = self.repo.get_commits(start_date=start_date)
        assert isinstance(commits, CollectionGenerator)
        # Should be 4 commits after 2010-01-02 00:00:00
        assert len(commits) == 4
        for c in commits:
            assert c.date >= start_date

    def test_get_commits_respects_start_date_with_branch(self):
        start_date = datetime.datetime(2010, 1, 2)
        commits = self.repo.get_commits(
            start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
        assert isinstance(commits, CollectionGenerator)
        # Should be 4 commits after 2010-01-02 00:00:00
        assert len(commits) == 4
        for c in commits:
            assert c.date >= start_date

    def test_get_commits_respects_start_date_and_end_date(self):
        start_date = datetime.datetime(2010, 1, 2)
        end_date = datetime.datetime(2010, 1, 3)
        commits = self.repo.get_commits(start_date=start_date,
                                        end_date=end_date)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 2
        for c in commits:
            assert c.date >= start_date
            assert c.date <= end_date

    def test_get_commits_respects_end_date(self):
        end_date = datetime.datetime(2010, 1, 2)
        commits = self.repo.get_commits(end_date=end_date)
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 1
        for c in commits:
            assert c.date <= end_date

    def test_get_commits_respects_reverse(self):
        commits = self.repo.get_commits()  # no longer reverse support
        assert isinstance(commits, CollectionGenerator)
        assert len(commits) == 5
        commit_ids = reversed([c.raw_id for c in commits])
        assert list(commit_ids) == list(reversed(self.repo.commit_ids))

    def test_get_commits_slice_generator(self):
        commits = self.repo.get_commits(
            branch_name=self.repo.DEFAULT_BRANCH_NAME)
        assert isinstance(commits, CollectionGenerator)
        commit_slice = list(commits[1:3])
        assert len(commit_slice) == 2

    def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
        with pytest.raises(CommitDoesNotExistError):
            list(self.repo.get_commits(start_id='foobar'))

    def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
        with pytest.raises(CommitDoesNotExistError):
            list(self.repo.get_commits(end_id='foobar'))

    def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
        with pytest.raises(BranchDoesNotExistError):
            list(self.repo.get_commits(branch_name='foobar'))

    def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
        # start newer than end is an invalid range.
        start_id = self.repo.commit_ids[-1]
        end_id = self.repo.commit_ids[0]
        with pytest.raises(RepositoryError):
            list(self.repo.get_commits(start_id=start_id, end_id=end_id))

    def test_get_commits_raises_for_numerical_ids(self):
        with pytest.raises(TypeError):
            self.repo.get_commits(start_id=1, end_id=2)

    def test_commit_equality(self):
        commit1 = self.repo.get_commit(self.repo.commit_ids[0])
        commit2 = self.repo.get_commit(self.repo.commit_ids[1])

        assert commit1 == commit1
        assert commit2 == commit2
        assert commit1 != commit2
        assert commit2 != commit1
        # The `!= None` / `None !=` forms below are deliberate: they
        # exercise __eq__/__ne__ against non-commit operands.
        assert commit1 != None
        assert None != commit1
        assert 1 != commit1
        assert 'string' != commit1
494 493
495 494
@pytest.mark.parametrize("filename, expected", [
    ("README.rst", False),
    ("README", True),
])
def test_commit_is_link(vcsbackend, filename, expected):
    # NOTE(review): the parametrization presumes 'README' is a symlink in
    # the backend's fixture repo while 'README.rst' is a regular file.
    tip = vcsbackend.repo.get_commit()
    assert tip.is_link(filename) is expected
504 503
505 504
@pytest.mark.usefixtures("vcs_repository_support")
class TestCommitsChanges(BackendTestMixin):
    """added/changed/removed book-keeping across two fixture commits."""
    recreate_repo_per_test = False

    @classmethod
    def _get_commits(cls):
        first_commit = {
            'message': u'Initial',
            'author': u'Joe Doe <joe.doe@example.com>',
            'date': datetime.datetime(2010, 1, 1, 20),
            'added': [
                FileNode('foo/bar', content='foo'),
                FileNode('foo/baΕ‚', content='foo'),
                FileNode('foobar', content='foo'),
                FileNode('qwe', content='foo'),
            ],
        }
        second_commit = {
            'message': u'Massive changes',
            'author': u'Joe Doe <joe.doe@example.com>',
            'date': datetime.datetime(2010, 1, 1, 22),
            'added': [FileNode('fallout', content='War never changes')],
            'changed': [
                FileNode('foo/bar', content='baz'),
                FileNode('foobar', content='baz'),
            ],
            'removed': [FileNode('qwe')],
        }
        return [first_commit, second_commit]

    def test_initial_commit(self, local_dt_to_utc):
        first = self.repo.get_commit(commit_idx=0)
        expected_added = set([
            first.get_node('foo/bar'),
            first.get_node('foo/baΕ‚'),
            first.get_node('foobar'),
            first.get_node('qwe'),
        ])
        assert set(first.added) == expected_added
        # The very first commit can neither change nor remove anything.
        assert set(first.changed) == set()
        assert set(first.removed) == set()
        assert set(first.affected_files) == set(
            ['foo/bar', 'foo/baΕ‚', 'foobar', 'qwe'])
        assert first.date == local_dt_to_utc(
            datetime.datetime(2010, 1, 1, 20, 0))

    def test_head_added(self):
        head = self.repo.get_commit()
        assert isinstance(head.added, AddedFileNodesGenerator)
        assert set(head.added) == set([head.get_node('fallout')])
        assert isinstance(head.changed, ChangedFileNodesGenerator)
        assert set(head.changed) == set([
            head.get_node('foo/bar'),
            head.get_node('foobar'),
        ])
        assert isinstance(head.removed, RemovedFileNodesGenerator)
        assert len(head.removed) == 1
        assert list(head.removed)[0].path == 'qwe'

    def test_get_filemode(self):
        head = self.repo.get_commit()
        assert head.get_file_mode('foo/bar') == FILEMODE_DEFAULT

    def test_get_filemode_non_ascii(self):
        head = self.repo.get_commit()
        # Both the byte-string and the unicode spelling of the path
        # must resolve to the same node.
        assert head.get_file_mode('foo/baΕ‚') == FILEMODE_DEFAULT
        assert head.get_file_mode(u'foo/baΕ‚') == FILEMODE_DEFAULT

    def test_get_path_history(self):
        head = self.repo.get_commit()
        assert len(head.get_path_history('foo/bar')) == 2

    def test_get_path_history_with_limit(self):
        head = self.repo.get_commit()
        assert len(head.get_path_history('foo/bar', limit=1)) == 1

    def test_get_path_history_first_commit(self):
        first = self.repo[0]
        assert len(first.get_path_history('foo/bar')) == 1
588 587
589 588
def assert_text_equal(expected, given):
    """Assert *expected* equals *given* and that both are ``unicode``."""
    assert expected == given
    # Check types in the same order the values were passed.
    for value in (expected, given):
        assert isinstance(value, unicode)
@@ -1,1288 +1,1288 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 25 import shutil
26 26
27 27 import pytest
28 28
29 29 from rhodecode.lib.utils import make_db_config
30 30 from rhodecode.lib.vcs.backends.base import Reference
31 31 from rhodecode.lib.vcs.backends.git import (
32 32 GitRepository, GitCommit, discover_git_version)
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 RepositoryError, VCSError, NodeDoesNotExistError)
35 35 from rhodecode.lib.vcs.nodes import (
36 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 39
40 40
41 41 pytestmark = pytest.mark.backends("git")
42 42
43 43
def repo_path_generator():
    """
    Return a different path to be used for cloning repos.
    """
    counter = 0
    while True:
        counter += 1
        yield '%s-%d' % (TEST_GIT_REPO_CLONE, counter)
52 52
53 53
54 54 REPO_PATH_GENERATOR = repo_path_generator()
55 55
56 56
57 57 class TestGitRepository:
58 58
59 59 # pylint: disable=protected-access
60 60
61 61 def __check_for_existing_repo(self):
62 62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 63 self.fail('Cannot test git clone repo as location %s already '
64 64 'exists. You should manually remove it first.'
65 65 % TEST_GIT_REPO_CLONE)
66 66
67 67 @pytest.fixture(autouse=True)
68 68 def prepare(self, request, baseapp):
69 69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70 70
71 71 def get_clone_repo(self):
72 72 """
73 73 Return a non bare clone of the base repo.
74 74 """
75 75 clone_path = next(REPO_PATH_GENERATOR)
76 76 repo_clone = GitRepository(
77 77 clone_path, create=True, src_url=self.repo.path, bare=False)
78 78
79 79 return repo_clone
80 80
81 81 def get_empty_repo(self, bare=False):
82 82 """
83 83 Return a non bare empty repo.
84 84 """
85 85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86 86
87 87 def test_wrong_repo_path(self):
88 88 wrong_repo_path = '/tmp/errorrepo_git'
89 89 with pytest.raises(RepositoryError):
90 90 GitRepository(wrong_repo_path)
91 91
92 92 def test_repo_clone(self):
93 93 self.__check_for_existing_repo()
94 94 repo = GitRepository(TEST_GIT_REPO)
95 95 repo_clone = GitRepository(
96 96 TEST_GIT_REPO_CLONE,
97 97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
98 98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 99 # Checking hashes of commits should be enough
100 100 for commit in repo.get_commits():
101 101 raw_id = commit.raw_id
102 102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103 103
104 104 def test_repo_clone_without_create(self):
105 105 with pytest.raises(RepositoryError):
106 106 GitRepository(
107 107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108 108
109 109 def test_repo_clone_with_update(self):
110 110 repo = GitRepository(TEST_GIT_REPO)
111 111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 112 repo_clone = GitRepository(
113 113 clone_path,
114 114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116
117 117 # check if current workdir was updated
118 118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 119 assert os.path.isfile(fpath)
120 120
121 121 def test_repo_clone_without_update(self):
122 122 repo = GitRepository(TEST_GIT_REPO)
123 123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 124 repo_clone = GitRepository(
125 125 clone_path,
126 126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 128 # check if current workdir was *NOT* updated
129 129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 130 # Make sure it's not bare repo
131 131 assert not repo_clone.bare
132 132 assert not os.path.isfile(fpath)
133 133
134 134 def test_repo_clone_into_bare_repo(self):
135 135 repo = GitRepository(TEST_GIT_REPO)
136 136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 137 repo_clone = GitRepository(
138 138 clone_path, create=True, src_url=repo.path, bare=True)
139 139 assert repo_clone.bare
140 140
141 141 def test_create_repo_is_not_bare_by_default(self):
142 142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 143 assert not repo.bare
144 144
145 145 def test_create_bare_repo(self):
146 146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 147 assert repo.bare
148 148
149 149 def test_update_server_info(self):
150 150 self.repo._update_server_info()
151 151
152 152 def test_fetch(self, vcsbackend_git):
153 153 # Note: This is a git specific part of the API, it's only implemented
154 154 # by the git backend.
155 155 source_repo = vcsbackend_git.repo
156 156 target_repo = vcsbackend_git.create_repo(bare=True)
157 157 target_repo.fetch(source_repo.path)
158 158 # Note: Get a fresh instance, avoids caching trouble
159 159 target_repo = vcsbackend_git.backend(target_repo.path)
160 160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161 161
162 162 def test_commit_ids(self):
163 163 # there are 112 commits (by now)
164 164 # so we can assume they would be available from now on
165 165 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 167 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 168 '102607b09cdd60e2793929c4f90478be29f85a17',
169 169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 170 '2d1028c054665b962fa3d307adfc923ddd528038',
171 171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 174 '8430a588b43b5d6da365400117c89400326e7992',
175 175 'd955cd312c17b02143c04fa1099a352b04368118',
176 176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 187 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
190 190 assert subset.issubset(set(self.repo.commit_ids))
191 191
192 192 def test_slicing(self):
193 193 # 4 1 5 10 95
194 194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 195 (10, 20, 10), (5, 100, 95)]:
196 196 commit_ids = list(self.repo[sfrom:sto])
197 197 assert len(commit_ids) == size
198 198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200 200
201 201 def test_branches(self):
202 202 # TODO: Need more tests here
203 203 # Removed (those are 'remotes' branches for cloned repo)
204 204 # assert 'master' in self.repo.branches
205 205 # assert 'gittree' in self.repo.branches
206 206 # assert 'web-branch' in self.repo.branches
207 207 for __, commit_id in self.repo.branches.items():
208 208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209 209
210 210 def test_tags(self):
211 211 # TODO: Need more tests here
212 212 assert 'v0.1.1' in self.repo.tags
213 213 assert 'v0.1.2' in self.repo.tags
214 214 for __, commit_id in self.repo.tags.items():
215 215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216 216
217 217 def _test_single_commit_cache(self, commit_id):
218 218 commit = self.repo.get_commit(commit_id)
219 219 assert commit_id in self.repo.commits
220 220 assert commit is self.repo.commits[commit_id]
221 221
    def test_initial_commit(self):
        """The first commit exposes message, author and a browsable tree."""
        commit_id = self.repo.commit_ids[0]
        init_commit = self.repo.get_commit(commit_id)
        init_author = init_commit.author

        assert init_commit.message == 'initial import\n'
        assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
        assert init_author == init_commit.committer
        for path in ('vcs/__init__.py',
                     'vcs/backends/BaseRepository.py',
                     'vcs/backends/__init__.py'):
            assert isinstance(init_commit.get_node(path), FileNode)
        for path in ('', 'vcs', 'vcs/backends'):
            assert isinstance(init_commit.get_node(path), DirNode)

        with pytest.raises(NodeDoesNotExistError):
            init_commit.get_node(path='foobar')

        # a trailing slash is accepted for directory lookups
        node = init_commit.get_node('vcs/')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node('vcs')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node('vcs/__init__.py')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.FILE
251 251
252 252 def test_not_existing_commit(self):
253 253 with pytest.raises(RepositoryError):
254 254 self.repo.get_commit('f' * 40)
255 255
    def test_commit10(self):
        """Commit #10 contains README.rst with the exact expected content."""

        commit10 = self.repo.get_commit(self.repo.commit_ids[9])
        README = """===
VCS
===

Various Version Control System management abstraction layer for Python.

Introduction
------------

TODO: To be written...

"""
        node = commit10.get_node('README.rst')
        assert node.kind == NodeKind.FILE
        assert node.content == README
274 274
275 275 def test_head(self):
276 276 assert self.repo.head == self.repo.get_commit().raw_id
277 277
    def test_checkout_with_create(self):
        """`_checkout(..., create=True)` creates and switches to a new branch."""
        repo_clone = self.get_clone_repo()

        new_branch = 'new_branch'
        assert repo_clone._current_branch() == 'master'
        assert set(repo_clone.branches) == {'master'}
        repo_clone._checkout(new_branch, create=True)

        # Branches is a lazy property so we need to recreate the Repo object.
        repo_clone = GitRepository(repo_clone.path)
        assert set(repo_clone.branches) == {'master', new_branch}
        assert repo_clone._current_branch() == new_branch
290 290
291 291 def test_checkout(self):
292 292 repo_clone = self.get_clone_repo()
293 293
294 294 repo_clone._checkout('new_branch', create=True)
295 295 repo_clone._checkout('master')
296 296
297 297 assert repo_clone._current_branch() == 'master'
298 298
299 299 def test_checkout_same_branch(self):
300 300 repo_clone = self.get_clone_repo()
301 301
302 302 repo_clone._checkout('master')
303 303 assert repo_clone._current_branch() == 'master'
304 304
305 305 def test_checkout_branch_already_exists(self):
306 306 repo_clone = self.get_clone_repo()
307 307
308 308 with pytest.raises(RepositoryError):
309 309 repo_clone._checkout('master', create=True)
310 310
311 311 def test_checkout_bare_repo(self):
312 312 with pytest.raises(RepositoryError):
313 313 self.repo._checkout('master')
314 314
315 315 def test_current_branch_bare_repo(self):
316 316 with pytest.raises(RepositoryError):
317 317 self.repo._current_branch()
318 318
319 319 def test_current_branch_empty_repo(self):
320 320 repo = self.get_empty_repo()
321 321 assert repo._current_branch() is None
322 322
323 323 def test_local_clone(self):
324 324 clone_path = next(REPO_PATH_GENERATOR)
325 325 self.repo._local_clone(clone_path, 'master')
326 326 repo_clone = GitRepository(clone_path)
327 327
328 328 assert self.repo.commit_ids == repo_clone.commit_ids
329 329
    def test_local_clone_with_specific_branch(self):
        """Cloning a single branch only brings that branch's history."""
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        clone_path = next(REPO_PATH_GENERATOR)
        source_repo._local_clone(clone_path, 'new_branch')
        repo_clone = GitRepository(clone_path)

        # new_branch points two commits behind master, hence the slice
        assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids

        clone_path = next(REPO_PATH_GENERATOR)
        source_repo._local_clone(clone_path, 'master')
        repo_clone = GitRepository(clone_path)

        assert source_repo.commit_ids == repo_clone.commit_ids
349 349
350 350 def test_local_clone_fails_if_target_exists(self):
351 351 with pytest.raises(RepositoryError):
352 352 self.repo._local_clone(self.repo.path, 'master')
353 353
    def test_local_fetch(self):
        """Fetching a branch records its head in the last-fetch heads."""
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        target_repo._local_fetch(source_repo.path, 'new_branch')
        assert target_repo._last_fetch_heads() == [new_branch_commit]

        target_repo._local_fetch(source_repo.path, 'master')
        assert target_repo._last_fetch_heads() == [master_commit]
369 369
370 370 def test_local_fetch_from_bare_repo(self):
371 371 target_repo = self.get_empty_repo()
372 372 target_repo._local_fetch(self.repo.path, 'master')
373 373
374 374 master_commit = self.repo.commit_ids[-1]
375 375 assert target_repo._last_fetch_heads() == [master_commit]
376 376
377 377 def test_local_fetch_from_same_repo(self):
378 378 with pytest.raises(ValueError):
379 379 self.repo._local_fetch(self.repo.path, 'master')
380 380
381 381 def test_local_fetch_branch_does_not_exist(self):
382 382 target_repo = self.get_empty_repo()
383 383
384 384 with pytest.raises(RepositoryError):
385 385 target_repo._local_fetch(self.repo.path, 'new_branch')
386 386
    def test_local_pull(self):
        """Pulling a branch moves the target repository head onto it."""
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        target_repo._local_pull(source_repo.path, 'new_branch')
        # re-open the repo to refresh lazily-computed attributes
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == new_branch_commit

        target_repo._local_pull(source_repo.path, 'master')
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == master_commit
404 404
405 405 def test_local_pull_in_bare_repo(self):
406 406 with pytest.raises(RepositoryError):
407 407 self.repo._local_pull(self.repo.path, 'master')
408 408
    def test_local_merge(self):
        """A fetched head merges cleanly and produces a proper merge commit."""
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        # This is required as one cannot do a -ff-only merge in an empty repo.
        target_repo._local_pull(source_repo.path, 'new_branch')

        target_repo._local_fetch(source_repo.path, 'master')
        merge_message = 'Merge message\n\nDescription:...'
        user_name = 'Albert Einstein'
        user_email = 'albert@einstein.com'
        target_repo._local_merge(merge_message, user_name, user_email,
                                 target_repo._last_fetch_heads())

        target_repo = GitRepository(target_repo.path)
        assert target_repo.commit_ids[-2] == master_commit
        last_commit = target_repo.get_commit(target_repo.head)
        assert last_commit.message.strip() == merge_message
        assert last_commit.author == '%s <%s>' % (user_name, user_email)

        # the merge must not leave an intermediate MERGE_HEAD state behind
        assert not os.path.exists(
            os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437 437
    def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
        """A conflicting merge raises and leaves no MERGE_HEAD behind."""
        target_repo = vcsbackend_git.create_repo(number_of_commits=1)
        vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')

        target_repo._local_fetch(self.repo.path, 'master')
        with pytest.raises(RepositoryError):
            target_repo._local_merge(
                'merge_message', 'user name', 'user@name.com',
                target_repo._last_fetch_heads())

        # Check we are not left in an intermediate merge state
        assert not os.path.exists(
            os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451 451
    def test_local_merge_into_empty_repo(self):
        """Merging into an empty repository is not possible and must raise."""
        target_repo = self.get_empty_repo()

        # This is required as one cannot do a -ff-only merge in an empty repo.
        target_repo._local_fetch(self.repo.path, 'master')
        with pytest.raises(RepositoryError):
            target_repo._local_merge(
                'merge_message', 'user name', 'user@name.com',
                target_repo._last_fetch_heads())
461 461
462 462 def test_local_merge_in_bare_repo(self):
463 463 with pytest.raises(RepositoryError):
464 464 self.repo._local_merge(
465 465 'merge_message', 'user name', 'user@name.com', None)
466 466
    def test_local_push_non_bare(self):
        """Pushing master to a new branch of a non-bare target works."""
        target_repo = self.get_empty_repo()

        pushed_branch = 'pushed_branch'
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
481 481
    def test_local_push_bare(self):
        """Pushing master to a new branch of a bare target works."""
        target_repo = self.get_empty_repo(bare=True)

        pushed_branch = 'pushed_branch'
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches.  (Bare repo: HEAD lives at the top level.)
        with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
496 496
    def test_local_push_non_bare_target_branch_is_checked_out(self):
        """Pushing to the branch currently checked out in the target works."""
        target_repo = self.get_clone_repo()

        pushed_branch = 'pushed_branch'
        # Create a new branch in source repo
        new_branch_commit = target_repo.commit_ids[-3]
        target_repo._checkout(new_branch_commit)
        target_repo._checkout(pushed_branch, create=True)

        self.repo._local_push('master', target_repo.path, pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
512 512
513 513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 515 with pytest.raises(RepositoryError):
516 516 self.repo._local_push('master', target_repo.path, 'master')
517 517
    def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
        """With enable_hooks=True, RC_SKIP_HOOKS must not be in the git env."""
        target_repo = self.get_empty_repo(bare=True)

        with mock.patch.object(self.repo, 'run_git_command') as run_mock:
            self.repo._local_push(
                'master', target_repo.path, 'master', enable_hooks=True)
        env = run_mock.call_args[1]['extra_env']
        assert 'RC_SKIP_HOOKS' not in env
526 526
    def _add_failing_hook(self, repo_path, hook_name, bare=False):
        # Helper: install an executable hook script that exits non-zero
        # unless RC_SKIP_HOOKS is set in the environment.
        path_components = (
            ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
        hook_path = os.path.join(repo_path, *path_components)
        with open(hook_path, 'w') as f:
            script_lines = [
                '#!%s' % sys.executable,
                'import os',
                'import sys',
                'if os.environ.get("RC_SKIP_HOOKS"):',
                ' sys.exit(0)',
                'sys.exit(1)',
            ]
            f.write('\n'.join(script_lines))
        os.chmod(hook_path, 0o755)
542 542
    def test_local_push_does_not_execute_hook(self):
        """By default hooks are skipped, so a failing hook cannot block push."""
        target_repo = self.get_empty_repo()

        pushed_branch = 'pushed_branch'
        self._add_failing_hook(target_repo.path, 'pre-receive')
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
558 558
559 559 def test_local_push_executes_hook(self):
560 560 target_repo = self.get_empty_repo(bare=True)
561 561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 562 with pytest.raises(RepositoryError):
563 563 self.repo._local_push(
564 564 'master', target_repo.path, 'master', enable_hooks=True)
565 565
    def test_maybe_prepare_merge_workspace(self):
        """Preparing a merge workspace works and is idempotent (same refs)."""
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, 'pr2', Reference('branch', 'master', 'unused'),
            Reference('branch', 'master', 'unused'))

        assert os.path.isdir(workspace)
        workspace_repo = GitRepository(workspace)
        assert workspace_repo.branches == self.repo.branches

        # Calling it a second time should also succeed
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, 'pr2', Reference('branch', 'master', 'unused'),
            Reference('branch', 'master', 'unused'))
        assert os.path.isdir(workspace)
580 580
    def test_maybe_prepare_merge_workspace_different_refs(self):
        """Workspace preparation is also idempotent for differing refs."""
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, 'pr2', Reference('branch', 'master', 'unused'),
            Reference('branch', 'develop', 'unused'))

        assert os.path.isdir(workspace)
        workspace_repo = GitRepository(workspace)
        assert workspace_repo.branches == self.repo.branches

        # Calling it a second time should also succeed
        workspace = self.repo._maybe_prepare_merge_workspace(
            2, 'pr2', Reference('branch', 'master', 'unused'),
            Reference('branch', 'develop', 'unused'))
        assert os.path.isdir(workspace)
595 595
596 596 def test_cleanup_merge_workspace(self):
597 597 workspace = self.repo._maybe_prepare_merge_workspace(
598 598 2, 'pr3', Reference('branch', 'master', 'unused'),
599 599 Reference('branch', 'master', 'unused'))
600 600 self.repo.cleanup_merge_workspace(2, 'pr3')
601 601
602 602 assert not os.path.exists(workspace)
603 603
    def test_cleanup_merge_workspace_invalid_workspace_id(self):
        # No assert: cleaning up a workspace that does not exist should
        # still succeed silently.
        self.repo.cleanup_merge_workspace(1, 'pr4')
608 608
609 609 def test_set_refs(self):
610 610 test_ref = 'refs/test-refs/abcde'
611 611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612 612
613 613 self.repo.set_refs(test_ref, test_commit_id)
614 614 stdout, _ = self.repo.run_git_command(['show-ref'])
615 615 assert test_ref in stdout
616 616 assert test_commit_id in stdout
617 617
618 618 def test_remove_ref(self):
619 619 test_ref = 'refs/test-refs/abcde'
620 620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 621 self.repo.set_refs(test_ref, test_commit_id)
622 622 stdout, _ = self.repo.run_git_command(['show-ref'])
623 623 assert test_ref in stdout
624 624 assert test_commit_id in stdout
625 625
626 626 self.repo.remove_ref(test_ref)
627 627 stdout, _ = self.repo.run_git_command(['show-ref'])
628 628 assert test_ref not in stdout
629 629 assert test_commit_id not in stdout
630 630
631 631
632 632 class TestGitCommit(object):
633 633
    @pytest.fixture(autouse=True)
    def prepare(self):
        # Fresh repository handle for every test in this class.
        self.repo = GitRepository(TEST_GIT_REPO)
637 637
638 638 def test_default_commit(self):
639 639 tip = self.repo.get_commit()
640 640 assert tip == self.repo.get_commit(None)
641 641 assert tip == self.repo.get_commit('tip')
642 642
643 643 def test_root_node(self):
644 644 tip = self.repo.get_commit()
645 645 assert tip.root is tip.get_node('')
646 646
    def test_lazy_fetch(self):
        """
        Test if commit's nodes expands and are cached as we walk through
        the commit. This test is somewhat hard to write as order of tests
        is a key here. Written by running command after command in a shell.
        """
        # NOTE: the node counts below depend on the fixture repo's tree layout
        commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
        assert commit_id in self.repo.commit_ids
        commit = self.repo.get_commit(commit_id)
        assert len(commit.nodes) == 0
        root = commit.root
        assert len(commit.nodes) == 1
        assert len(root.nodes) == 8
        # accessing root.nodes updates commit.nodes
        assert len(commit.nodes) == 9

        docs = root.get_node('docs')
        # we haven't yet accessed anything new as docs dir was already cached
        assert len(commit.nodes) == 9
        assert len(docs.nodes) == 8
        # accessing docs.nodes updates commit.nodes
        assert len(commit.nodes) == 17

        assert docs is commit.get_node('docs')
        assert docs is root.nodes[0]
        assert docs is root.dirs[0]
        assert docs is commit.get_node('docs')
674 674
    def test_nodes_with_commit(self):
        """Nodes fetched via parents and via full path are identical objects."""
        commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
        commit = self.repo.get_commit(commit_id)
        root = commit.root
        docs = root.get_node('docs')
        assert docs is commit.get_node('docs')
        api = docs.get_node('api')
        assert api is commit.get_node('docs/api')
        index = api.get_node('index.rst')
        assert index is commit.get_node('docs/api/index.rst')
        assert index is commit.get_node('docs')\
            .get_node('api')\
            .get_node('index.rst')
688 688
    def test_branch_and_tags(self):
        """
        rev0 = self.repo.commit_ids[0]
        commit0 = self.repo.get_commit(rev0)
        assert commit0.branch == 'master'
        assert commit0.tags == []

        rev10 = self.repo.commit_ids[10]
        commit10 = self.repo.get_commit(rev10)
        assert commit10.branch == 'master'
        assert commit10.tags == []

        rev44 = self.repo.commit_ids[44]
        commit44 = self.repo.get_commit(rev44)
        assert commit44.branch == 'web-branch'

        tip = self.repo.get_commit('tip')
        assert 'tip' in tip.tags
        """
        # Disabled test (body kept above as a docstring): those assertions
        # would fail - branches are now going to be changed at main API in
        # order to support git backend
        pass
711 711
    def test_file_size(self):
        """File nodes report their byte size at the given commit."""
        to_check = (
            ('c1214f7e79e02fc37156ff215cd71275450cffc3',
             'vcs/backends/BaseRepository.py', 502),
            ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
             'vcs/backends/hg.py', 854),
            ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
             'setup.py', 1068),

            ('d955cd312c17b02143c04fa1099a352b04368118',
             'vcs/backends/base.py', 2921),
            ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
             'vcs/backends/base.py', 3936),
            ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
             'vcs/backends/base.py', 6189),
        )
        for commit_id, path, size in to_check:
            node = self.repo.get_commit(commit_id).get_node(path)
            assert node.is_file()
            assert node.size == size
732 732
    def test_file_history_from_commits(self):
        """Node history only contains commits up to the asked revision."""
        node = self.repo[10].get_node('setup.py')
        commit_ids = [commit.raw_id for commit in node.history]
        assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids

        node = self.repo[20].get_node('setup.py')
        node_ids = [commit.raw_id for commit in node.history]
        assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids

        # special case we check history from commit that has this particular
        # file changed this means we check if it's included as well
        node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
            .get_node('setup.py')
        node_ids = [commit.raw_id for commit in node.history]
        assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750 750
    def test_file_history(self):
        """Known change-commits are a subset of each file node's history."""
        # we can only check if those commits are present in the history
        # as we cannot update this test every time file is changed
        files = {
            'setup.py': [
                '54386793436c938cff89326944d4c2702340037d',
                '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
                '998ed409c795fec2012b1c0ca054d99888b22090',
                '5e0eb4c47f56564395f76333f319d26c79e2fb09',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/nodes.py': [
                '33fa3223355104431402a888fa77a4e9956feb3e',
                'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
                'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
                'ab5721ca0a081f26bf43d9051e615af2cc99952f',
                'c877b68d18e792a66b7f4c529ea02c8f80801542',
                '4313566d2e417cb382948f8d9d7c765330356054',
                '6c2303a793671e807d1cfc70134c9ca0767d98c2',
                '54386793436c938cff89326944d4c2702340037d',
                '54000345d2e78b03a99d561399e8e548de3f3203',
                '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
                '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
                '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
                '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
                'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
                '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
                '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
                '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
                '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
                'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
                'f15c21f97864b4f071cddfbf2750ec2e23859414',
                'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
                'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
                '84dec09632a4458f79f50ddbbd155506c460b4f9',
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
                'b8d04012574729d2c29886e53b1a43ef16dd00a1',
                '6970b057cffe4aab0a792aa634c89f4bebf01441',
                'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
            ],
            'vcs/backends/git.py': [
                '4cf116ad5a457530381135e2f4c453e68a1b0105',
                '9a751d84d8e9408e736329767387f41b36935153',
                'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
                '428f81bb652bcba8d631bce926e8834ff49bdcc6',
                '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
                '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
                '50e08c506174d8645a4bb517dd122ac946a0f3bf',
                '54000345d2e78b03a99d561399e8e548de3f3203',
            ],
        }
        for path, commit_ids in files.items():
            node = self.repo.get_commit(commit_ids[0]).get_node(path)
            node_ids = [commit.raw_id for commit in node.history]
            assert set(commit_ids).issubset(set(node_ids)), (
                "We assumed that %s is subset of commit_ids for which file %s "
                "has been changed, and history of that node returned: %s"
                % (commit_ids, path, node_ids))
817 817
    def test_file_annotate(self):
        """Per-line blame matches the expected commit id for each line."""
        files = {
            'vcs/backends/__init__.py': {
                'c1214f7e79e02fc37156ff215cd71275450cffc3': {
                    'lines_no': 1,
                    'commits': [
                        'c1214f7e79e02fc37156ff215cd71275450cffc3',
                    ],
                },
                '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
                    'lines_no': 21,
                    'commits': [
                        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
                'e29b67bd158580fc90fc5e9111240b90e6e86064': {
                    'lines_no': 32,
                    'commits': [
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '54000345d2e78b03a99d561399e8e548de3f3203',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
                    ],
                },
            },
        }

        for fname, commit_dict in files.items():
            for commit_id, __ in commit_dict.items():
                commit = self.repo.get_commit(commit_id)

                # annotate yields (line_no, commit_id, lazy-commit, ...) items;
                # the lazy callable must resolve to the same id as column 1
                l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
                l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
                assert l1_1 == l1_2
                l1 = l1_1
                l2 = files[fname][commit_id]['commits']
                assert l1 == l2, (
                    "The lists of commit_ids for %s@commit_id %s"
                    "from annotation list should match each other, "
                    "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906 906
907 907 def test_files_state(self):
908 908 """
909 909 Tests state of FileNodes.
910 910 """
911 911 node = self.repo\
912 912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 913 .get_node('vcs/utils/diffs.py')
914 914 assert node.state, NodeState.ADDED
915 915 assert node.added
916 916 assert not node.changed
917 917 assert not node.not_changed
918 918 assert not node.removed
919 919
920 920 node = self.repo\
921 921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 922 .get_node('.hgignore')
923 923 assert node.state, NodeState.CHANGED
924 924 assert not node.added
925 925 assert node.changed
926 926 assert not node.not_changed
927 927 assert not node.removed
928 928
929 929 node = self.repo\
930 930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 931 .get_node('setup.py')
932 932 assert node.state, NodeState.NOT_CHANGED
933 933 assert not node.added
934 934 assert not node.changed
935 935 assert node.not_changed
936 936 assert not node.removed
937 937
938 938 # If node has REMOVED state then trying to fetch it would raise
939 939 # CommitError exception
940 940 commit = self.repo.get_commit(
941 941 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 942 path = 'vcs/backends/BaseRepository.py'
943 943 with pytest.raises(NodeDoesNotExistError):
944 944 commit.get_node(path)
945 945 # but it would be one of ``removed`` (commit's attribute)
946 946 assert path in [rf.path for rf in commit.removed]
947 947
948 948 commit = self.repo.get_commit(
949 949 '54386793436c938cff89326944d4c2702340037d')
950 950 changed = [
951 951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 952 'vcs/nodes.py']
953 953 assert set(changed) == set([f.path for f in commit.changed])
954 954
    def test_unicode_branch_refs(self):
        """Branch refs with non-ascii names/targets are still listed."""
        unicode_branches = {
            'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
            u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
        }
        with mock.patch(
                ("rhodecode.lib.vcs.backends.git.repository"
                 ".GitRepository._refs"),
                unicode_branches):
            branches = self.repo.branches

        assert 'unicode' in branches
        assert u'uniΓ§ΓΆβˆ‚e' in branches
968 968
    def test_unicode_tag_refs(self):
        """Tag refs with non-ascii names are still listed."""
        unicode_tags = {
            'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
            u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
        }
        with mock.patch(
                ("rhodecode.lib.vcs.backends.git.repository"
                 ".GitRepository._refs"),
                unicode_tags):
            tags = self.repo.tags

        assert 'unicode' in tags
        assert u'uniΓ§ΓΆβˆ‚e' in tags
982 982
983 983 def test_commit_message_is_unicode(self):
984 984 for commit in self.repo:
985 985 assert type(commit.message) == unicode
986 986
987 987 def test_commit_author_is_unicode(self):
988 988 for commit in self.repo:
989 989 assert type(commit.author) == unicode
990 990
991 991 def test_repo_files_content_is_unicode(self):
992 992 commit = self.repo.get_commit()
993 993 for node in commit.get_node('/'):
994 994 if node.is_file():
995 995 assert type(node.content) == unicode
996 996
997 997 def test_wrong_path(self):
998 998 # There is 'setup.py' in the root dir but not there:
999 999 path = 'foo/bar/setup.py'
1000 1000 tip = self.repo.get_commit()
1001 1001 with pytest.raises(VCSError):
1002 1002 tip.get_node(path)
1003 1003
    @pytest.mark.parametrize("author_email, commit_id", [
        ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
        ('lukasz.balcerzak@python-center.pl',
         'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
        ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
    ])
    def test_author_email(self, author_email, commit_id):
        """Parsed author email matches the expected value per commit."""
        commit = self.repo.get_commit(commit_id)
        assert author_email == commit.author_email
1013 1013
    @pytest.mark.parametrize("author, commit_id", [
        ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
        ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
        ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
    ])
    def test_author_username(self, author, commit_id):
        """Parsed author name matches the expected value per commit."""
        commit = self.repo.get_commit(commit_id)
        assert author == commit.author_name
1022 1022
1023 1023
1024 1024 class TestLargeFileRepo(object):
1025 1025
1026 1026 def test_large_file(self, backend_git):
1027 1027 conf = make_db_config()
1028 1028 repo = backend_git.create_test_repo('largefiles', conf)
1029 1029
1030 1030 tip = repo.scm_instance().get_commit()
1031 1031
1032 1032 # extract stored LF node into the origin cache
1033 1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1034 1034
1035 1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1036 1036 oid_path = os.path.join(lfs_store, oid)
1037 1037 oid_destination = os.path.join(
1038 1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1039 1039 shutil.copy(oid_path, oid_destination)
1040 1040
1041 1041 node = tip.get_node('1MB.zip')
1042 1042
1043 1043 lf_node = node.get_largefile_node()
1044 1044
1045 1045 assert lf_node.is_largefile() is True
1046 1046 assert lf_node.size == 1024000
1047 1047 assert lf_node.name == '1MB.zip'
1048 1048
1049 1049
1050 1050 @pytest.mark.usefixtures("vcs_repository_support")
1051 1051 class TestGitSpecificWithRepo(BackendTestMixin):
1052 1052
1053 1053 @classmethod
1054 1054 def _get_commits(cls):
1055 1055 return [
1056 1056 {
1057 1057 'message': 'Initial',
1058 1058 'author': 'Joe Doe <joe.doe@example.com>',
1059 1059 'date': datetime.datetime(2010, 1, 1, 20),
1060 1060 'added': [
1061 1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1062 1062 FileNode(
1063 1063 'foobar/static/admin', content='admin',
1064 1064 mode=0o120000), # this is a link
1065 1065 FileNode('foo', content='foo'),
1066 1066 ],
1067 1067 },
1068 1068 {
1069 1069 'message': 'Second',
1070 1070 'author': 'Joe Doe <joe.doe@example.com>',
1071 1071 'date': datetime.datetime(2010, 1, 1, 22),
1072 1072 'added': [
1073 1073 FileNode('foo2', content='foo2'),
1074 1074 ],
1075 1075 },
1076 1076 ]
1077 1077
1078 1078 def test_paths_slow_traversing(self):
1079 1079 commit = self.repo.get_commit()
1080 1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1081 1081 .get_node('admin').get_node('base.js').content == 'base'
1082 1082
1083 1083 def test_paths_fast_traversing(self):
1084 1084 commit = self.repo.get_commit()
1085 1085 assert (
1086 1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1087 1087 'base')
1088 1088
1089 1089 def test_get_diff_runs_git_command_with_hashes(self):
1090 1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1091 1091 self.repo.get_diff(self.repo[0], self.repo[1])
1092 1092 self.repo.run_git_command.assert_called_once_with(
1093 1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1094 '--abbrev=40', self.repo._get_commit_id(0),
1095 self.repo._get_commit_id(1)])
1094 '--abbrev=40', self.repo._lookup_commit(0),
1095 self.repo._lookup_commit(1)])
1096 1096
1097 1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1098 1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1099 1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1100 1100 self.repo.run_git_command.assert_called_once_with(
1101 1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1102 '--abbrev=40', self.repo._get_commit_id(1)])
1102 '--abbrev=40', self.repo._lookup_commit(1)])
1103 1103
1104 1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1105 1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1106 1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1107 1107 self.repo.run_git_command.assert_called_once_with(
1108 1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1109 '--abbrev=40', self.repo._get_commit_id(0),
1110 self.repo._get_commit_id(1), '--', 'foo'])
1109 '--abbrev=40', self.repo._lookup_commit(0),
1110 self.repo._lookup_commit(1), '--', 'foo'])
1111 1111
1112 1112
1113 1113 @pytest.mark.usefixtures("vcs_repository_support")
1114 1114 class TestGitRegression(BackendTestMixin):
1115 1115
1116 1116 @classmethod
1117 1117 def _get_commits(cls):
1118 1118 return [
1119 1119 {
1120 1120 'message': 'Initial',
1121 1121 'author': 'Joe Doe <joe.doe@example.com>',
1122 1122 'date': datetime.datetime(2010, 1, 1, 20),
1123 1123 'added': [
1124 1124 FileNode('bot/__init__.py', content='base'),
1125 1125 FileNode('bot/templates/404.html', content='base'),
1126 1126 FileNode('bot/templates/500.html', content='base'),
1127 1127 ],
1128 1128 },
1129 1129 {
1130 1130 'message': 'Second',
1131 1131 'author': 'Joe Doe <joe.doe@example.com>',
1132 1132 'date': datetime.datetime(2010, 1, 1, 22),
1133 1133 'added': [
1134 1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1135 1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1136 1136 FileNode(
1137 1137 'bot/build/static/templates/f.html', content='foo2'),
1138 1138 FileNode(
1139 1139 'bot/build/static/templates/f1.html', content='foo2'),
1140 1140 FileNode('bot/build/templates/err.html', content='foo2'),
1141 1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1142 1142 ],
1143 1143 },
1144 1144 ]
1145 1145
1146 1146 @pytest.mark.parametrize("path, expected_paths", [
1147 1147 ('bot', [
1148 1148 'bot/build',
1149 1149 'bot/templates',
1150 1150 'bot/__init__.py']),
1151 1151 ('bot/build', [
1152 1152 'bot/build/migrations',
1153 1153 'bot/build/static',
1154 1154 'bot/build/templates']),
1155 1155 ('bot/build/static', [
1156 1156 'bot/build/static/templates']),
1157 1157 ('bot/build/static/templates', [
1158 1158 'bot/build/static/templates/f.html',
1159 1159 'bot/build/static/templates/f1.html']),
1160 1160 ('bot/build/templates', [
1161 1161 'bot/build/templates/err.html',
1162 1162 'bot/build/templates/err2.html']),
1163 1163 ('bot/templates/', [
1164 1164 'bot/templates/404.html',
1165 1165 'bot/templates/500.html']),
1166 1166 ])
1167 1167 def test_similar_paths(self, path, expected_paths):
1168 1168 commit = self.repo.get_commit()
1169 1169 paths = [n.path for n in commit.get_nodes(path)]
1170 1170 assert paths == expected_paths
1171 1171
1172 1172
1173 1173 class TestDiscoverGitVersion(object):
1174 1174
1175 1175 def test_returns_git_version(self, baseapp):
1176 1176 version = discover_git_version()
1177 1177 assert version
1178 1178
1179 1179 def test_returns_empty_string_without_vcsserver(self):
1180 1180 mock_connection = mock.Mock()
1181 1181 mock_connection.discover_git_version = mock.Mock(
1182 1182 side_effect=Exception)
1183 1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1184 1184 version = discover_git_version()
1185 1185 assert version == ''
1186 1186
1187 1187
1188 1188 class TestGetSubmoduleUrl(object):
1189 1189 def test_submodules_file_found(self):
1190 1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1191 1191 node = mock.Mock()
1192 1192 with mock.patch.object(
1193 1193 commit, 'get_node', return_value=node) as get_node_mock:
1194 1194 node.content = (
1195 1195 '[submodule "subrepo1"]\n'
1196 1196 '\tpath = subrepo1\n'
1197 1197 '\turl = https://code.rhodecode.com/dulwich\n'
1198 1198 )
1199 1199 result = commit._get_submodule_url('subrepo1')
1200 1200 get_node_mock.assert_called_once_with('.gitmodules')
1201 1201 assert result == 'https://code.rhodecode.com/dulwich'
1202 1202
1203 1203 def test_complex_submodule_path(self):
1204 1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1205 1205 node = mock.Mock()
1206 1206 with mock.patch.object(
1207 1207 commit, 'get_node', return_value=node) as get_node_mock:
1208 1208 node.content = (
1209 1209 '[submodule "complex/subrepo/path"]\n'
1210 1210 '\tpath = complex/subrepo/path\n'
1211 1211 '\turl = https://code.rhodecode.com/dulwich\n'
1212 1212 )
1213 1213 result = commit._get_submodule_url('complex/subrepo/path')
1214 1214 get_node_mock.assert_called_once_with('.gitmodules')
1215 1215 assert result == 'https://code.rhodecode.com/dulwich'
1216 1216
1217 1217 def test_submodules_file_not_found(self):
1218 1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1219 1219 with mock.patch.object(
1220 1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1221 1221 result = commit._get_submodule_url('complex/subrepo/path')
1222 1222 assert result is None
1223 1223
1224 1224 def test_path_not_found(self):
1225 1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1226 1226 node = mock.Mock()
1227 1227 with mock.patch.object(
1228 1228 commit, 'get_node', return_value=node) as get_node_mock:
1229 1229 node.content = (
1230 1230 '[submodule "subrepo1"]\n'
1231 1231 '\tpath = subrepo1\n'
1232 1232 '\turl = https://code.rhodecode.com/dulwich\n'
1233 1233 )
1234 1234 result = commit._get_submodule_url('subrepo2')
1235 1235 get_node_mock.assert_called_once_with('.gitmodules')
1236 1236 assert result is None
1237 1237
1238 1238 def test_returns_cached_values(self):
1239 1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1240 1240 node = mock.Mock()
1241 1241 with mock.patch.object(
1242 1242 commit, 'get_node', return_value=node) as get_node_mock:
1243 1243 node.content = (
1244 1244 '[submodule "subrepo1"]\n'
1245 1245 '\tpath = subrepo1\n'
1246 1246 '\turl = https://code.rhodecode.com/dulwich\n'
1247 1247 )
1248 1248 for _ in range(3):
1249 1249 commit._get_submodule_url('subrepo1')
1250 1250 get_node_mock.assert_called_once_with('.gitmodules')
1251 1251
1252 1252 def test_get_node_returns_a_link(self):
1253 1253 repository = mock.Mock()
1254 1254 repository.alias = 'git'
1255 1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 1257 get_id_patch = mock.patch.object(
1258 1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1259 1259 get_submodule_patch = mock.patch.object(
1260 1260 commit, '_get_submodule_url', return_value=submodule_url)
1261 1261
1262 1262 with get_id_patch, get_submodule_patch as submodule_mock:
1263 1263 node = commit.get_node('/abcde')
1264 1264
1265 1265 submodule_mock.assert_called_once_with('/abcde')
1266 1266 assert type(node) == SubModuleNode
1267 1267 assert node.url == submodule_url
1268 1268
1269 1269 def test_get_nodes_returns_links(self):
1270 1270 repository = mock.MagicMock()
1271 1271 repository.alias = 'git'
1272 1272 repository._remote.tree_items.return_value = [
1273 1273 ('subrepo', 'stat', 1, 'link')
1274 1274 ]
1275 1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1276 1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1277 1277 get_id_patch = mock.patch.object(
1278 1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1279 1279 get_submodule_patch = mock.patch.object(
1280 1280 commit, '_get_submodule_url', return_value=submodule_url)
1281 1281
1282 1282 with get_id_patch, get_submodule_patch as submodule_mock:
1283 1283 nodes = commit.get_nodes('/abcde')
1284 1284
1285 1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1286 1286 assert len(nodes) == 1
1287 1287 assert type(nodes[0]) == SubModuleNode
1288 1288 assert nodes[0].url == submodule_url
General Comments 0
You need to be logged in to leave comments. Login now