##// END OF EJS Templates
commits: re-implemented fetching a single commit for git case....
marcink -
r3740:dcd8fbea new-ui
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,502 +1,501 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23 import collections
23 import collections
24
24
25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from pyramid.renderers import render
27 from pyramid.renderers import render
28 from pyramid.response import Response
28 from pyramid.response import Response
29
29
30 from rhodecode.apps._base import RepoAppView
30 from rhodecode.apps._base import RepoAppView
31
31
32 from rhodecode.lib import diffs, codeblocks
32 from rhodecode.lib import diffs, codeblocks
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35
35
36 from rhodecode.lib.compat import OrderedDict
36 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.diffs import (
37 from rhodecode.lib.diffs import (
38 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
38 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
39 get_diff_whitespace_flag)
39 get_diff_whitespace_flag)
40 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
40 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
41 import rhodecode.lib.helpers as h
41 import rhodecode.lib.helpers as h
42 from rhodecode.lib.utils2 import safe_unicode, str2bool
42 from rhodecode.lib.utils2 import safe_unicode, str2bool
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 RepositoryError, CommitDoesNotExistError)
45 RepositoryError, CommitDoesNotExistError)
46 from rhodecode.model.db import ChangesetComment, ChangesetStatus
46 from rhodecode.model.db import ChangesetComment, ChangesetStatus
47 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.comment import CommentsModel
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51
51
# Module-level logger, named after this module so log output is filterable.
log = logging.getLogger(__name__)
53
53
54
54
55 def _update_with_GET(params, request):
55 def _update_with_GET(params, request):
56 for k in ['diff1', 'diff2', 'diff']:
56 for k in ['diff1', 'diff2', 'diff']:
57 params[k] += request.GET.getall(k)
57 params[k] += request.GET.getall(k)
58
58
59
59
60
60
61
61
62
62
class RepoCommitsView(RepoAppView):

    def load_default_context(self):
        """Build the default template context for commit views.

        Returns the local template context (with app defaults included)
        and attaches the vcs repository as ``c.rhodecode_repo``.
        """
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo

        return c
69
69
70 def _is_diff_cache_enabled(self, target_repo):
70 def _is_diff_cache_enabled(self, target_repo):
71 caching_enabled = self._get_general_setting(
71 caching_enabled = self._get_general_setting(
72 target_repo, 'rhodecode_diff_cache')
72 target_repo, 'rhodecode_diff_cache')
73 log.debug('Diff caching enabled: %s', caching_enabled)
73 log.debug('Diff caching enabled: %s', caching_enabled)
74 return caching_enabled
74 return caching_enabled
75
75
76 def _commit(self, commit_id_range, method):
76 def _commit(self, commit_id_range, method):
77 _ = self.request.translate
77 _ = self.request.translate
78 c = self.load_default_context()
78 c = self.load_default_context()
79 c.fulldiff = self.request.GET.get('fulldiff')
79 c.fulldiff = self.request.GET.get('fulldiff')
80
80
81 # fetch global flags of ignore ws or context lines
81 # fetch global flags of ignore ws or context lines
82 diff_context = get_diff_context(self.request)
82 diff_context = get_diff_context(self.request)
83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
84
84
85 # diff_limit will cut off the whole diff if the limit is applied
85 # diff_limit will cut off the whole diff if the limit is applied
86 # otherwise it will just hide the big files from the front-end
86 # otherwise it will just hide the big files from the front-end
87 diff_limit = c.visual.cut_off_limit_diff
87 diff_limit = c.visual.cut_off_limit_diff
88 file_limit = c.visual.cut_off_limit_file
88 file_limit = c.visual.cut_off_limit_file
89
89
90 # get ranges of commit ids if preset
90 # get ranges of commit ids if preset
91 commit_range = commit_id_range.split('...')[:2]
91 commit_range = commit_id_range.split('...')[:2]
92
92
93 try:
93 try:
94 pre_load = ['affected_files', 'author', 'branch', 'date',
94 pre_load = ['affected_files', 'author', 'branch', 'date',
95 'message', 'parents']
95 'message', 'parents']
96
96
97 if len(commit_range) == 2:
97 if len(commit_range) == 2:
98 commits = self.rhodecode_vcs_repo.get_commits(
98 commits = self.rhodecode_vcs_repo.get_commits(
99 start_id=commit_range[0], end_id=commit_range[1],
99 start_id=commit_range[0], end_id=commit_range[1],
100 pre_load=pre_load, translate_tags=False)
100 pre_load=pre_load, translate_tags=False)
101 commits = list(commits)
101 commits = list(commits)
102 else:
102 else:
103 commits = [self.rhodecode_vcs_repo.get_commit(
103 commits = [self.rhodecode_vcs_repo.get_commit(
104 commit_id=commit_id_range, pre_load=pre_load)]
104 commit_id=commit_id_range, pre_load=pre_load)]
105
105
106 c.commit_ranges = commits
106 c.commit_ranges = commits
107 if not c.commit_ranges:
107 if not c.commit_ranges:
108 raise RepositoryError(
108 raise RepositoryError('The commit range returned an empty result')
109 'The commit range returned an empty result')
109 except CommitDoesNotExistError as e:
110 except CommitDoesNotExistError:
110 msg = _('No such commit exists. Org exception: `{}`').format(e)
111 msg = _('No such commit exists for this repository')
112 h.flash(msg, category='error')
111 h.flash(msg, category='error')
113 raise HTTPNotFound()
112 raise HTTPNotFound()
114 except Exception:
113 except Exception:
115 log.exception("General failure")
114 log.exception("General failure")
116 raise HTTPNotFound()
115 raise HTTPNotFound()
117
116
118 c.changes = OrderedDict()
117 c.changes = OrderedDict()
119 c.lines_added = 0
118 c.lines_added = 0
120 c.lines_deleted = 0
119 c.lines_deleted = 0
121
120
122 # auto collapse if we have more than limit
121 # auto collapse if we have more than limit
123 collapse_limit = diffs.DiffProcessor._collapse_commits_over
122 collapse_limit = diffs.DiffProcessor._collapse_commits_over
124 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
123 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
125
124
126 c.commit_statuses = ChangesetStatus.STATUSES
125 c.commit_statuses = ChangesetStatus.STATUSES
127 c.inline_comments = []
126 c.inline_comments = []
128 c.files = []
127 c.files = []
129
128
130 c.statuses = []
129 c.statuses = []
131 c.comments = []
130 c.comments = []
132 c.unresolved_comments = []
131 c.unresolved_comments = []
133 if len(c.commit_ranges) == 1:
132 if len(c.commit_ranges) == 1:
134 commit = c.commit_ranges[0]
133 commit = c.commit_ranges[0]
135 c.comments = CommentsModel().get_comments(
134 c.comments = CommentsModel().get_comments(
136 self.db_repo.repo_id,
135 self.db_repo.repo_id,
137 revision=commit.raw_id)
136 revision=commit.raw_id)
138 c.statuses.append(ChangesetStatusModel().get_status(
137 c.statuses.append(ChangesetStatusModel().get_status(
139 self.db_repo.repo_id, commit.raw_id))
138 self.db_repo.repo_id, commit.raw_id))
140 # comments from PR
139 # comments from PR
141 statuses = ChangesetStatusModel().get_statuses(
140 statuses = ChangesetStatusModel().get_statuses(
142 self.db_repo.repo_id, commit.raw_id,
141 self.db_repo.repo_id, commit.raw_id,
143 with_revisions=True)
142 with_revisions=True)
144 prs = set(st.pull_request for st in statuses
143 prs = set(st.pull_request for st in statuses
145 if st.pull_request is not None)
144 if st.pull_request is not None)
146 # from associated statuses, check the pull requests, and
145 # from associated statuses, check the pull requests, and
147 # show comments from them
146 # show comments from them
148 for pr in prs:
147 for pr in prs:
149 c.comments.extend(pr.comments)
148 c.comments.extend(pr.comments)
150
149
151 c.unresolved_comments = CommentsModel()\
150 c.unresolved_comments = CommentsModel()\
152 .get_commit_unresolved_todos(commit.raw_id)
151 .get_commit_unresolved_todos(commit.raw_id)
153
152
154 diff = None
153 diff = None
155 # Iterate over ranges (default commit view is always one commit)
154 # Iterate over ranges (default commit view is always one commit)
156 for commit in c.commit_ranges:
155 for commit in c.commit_ranges:
157 c.changes[commit.raw_id] = []
156 c.changes[commit.raw_id] = []
158
157
159 commit2 = commit
158 commit2 = commit
160 commit1 = commit.first_parent
159 commit1 = commit.first_parent
161
160
162 if method == 'show':
161 if method == 'show':
163 inline_comments = CommentsModel().get_inline_comments(
162 inline_comments = CommentsModel().get_inline_comments(
164 self.db_repo.repo_id, revision=commit.raw_id)
163 self.db_repo.repo_id, revision=commit.raw_id)
165 c.inline_cnt = CommentsModel().get_inline_comments_count(
164 c.inline_cnt = CommentsModel().get_inline_comments_count(
166 inline_comments)
165 inline_comments)
167 c.inline_comments = inline_comments
166 c.inline_comments = inline_comments
168
167
169 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
168 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
170 self.db_repo)
169 self.db_repo)
171 cache_file_path = diff_cache_exist(
170 cache_file_path = diff_cache_exist(
172 cache_path, 'diff', commit.raw_id,
171 cache_path, 'diff', commit.raw_id,
173 hide_whitespace_changes, diff_context, c.fulldiff)
172 hide_whitespace_changes, diff_context, c.fulldiff)
174
173
175 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
174 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
176 force_recache = str2bool(self.request.GET.get('force_recache'))
175 force_recache = str2bool(self.request.GET.get('force_recache'))
177
176
178 cached_diff = None
177 cached_diff = None
179 if caching_enabled:
178 if caching_enabled:
180 cached_diff = load_cached_diff(cache_file_path)
179 cached_diff = load_cached_diff(cache_file_path)
181
180
182 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
181 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
183 if not force_recache and has_proper_diff_cache:
182 if not force_recache and has_proper_diff_cache:
184 diffset = cached_diff['diff']
183 diffset = cached_diff['diff']
185 else:
184 else:
186 vcs_diff = self.rhodecode_vcs_repo.get_diff(
185 vcs_diff = self.rhodecode_vcs_repo.get_diff(
187 commit1, commit2,
186 commit1, commit2,
188 ignore_whitespace=hide_whitespace_changes,
187 ignore_whitespace=hide_whitespace_changes,
189 context=diff_context)
188 context=diff_context)
190
189
191 diff_processor = diffs.DiffProcessor(
190 diff_processor = diffs.DiffProcessor(
192 vcs_diff, format='newdiff', diff_limit=diff_limit,
191 vcs_diff, format='newdiff', diff_limit=diff_limit,
193 file_limit=file_limit, show_full_diff=c.fulldiff)
192 file_limit=file_limit, show_full_diff=c.fulldiff)
194
193
195 _parsed = diff_processor.prepare()
194 _parsed = diff_processor.prepare()
196
195
197 diffset = codeblocks.DiffSet(
196 diffset = codeblocks.DiffSet(
198 repo_name=self.db_repo_name,
197 repo_name=self.db_repo_name,
199 source_node_getter=codeblocks.diffset_node_getter(commit1),
198 source_node_getter=codeblocks.diffset_node_getter(commit1),
200 target_node_getter=codeblocks.diffset_node_getter(commit2))
199 target_node_getter=codeblocks.diffset_node_getter(commit2))
201
200
202 diffset = self.path_filter.render_patchset_filtered(
201 diffset = self.path_filter.render_patchset_filtered(
203 diffset, _parsed, commit1.raw_id, commit2.raw_id)
202 diffset, _parsed, commit1.raw_id, commit2.raw_id)
204
203
205 # save cached diff
204 # save cached diff
206 if caching_enabled:
205 if caching_enabled:
207 cache_diff(cache_file_path, diffset, None)
206 cache_diff(cache_file_path, diffset, None)
208
207
209 c.limited_diff = diffset.limited_diff
208 c.limited_diff = diffset.limited_diff
210 c.changes[commit.raw_id] = diffset
209 c.changes[commit.raw_id] = diffset
211 else:
210 else:
212 # TODO(marcink): no cache usage here...
211 # TODO(marcink): no cache usage here...
213 _diff = self.rhodecode_vcs_repo.get_diff(
212 _diff = self.rhodecode_vcs_repo.get_diff(
214 commit1, commit2,
213 commit1, commit2,
215 ignore_whitespace=hide_whitespace_changes, context=diff_context)
214 ignore_whitespace=hide_whitespace_changes, context=diff_context)
216 diff_processor = diffs.DiffProcessor(
215 diff_processor = diffs.DiffProcessor(
217 _diff, format='newdiff', diff_limit=diff_limit,
216 _diff, format='newdiff', diff_limit=diff_limit,
218 file_limit=file_limit, show_full_diff=c.fulldiff)
217 file_limit=file_limit, show_full_diff=c.fulldiff)
219 # downloads/raw we only need RAW diff nothing else
218 # downloads/raw we only need RAW diff nothing else
220 diff = self.path_filter.get_raw_patch(diff_processor)
219 diff = self.path_filter.get_raw_patch(diff_processor)
221 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
220 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
222
221
223 # sort comments by how they were generated
222 # sort comments by how they were generated
224 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
223 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
225
224
226 if len(c.commit_ranges) == 1:
225 if len(c.commit_ranges) == 1:
227 c.commit = c.commit_ranges[0]
226 c.commit = c.commit_ranges[0]
228 c.parent_tmpl = ''.join(
227 c.parent_tmpl = ''.join(
229 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
228 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
230
229
231 if method == 'download':
230 if method == 'download':
232 response = Response(diff)
231 response = Response(diff)
233 response.content_type = 'text/plain'
232 response.content_type = 'text/plain'
234 response.content_disposition = (
233 response.content_disposition = (
235 'attachment; filename=%s.diff' % commit_id_range[:12])
234 'attachment; filename=%s.diff' % commit_id_range[:12])
236 return response
235 return response
237 elif method == 'patch':
236 elif method == 'patch':
238 c.diff = safe_unicode(diff)
237 c.diff = safe_unicode(diff)
239 patch = render(
238 patch = render(
240 'rhodecode:templates/changeset/patch_changeset.mako',
239 'rhodecode:templates/changeset/patch_changeset.mako',
241 self._get_template_context(c), self.request)
240 self._get_template_context(c), self.request)
242 response = Response(patch)
241 response = Response(patch)
243 response.content_type = 'text/plain'
242 response.content_type = 'text/plain'
244 return response
243 return response
245 elif method == 'raw':
244 elif method == 'raw':
246 response = Response(diff)
245 response = Response(diff)
247 response.content_type = 'text/plain'
246 response.content_type = 'text/plain'
248 return response
247 return response
249 elif method == 'show':
248 elif method == 'show':
250 if len(c.commit_ranges) == 1:
249 if len(c.commit_ranges) == 1:
251 html = render(
250 html = render(
252 'rhodecode:templates/changeset/changeset.mako',
251 'rhodecode:templates/changeset/changeset.mako',
253 self._get_template_context(c), self.request)
252 self._get_template_context(c), self.request)
254 return Response(html)
253 return Response(html)
255 else:
254 else:
256 c.ancestor = None
255 c.ancestor = None
257 c.target_repo = self.db_repo
256 c.target_repo = self.db_repo
258 html = render(
257 html = render(
259 'rhodecode:templates/changeset/changeset_range.mako',
258 'rhodecode:templates/changeset/changeset_range.mako',
260 self._get_template_context(c), self.request)
259 self._get_template_context(c), self.request)
261 return Response(html)
260 return Response(html)
262
261
263 raise HTTPBadRequest()
262 raise HTTPBadRequest()
264
263
265 @LoginRequired()
264 @LoginRequired()
266 @HasRepoPermissionAnyDecorator(
265 @HasRepoPermissionAnyDecorator(
267 'repository.read', 'repository.write', 'repository.admin')
266 'repository.read', 'repository.write', 'repository.admin')
268 @view_config(
267 @view_config(
269 route_name='repo_commit', request_method='GET',
268 route_name='repo_commit', request_method='GET',
270 renderer=None)
269 renderer=None)
271 def repo_commit_show(self):
270 def repo_commit_show(self):
272 commit_id = self.request.matchdict['commit_id']
271 commit_id = self.request.matchdict['commit_id']
273 return self._commit(commit_id, method='show')
272 return self._commit(commit_id, method='show')
274
273
275 @LoginRequired()
274 @LoginRequired()
276 @HasRepoPermissionAnyDecorator(
275 @HasRepoPermissionAnyDecorator(
277 'repository.read', 'repository.write', 'repository.admin')
276 'repository.read', 'repository.write', 'repository.admin')
278 @view_config(
277 @view_config(
279 route_name='repo_commit_raw', request_method='GET',
278 route_name='repo_commit_raw', request_method='GET',
280 renderer=None)
279 renderer=None)
281 @view_config(
280 @view_config(
282 route_name='repo_commit_raw_deprecated', request_method='GET',
281 route_name='repo_commit_raw_deprecated', request_method='GET',
283 renderer=None)
282 renderer=None)
284 def repo_commit_raw(self):
283 def repo_commit_raw(self):
285 commit_id = self.request.matchdict['commit_id']
284 commit_id = self.request.matchdict['commit_id']
286 return self._commit(commit_id, method='raw')
285 return self._commit(commit_id, method='raw')
287
286
288 @LoginRequired()
287 @LoginRequired()
289 @HasRepoPermissionAnyDecorator(
288 @HasRepoPermissionAnyDecorator(
290 'repository.read', 'repository.write', 'repository.admin')
289 'repository.read', 'repository.write', 'repository.admin')
291 @view_config(
290 @view_config(
292 route_name='repo_commit_patch', request_method='GET',
291 route_name='repo_commit_patch', request_method='GET',
293 renderer=None)
292 renderer=None)
294 def repo_commit_patch(self):
293 def repo_commit_patch(self):
295 commit_id = self.request.matchdict['commit_id']
294 commit_id = self.request.matchdict['commit_id']
296 return self._commit(commit_id, method='patch')
295 return self._commit(commit_id, method='patch')
297
296
298 @LoginRequired()
297 @LoginRequired()
299 @HasRepoPermissionAnyDecorator(
298 @HasRepoPermissionAnyDecorator(
300 'repository.read', 'repository.write', 'repository.admin')
299 'repository.read', 'repository.write', 'repository.admin')
301 @view_config(
300 @view_config(
302 route_name='repo_commit_download', request_method='GET',
301 route_name='repo_commit_download', request_method='GET',
303 renderer=None)
302 renderer=None)
304 def repo_commit_download(self):
303 def repo_commit_download(self):
305 commit_id = self.request.matchdict['commit_id']
304 commit_id = self.request.matchdict['commit_id']
306 return self._commit(commit_id, method='download')
305 return self._commit(commit_id, method='download')
307
306
308 @LoginRequired()
307 @LoginRequired()
309 @NotAnonymous()
308 @NotAnonymous()
310 @HasRepoPermissionAnyDecorator(
309 @HasRepoPermissionAnyDecorator(
311 'repository.read', 'repository.write', 'repository.admin')
310 'repository.read', 'repository.write', 'repository.admin')
312 @CSRFRequired()
311 @CSRFRequired()
313 @view_config(
312 @view_config(
314 route_name='repo_commit_comment_create', request_method='POST',
313 route_name='repo_commit_comment_create', request_method='POST',
315 renderer='json_ext')
314 renderer='json_ext')
316 def repo_commit_comment_create(self):
315 def repo_commit_comment_create(self):
317 _ = self.request.translate
316 _ = self.request.translate
318 commit_id = self.request.matchdict['commit_id']
317 commit_id = self.request.matchdict['commit_id']
319
318
320 c = self.load_default_context()
319 c = self.load_default_context()
321 status = self.request.POST.get('changeset_status', None)
320 status = self.request.POST.get('changeset_status', None)
322 text = self.request.POST.get('text')
321 text = self.request.POST.get('text')
323 comment_type = self.request.POST.get('comment_type')
322 comment_type = self.request.POST.get('comment_type')
324 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
323 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
325
324
326 if status:
325 if status:
327 text = text or (_('Status change %(transition_icon)s %(status)s')
326 text = text or (_('Status change %(transition_icon)s %(status)s')
328 % {'transition_icon': '>',
327 % {'transition_icon': '>',
329 'status': ChangesetStatus.get_status_lbl(status)})
328 'status': ChangesetStatus.get_status_lbl(status)})
330
329
331 multi_commit_ids = []
330 multi_commit_ids = []
332 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
331 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
333 if _commit_id not in ['', None, EmptyCommit.raw_id]:
332 if _commit_id not in ['', None, EmptyCommit.raw_id]:
334 if _commit_id not in multi_commit_ids:
333 if _commit_id not in multi_commit_ids:
335 multi_commit_ids.append(_commit_id)
334 multi_commit_ids.append(_commit_id)
336
335
337 commit_ids = multi_commit_ids or [commit_id]
336 commit_ids = multi_commit_ids or [commit_id]
338
337
339 comment = None
338 comment = None
340 for current_id in filter(None, commit_ids):
339 for current_id in filter(None, commit_ids):
341 comment = CommentsModel().create(
340 comment = CommentsModel().create(
342 text=text,
341 text=text,
343 repo=self.db_repo.repo_id,
342 repo=self.db_repo.repo_id,
344 user=self._rhodecode_db_user.user_id,
343 user=self._rhodecode_db_user.user_id,
345 commit_id=current_id,
344 commit_id=current_id,
346 f_path=self.request.POST.get('f_path'),
345 f_path=self.request.POST.get('f_path'),
347 line_no=self.request.POST.get('line'),
346 line_no=self.request.POST.get('line'),
348 status_change=(ChangesetStatus.get_status_lbl(status)
347 status_change=(ChangesetStatus.get_status_lbl(status)
349 if status else None),
348 if status else None),
350 status_change_type=status,
349 status_change_type=status,
351 comment_type=comment_type,
350 comment_type=comment_type,
352 resolves_comment_id=resolves_comment_id,
351 resolves_comment_id=resolves_comment_id,
353 auth_user=self._rhodecode_user
352 auth_user=self._rhodecode_user
354 )
353 )
355
354
356 # get status if set !
355 # get status if set !
357 if status:
356 if status:
358 # if latest status was from pull request and it's closed
357 # if latest status was from pull request and it's closed
359 # disallow changing status !
358 # disallow changing status !
360 # dont_allow_on_closed_pull_request = True !
359 # dont_allow_on_closed_pull_request = True !
361
360
362 try:
361 try:
363 ChangesetStatusModel().set_status(
362 ChangesetStatusModel().set_status(
364 self.db_repo.repo_id,
363 self.db_repo.repo_id,
365 status,
364 status,
366 self._rhodecode_db_user.user_id,
365 self._rhodecode_db_user.user_id,
367 comment,
366 comment,
368 revision=current_id,
367 revision=current_id,
369 dont_allow_on_closed_pull_request=True
368 dont_allow_on_closed_pull_request=True
370 )
369 )
371 except StatusChangeOnClosedPullRequestError:
370 except StatusChangeOnClosedPullRequestError:
372 msg = _('Changing the status of a commit associated with '
371 msg = _('Changing the status of a commit associated with '
373 'a closed pull request is not allowed')
372 'a closed pull request is not allowed')
374 log.exception(msg)
373 log.exception(msg)
375 h.flash(msg, category='warning')
374 h.flash(msg, category='warning')
376 raise HTTPFound(h.route_path(
375 raise HTTPFound(h.route_path(
377 'repo_commit', repo_name=self.db_repo_name,
376 'repo_commit', repo_name=self.db_repo_name,
378 commit_id=current_id))
377 commit_id=current_id))
379
378
380 # finalize, commit and redirect
379 # finalize, commit and redirect
381 Session().commit()
380 Session().commit()
382
381
383 data = {
382 data = {
384 'target_id': h.safeid(h.safe_unicode(
383 'target_id': h.safeid(h.safe_unicode(
385 self.request.POST.get('f_path'))),
384 self.request.POST.get('f_path'))),
386 }
385 }
387 if comment:
386 if comment:
388 c.co = comment
387 c.co = comment
389 rendered_comment = render(
388 rendered_comment = render(
390 'rhodecode:templates/changeset/changeset_comment_block.mako',
389 'rhodecode:templates/changeset/changeset_comment_block.mako',
391 self._get_template_context(c), self.request)
390 self._get_template_context(c), self.request)
392
391
393 data.update(comment.get_dict())
392 data.update(comment.get_dict())
394 data.update({'rendered_text': rendered_comment})
393 data.update({'rendered_text': rendered_comment})
395
394
396 return data
395 return data
397
396
398 @LoginRequired()
397 @LoginRequired()
399 @NotAnonymous()
398 @NotAnonymous()
400 @HasRepoPermissionAnyDecorator(
399 @HasRepoPermissionAnyDecorator(
401 'repository.read', 'repository.write', 'repository.admin')
400 'repository.read', 'repository.write', 'repository.admin')
402 @CSRFRequired()
401 @CSRFRequired()
403 @view_config(
402 @view_config(
404 route_name='repo_commit_comment_preview', request_method='POST',
403 route_name='repo_commit_comment_preview', request_method='POST',
405 renderer='string', xhr=True)
404 renderer='string', xhr=True)
406 def repo_commit_comment_preview(self):
405 def repo_commit_comment_preview(self):
407 # Technically a CSRF token is not needed as no state changes with this
406 # Technically a CSRF token is not needed as no state changes with this
408 # call. However, as this is a POST is better to have it, so automated
407 # call. However, as this is a POST is better to have it, so automated
409 # tools don't flag it as potential CSRF.
408 # tools don't flag it as potential CSRF.
410 # Post is required because the payload could be bigger than the maximum
409 # Post is required because the payload could be bigger than the maximum
411 # allowed by GET.
410 # allowed by GET.
412
411
413 text = self.request.POST.get('text')
412 text = self.request.POST.get('text')
414 renderer = self.request.POST.get('renderer') or 'rst'
413 renderer = self.request.POST.get('renderer') or 'rst'
415 if text:
414 if text:
416 return h.render(text, renderer=renderer, mentions=True)
415 return h.render(text, renderer=renderer, mentions=True)
417 return ''
416 return ''
418
417
419 @LoginRequired()
418 @LoginRequired()
420 @NotAnonymous()
419 @NotAnonymous()
421 @HasRepoPermissionAnyDecorator(
420 @HasRepoPermissionAnyDecorator(
422 'repository.read', 'repository.write', 'repository.admin')
421 'repository.read', 'repository.write', 'repository.admin')
423 @CSRFRequired()
422 @CSRFRequired()
424 @view_config(
423 @view_config(
425 route_name='repo_commit_comment_delete', request_method='POST',
424 route_name='repo_commit_comment_delete', request_method='POST',
426 renderer='json_ext')
425 renderer='json_ext')
427 def repo_commit_comment_delete(self):
426 def repo_commit_comment_delete(self):
428 commit_id = self.request.matchdict['commit_id']
427 commit_id = self.request.matchdict['commit_id']
429 comment_id = self.request.matchdict['comment_id']
428 comment_id = self.request.matchdict['comment_id']
430
429
431 comment = ChangesetComment.get_or_404(comment_id)
430 comment = ChangesetComment.get_or_404(comment_id)
432 if not comment:
431 if not comment:
433 log.debug('Comment with id:%s not found, skipping', comment_id)
432 log.debug('Comment with id:%s not found, skipping', comment_id)
434 # comment already deleted in another call probably
433 # comment already deleted in another call probably
435 return True
434 return True
436
435
437 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
436 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
438 super_admin = h.HasPermissionAny('hg.admin')()
437 super_admin = h.HasPermissionAny('hg.admin')()
439 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
438 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
440 is_repo_comment = comment.repo.repo_name == self.db_repo_name
439 is_repo_comment = comment.repo.repo_name == self.db_repo_name
441 comment_repo_admin = is_repo_admin and is_repo_comment
440 comment_repo_admin = is_repo_admin and is_repo_comment
442
441
443 if super_admin or comment_owner or comment_repo_admin:
442 if super_admin or comment_owner or comment_repo_admin:
444 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
443 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
445 Session().commit()
444 Session().commit()
446 return True
445 return True
447 else:
446 else:
448 log.warning('No permissions for user %s to delete comment_id: %s',
447 log.warning('No permissions for user %s to delete comment_id: %s',
449 self._rhodecode_db_user, comment_id)
448 self._rhodecode_db_user, comment_id)
450 raise HTTPNotFound()
449 raise HTTPNotFound()
451
450
452 @LoginRequired()
451 @LoginRequired()
453 @HasRepoPermissionAnyDecorator(
452 @HasRepoPermissionAnyDecorator(
454 'repository.read', 'repository.write', 'repository.admin')
453 'repository.read', 'repository.write', 'repository.admin')
455 @view_config(
454 @view_config(
456 route_name='repo_commit_data', request_method='GET',
455 route_name='repo_commit_data', request_method='GET',
457 renderer='json_ext', xhr=True)
456 renderer='json_ext', xhr=True)
458 def repo_commit_data(self):
457 def repo_commit_data(self):
459 commit_id = self.request.matchdict['commit_id']
458 commit_id = self.request.matchdict['commit_id']
460 self.load_default_context()
459 self.load_default_context()
461
460
462 try:
461 try:
463 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
462 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
464 except CommitDoesNotExistError as e:
463 except CommitDoesNotExistError as e:
465 return EmptyCommit(message=str(e))
464 return EmptyCommit(message=str(e))
466
465
467 @LoginRequired()
466 @LoginRequired()
468 @HasRepoPermissionAnyDecorator(
467 @HasRepoPermissionAnyDecorator(
469 'repository.read', 'repository.write', 'repository.admin')
468 'repository.read', 'repository.write', 'repository.admin')
470 @view_config(
469 @view_config(
471 route_name='repo_commit_children', request_method='GET',
470 route_name='repo_commit_children', request_method='GET',
472 renderer='json_ext', xhr=True)
471 renderer='json_ext', xhr=True)
473 def repo_commit_children(self):
472 def repo_commit_children(self):
474 commit_id = self.request.matchdict['commit_id']
473 commit_id = self.request.matchdict['commit_id']
475 self.load_default_context()
474 self.load_default_context()
476
475
477 try:
476 try:
478 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
477 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
479 children = commit.children
478 children = commit.children
480 except CommitDoesNotExistError:
479 except CommitDoesNotExistError:
481 children = []
480 children = []
482
481
483 result = {"results": children}
482 result = {"results": children}
484 return result
483 return result
485
484
486 @LoginRequired()
485 @LoginRequired()
487 @HasRepoPermissionAnyDecorator(
486 @HasRepoPermissionAnyDecorator(
488 'repository.read', 'repository.write', 'repository.admin')
487 'repository.read', 'repository.write', 'repository.admin')
489 @view_config(
488 @view_config(
490 route_name='repo_commit_parents', request_method='GET',
489 route_name='repo_commit_parents', request_method='GET',
491 renderer='json_ext')
490 renderer='json_ext')
492 def repo_commit_parents(self):
491 def repo_commit_parents(self):
493 commit_id = self.request.matchdict['commit_id']
492 commit_id = self.request.matchdict['commit_id']
494 self.load_default_context()
493 self.load_default_context()
495
494
496 try:
495 try:
497 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
496 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
498 parents = commit.parents
497 parents = commit.parents
499 except CommitDoesNotExistError:
498 except CommitDoesNotExistError:
500 parents = []
499 parents = []
501 result = {"results": parents}
500 result = {"results": parents}
502 return result
501 return result
@@ -1,105 +1,106 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT inmemory module
22 GIT inmemory module
23 """
23 """
24
24
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 from rhodecode.lib.utils import safe_str
26 from rhodecode.lib.utils import safe_str
27 from rhodecode.lib.vcs.backends import base
27 from rhodecode.lib.vcs.backends import base
28
28
29
29
class GitInMemoryCommit(base.BaseInMemoryCommit):

    def commit(self, message, author, parents=None, branch=None, date=None,
               **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created `GitCommit`. Updates repository's
        `commit_ids`.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
          would be derived
        :param date: `datetime.datetime` instance. Defaults to
          ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
          branch would be used.

        :raises `CommitError`: if any error occurs while committing
        """
        self.check_integrity(parents)
        if branch is None:
            branch = self.repository.DEFAULT_BRANCH_NAME

        ENCODING = "UTF-8"

        # base the new commit on the first parent's tree, if there is one
        commit_tree = None
        if self.parents[0]:
            commit_tree = self.parents[0]._commit['tree']

        # serialize added/changed nodes for the remote call; text content is
        # encoded, binary content is passed through untouched
        updated = []
        for node in self.added + self.changed:
            if not node.is_binary:
                content = node.content.encode(ENCODING)
            else:
                content = node.content
            updated.append({
                'path': node.path,
                'node_path': node.name.encode(ENCODING),
                'content': content,
                'mode': node.mode,
            })

        removed = [node.path for node in self.removed]

        date, tz = date_to_timestamp_plus_offset(date)

        # TODO: johbo: Make kwargs explicit and check if this is needed.
        author_time = kwargs.pop('author_time', date)
        author_tz = kwargs.pop('author_timezone', tz)

        commit_data = {
            'parents': [p._commit['id'] for p in self.parents if p],
            'author': safe_str(author),
            'committer': safe_str(author),
            'encoding': ENCODING,
            'message': safe_str(message),
            'commit_time': int(date),
            'author_time': int(author_time),
            'commit_timezone': tz,
            'author_timezone': author_tz,
        }

        # the remote performs the actual object creation and ref update
        commit_id = self.repository._remote.commit(
            commit_data, branch, commit_tree, updated, removed)

        # Update vcs repository object; only register ids not already cached
        # so the id -> index mapping stays consistent
        if commit_id not in self.repository.commit_ids:
            self.repository.commit_ids.append(commit_id)
            self.repository._rebuild_cache(self.repository.commit_ids)

        # invalidate parsed refs after commit
        self.repository._refs = self.repository._get_refs()
        self.repository.branches = self.repository._get_branches()
        tip = self.repository.get_commit()
        self.reset()
        return tip
@@ -1,1012 +1,1031 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
38 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
45
46
46
# Intended to match a 12-char (short) or 40-char (full) hex commit id.
# NOTE(review): the outer "[" ... "]" turn parts of this pattern into a
# character class instead of a group — "(?:...)" was probably intended;
# verify the pattern actually rejects non-sha strings before tightening it.
SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')

log = logging.getLogger(__name__)
50
50
51
51
52 class GitRepository(BaseRepository):
52 class GitRepository(BaseRepository):
53 """
53 """
54 Git repository backend.
54 Git repository backend.
55 """
55 """
56 DEFAULT_BRANCH_NAME = 'master'
56 DEFAULT_BRANCH_NAME = 'master'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        :param repo_path: local path of the repository; stored absolute.
        :param config: vcs ``Config``; defaults to ``get_default_config()``.
        :param create: create a new repository at ``repo_path``.
        :param src_url: optional URL to clone/fetch from after creation.
        :param do_workspace_checkout: also check out a working copy
            (non-bare repositories only).
        :param with_wire: extra wire-protocol options for the remote.
        :param bare: initialize/treat the repository as bare.
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire

        # validates the path / performs creation before caches are set up
        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75
75
76 @LazyProperty
76 @LazyProperty
77 def bare(self):
77 def bare(self):
78 return self._remote.bare()
78 return self._remote.bare()
79
79
80 @LazyProperty
80 @LazyProperty
81 def head(self):
81 def head(self):
82 return self._remote.head()
82 return self._remote.head()
83
83
84 @LazyProperty
84 @LazyProperty
85 def commit_ids(self):
85 def commit_ids(self):
86 """
86 """
87 Returns list of commit ids, in ascending order. Being lazy
87 Returns list of commit ids, in ascending order. Being lazy
88 attribute allows external tools to inject commit ids from cache.
88 attribute allows external tools to inject commit ids from cache.
89 """
89 """
90 commit_ids = self._get_all_commit_ids()
90 commit_ids = self._get_all_commit_ids()
91 self._rebuild_cache(commit_ids)
91 self._rebuild_cache(commit_ids)
92 return commit_ids
92 return commit_ids
93
93
94 def _rebuild_cache(self, commit_ids):
94 def _rebuild_cache(self, commit_ids):
95 self._commit_ids = dict((commit_id, index)
95 self._commit_ids = dict((commit_id, index)
96 for index, commit_id in enumerate(commit_ids))
96 for index, commit_id in enumerate(commit_ids))
97
97
98 def run_git_command(self, cmd, **opts):
98 def run_git_command(self, cmd, **opts):
99 """
99 """
100 Runs given ``cmd`` as git command and returns tuple
100 Runs given ``cmd`` as git command and returns tuple
101 (stdout, stderr).
101 (stdout, stderr).
102
102
103 :param cmd: git command to be executed
103 :param cmd: git command to be executed
104 :param opts: env options to pass into Subprocess command
104 :param opts: env options to pass into Subprocess command
105 """
105 """
106 if not isinstance(cmd, list):
106 if not isinstance(cmd, list):
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108
108
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 out, err = self._remote.run_git_command(cmd, **opts)
110 out, err = self._remote.run_git_command(cmd, **opts)
111 if err and not skip_stderr_log:
111 if err and not skip_stderr_log:
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 return out, err
113 return out, err
114
114
115 @staticmethod
115 @staticmethod
116 def check_url(url, config):
116 def check_url(url, config):
117 """
117 """
118 Function will check given url and try to verify if it's a valid
118 Function will check given url and try to verify if it's a valid
119 link. Sometimes it may happened that git will issue basic
119 link. Sometimes it may happened that git will issue basic
120 auth request that can cause whole API to hang when used from python
120 auth request that can cause whole API to hang when used from python
121 or other external calls.
121 or other external calls.
122
122
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
123 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 when the return code is non 200
124 when the return code is non 200
125 """
125 """
126 # check first if it's not an url
126 # check first if it's not an url
127 if os.path.isdir(url) or url.startswith('file:'):
127 if os.path.isdir(url) or url.startswith('file:'):
128 return True
128 return True
129
129
130 if '+' in url.split('://', 1)[0]:
130 if '+' in url.split('://', 1)[0]:
131 url = url.split('+', 1)[1]
131 url = url.split('+', 1)[1]
132
132
133 # Request the _remote to verify the url
133 # Request the _remote to verify the url
134 return connection.Git.check_url(url, config.serialize())
134 return connection.Git.check_url(url, config.serialize())
135
135
136 @staticmethod
136 @staticmethod
137 def is_valid_repository(path):
137 def is_valid_repository(path):
138 if os.path.isdir(os.path.join(path, '.git')):
138 if os.path.isdir(os.path.join(path, '.git')):
139 return True
139 return True
140 # check case of bare repository
140 # check case of bare repository
141 try:
141 try:
142 GitRepository(path)
142 GitRepository(path)
143 return True
143 return True
144 except VCSError:
144 except VCSError:
145 pass
145 pass
146 return False
146 return False
147
147
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Validate an existing repository on disk, or create a new one.

        :param create: create a new repository at ``self.path``; the
            location must not already exist.
        :param src_url: optional URL to clone/fetch from; verified before
            any filesystem changes are made.
        :param do_workspace_checkout: also check out a working copy;
            invalid for bare repositories.
        :param bare: initialize as a bare repository.
        :raises RepositoryError: on conflicting arguments, invalid paths,
            or OS-level failures.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                # opening an existing repository: just validate the path
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
187
187
    def _get_all_commit_ids(self, filters=None):
        """
        Return all commit ids reachable from branches and tags as a list
        of sha strings, oldest first (``rev-list --reverse``).

        :param filters: optional dict; understood keys are ``since`` and
            ``until`` (passed straight to ``git rev-list``) and
            ``branch_name`` (restricts the walk to that branch; tags are
            still included).
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors
        head = self._remote.head(show_exc=False)
        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # a concrete branch replaces the all-branches filter
                rev_filter = ['--tags']
                extra_filter.append(filters['branch_name'])
            rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
224
224
225 def _get_commit_id(self, commit_id_or_idx):
225 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
226 def is_null(value):
226 def is_null(value):
227 return len(value) == commit_id_or_idx.count('0')
227 return len(value) == commit_id_or_idx.count('0')
228
228
229 if self.is_empty():
230 raise EmptyRepositoryError("There are no commits yet")
231
232 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
229 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
233 return self.commit_ids[-1]
230 return self.commit_ids[-1]
234
231
235 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
232 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
236 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
233 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
237 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
234 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
238 try:
235 try:
239 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
236 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
240 except Exception:
237 except Exception:
241 msg = "Commit %s does not exist for %s" % (
238 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
242 commit_id_or_idx, self)
243 raise CommitDoesNotExistError(msg)
239 raise CommitDoesNotExistError(msg)
244
240
245 elif is_bstr:
241 elif is_bstr:
246 # check full path ref, eg. refs/heads/master
242 # check full path ref, eg. refs/heads/master
247 ref_id = self._refs.get(commit_id_or_idx)
243 ref_id = self._refs.get(commit_id_or_idx)
248 if ref_id:
244 if ref_id:
249 return ref_id
245 return ref_id
250
246
251 # check branch name
247 # check branch name
252 branch_ids = self.branches.values()
248 branch_ids = self.branches.values()
253 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
249 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
254 if ref_id:
250 if ref_id:
255 return ref_id
251 return ref_id
256
252
257 # check tag name
253 # check tag name
258 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
254 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
259 if ref_id:
255 if ref_id:
260 return ref_id
256 return ref_id
261
257
262 if (not SHA_PATTERN.match(commit_id_or_idx) or
258 if (not SHA_PATTERN.match(commit_id_or_idx) or
263 commit_id_or_idx not in self.commit_ids):
259 commit_id_or_idx not in self.commit_ids):
264 msg = "Commit %s does not exist for %s" % (
260 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
265 commit_id_or_idx, self)
266 raise CommitDoesNotExistError(msg)
261 raise CommitDoesNotExistError(msg)
267
262
268 # Ensure we return full id
263 # Ensure we return full id
269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
264 if not SHA_PATTERN.match(str(commit_id_or_idx)):
270 raise CommitDoesNotExistError(
265 raise CommitDoesNotExistError(
271 "Given commit id %s not recognized" % commit_id_or_idx)
266 "Given commit id %s not recognized" % commit_id_or_idx)
272 return commit_id_or_idx
267 return commit_id_or_idx
273
268
274 def get_hook_location(self):
269 def get_hook_location(self):
275 """
270 """
276 returns absolute path to location where hooks are stored
271 returns absolute path to location where hooks are stored
277 """
272 """
278 loc = os.path.join(self.path, 'hooks')
273 loc = os.path.join(self.path, 'hooks')
279 if not self.bare:
274 if not self.bare:
280 loc = os.path.join(self.path, '.git', 'hooks')
275 loc = os.path.join(self.path, '.git', 'hooks')
281 return loc
276 return loc
282
277
283 @LazyProperty
278 @LazyProperty
284 def last_change(self):
279 def last_change(self):
285 """
280 """
286 Returns last change made on this repository as
281 Returns last change made on this repository as
287 `datetime.datetime` object.
282 `datetime.datetime` object.
288 """
283 """
289 try:
284 try:
290 return self.get_commit().date
285 return self.get_commit().date
291 except RepositoryError:
286 except RepositoryError:
292 tzoffset = makedate()[1]
287 tzoffset = makedate()[1]
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
288 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294
289
295 def _get_fs_mtime(self):
290 def _get_fs_mtime(self):
296 idx_loc = '' if self.bare else '.git'
291 idx_loc = '' if self.bare else '.git'
297 # fallback to filesystem
292 # fallback to filesystem
298 in_path = os.path.join(self.path, idx_loc, "index")
293 in_path = os.path.join(self.path, idx_loc, "index")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
294 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 if os.path.exists(in_path):
295 if os.path.exists(in_path):
301 return os.stat(in_path).st_mtime
296 return os.stat(in_path).st_mtime
302 else:
297 else:
303 return os.stat(he_path).st_mtime
298 return os.stat(he_path).st_mtime
304
299
305 @LazyProperty
300 @LazyProperty
306 def description(self):
301 def description(self):
307 description = self._remote.get_description()
302 description = self._remote.get_description()
308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
303 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309
304
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
305 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 if self.is_empty():
306 if self.is_empty():
312 return OrderedDict()
307 return OrderedDict()
313
308
314 result = []
309 result = []
315 for ref, sha in self._refs.iteritems():
310 for ref, sha in self._refs.iteritems():
316 if ref.startswith(prefix):
311 if ref.startswith(prefix):
317 ref_name = ref
312 ref_name = ref
318 if strip_prefix:
313 if strip_prefix:
319 ref_name = ref[len(prefix):]
314 ref_name = ref[len(prefix):]
320 result.append((safe_unicode(ref_name), sha))
315 result.append((safe_unicode(ref_name), sha))
321
316
322 def get_name(entry):
317 def get_name(entry):
323 return entry[0]
318 return entry[0]
324
319
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
320 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326
321
327 def _get_branches(self):
322 def _get_branches(self):
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
323 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329
324
330 @LazyProperty
325 @LazyProperty
331 def branches(self):
326 def branches(self):
332 return self._get_branches()
327 return self._get_branches()
333
328
334 @LazyProperty
329 @LazyProperty
335 def branches_closed(self):
330 def branches_closed(self):
336 return {}
331 return {}
337
332
338 @LazyProperty
333 @LazyProperty
339 def bookmarks(self):
334 def bookmarks(self):
340 return {}
335 return {}
341
336
342 @LazyProperty
337 @LazyProperty
343 def branches_all(self):
338 def branches_all(self):
344 all_branches = {}
339 all_branches = {}
345 all_branches.update(self.branches)
340 all_branches.update(self.branches)
346 all_branches.update(self.branches_closed)
341 all_branches.update(self.branches_closed)
347 return all_branches
342 return all_branches
348
343
349 @LazyProperty
344 @LazyProperty
350 def tags(self):
345 def tags(self):
351 return self._get_tags()
346 return self._get_tags()
352
347
353 def _get_tags(self):
348 def _get_tags(self):
354 return self._get_refs_entries(
349 return self._get_refs_entries(
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
350 prefix='refs/tags/', strip_prefix=True, reverse=True)
356
351
357 def tag(self, name, user, commit_id=None, message=None, date=None,
352 def tag(self, name, user, commit_id=None, message=None, date=None,
358 **kwargs):
353 **kwargs):
359 # TODO: fix this method to apply annotated tags correct with message
354 # TODO: fix this method to apply annotated tags correct with message
360 """
355 """
361 Creates and returns a tag for the given ``commit_id``.
356 Creates and returns a tag for the given ``commit_id``.
362
357
363 :param name: name for new tag
358 :param name: name for new tag
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
359 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
365 :param commit_id: commit id for which new tag would be created
360 :param commit_id: commit id for which new tag would be created
366 :param message: message of the tag's commit
361 :param message: message of the tag's commit
367 :param date: date of tag's commit
362 :param date: date of tag's commit
368
363
369 :raises TagAlreadyExistError: if tag with same name already exists
364 :raises TagAlreadyExistError: if tag with same name already exists
370 """
365 """
371 if name in self.tags:
366 if name in self.tags:
372 raise TagAlreadyExistError("Tag %s already exists" % name)
367 raise TagAlreadyExistError("Tag %s already exists" % name)
373 commit = self.get_commit(commit_id=commit_id)
368 commit = self.get_commit(commit_id=commit_id)
374 message = message or "Added tag %s for commit %s" % (
369 message = message or "Added tag %s for commit %s" % (
375 name, commit.raw_id)
370 name, commit.raw_id)
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
371 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
377
372
378 self._refs = self._get_refs()
373 self._refs = self._get_refs()
379 self.tags = self._get_tags()
374 self.tags = self._get_tags()
380 return commit
375 return commit
381
376
382 def remove_tag(self, name, user, message=None, date=None):
377 def remove_tag(self, name, user, message=None, date=None):
383 """
378 """
384 Removes tag with the given ``name``.
379 Removes tag with the given ``name``.
385
380
386 :param name: name of the tag to be removed
381 :param name: name of the tag to be removed
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
382 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 :param message: message of the tag's removal commit
383 :param message: message of the tag's removal commit
389 :param date: date of tag's removal commit
384 :param date: date of tag's removal commit
390
385
391 :raises TagDoesNotExistError: if tag with given name does not exists
386 :raises TagDoesNotExistError: if tag with given name does not exists
392 """
387 """
393 if name not in self.tags:
388 if name not in self.tags:
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
389 raise TagDoesNotExistError("Tag %s does not exist" % name)
395 tagpath = vcspath.join(
390 tagpath = vcspath.join(
396 self._remote.get_refs_path(), 'refs', 'tags', name)
391 self._remote.get_refs_path(), 'refs', 'tags', name)
397 try:
392 try:
398 os.remove(tagpath)
393 os.remove(tagpath)
399 self._refs = self._get_refs()
394 self._refs = self._get_refs()
400 self.tags = self._get_tags()
395 self.tags = self._get_tags()
401 except OSError as e:
396 except OSError as e:
402 raise RepositoryError(e.strerror)
397 raise RepositoryError(e.strerror)
403
398
404 def _get_refs(self):
399 def _get_refs(self):
405 return self._remote.get_refs()
400 return self._remote.get_refs()
406
401
407 @LazyProperty
402 @LazyProperty
408 def _refs(self):
403 def _refs(self):
409 return self._get_refs()
404 return self._get_refs()
410
405
411 @property
406 @property
412 def _ref_tree(self):
407 def _ref_tree(self):
413 node = tree = {}
408 node = tree = {}
414 for ref, sha in self._refs.iteritems():
409 for ref, sha in self._refs.iteritems():
415 path = ref.split('/')
410 path = ref.split('/')
416 for bit in path[:-1]:
411 for bit in path[:-1]:
417 node = node.setdefault(bit, {})
412 node = node.setdefault(bit, {})
418 node[path[-1]] = sha
413 node[path[-1]] = sha
419 node = tree
414 node = tree
420 return tree
415 return tree
421
416
422 def get_remote_ref(self, ref_name):
417 def get_remote_ref(self, ref_name):
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
418 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 try:
419 try:
425 return self._refs[ref_key]
420 return self._refs[ref_key]
426 except Exception:
421 except Exception:
427 return
422 return
428
423
429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
430 """
425 """
431 Returns `GitCommit` object representing commit from git repository
426 Returns `GitCommit` object representing commit from git repository
432 at the given `commit_id` or head (most recent commit) if None given.
427 at the given `commit_id` or head (most recent commit) if None given.
433 """
428 """
429 if self.is_empty():
430 raise EmptyRepositoryError("There are no commits yet")
431
434 if commit_id is not None:
432 if commit_id is not None:
435 self._validate_commit_id(commit_id)
433 self._validate_commit_id(commit_id)
434 try:
435 # we have cached idx, use it without contacting the remote
436 idx = self._commit_ids[commit_id]
437 return GitCommit(self, commit_id, idx, pre_load=pre_load)
438 except KeyError:
439 pass
440
436 elif commit_idx is not None:
441 elif commit_idx is not None:
437 self._validate_commit_idx(commit_idx)
442 self._validate_commit_idx(commit_idx)
443 try:
444 _commit_id = self.commit_ids[commit_idx]
445 if commit_idx < 0:
446 commit_idx = self.commit_ids.index(_commit_id)
447 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
448 except IndexError:
438 commit_id = commit_idx
449 commit_id = commit_idx
439 commit_id = self._get_commit_id(commit_id)
450 else:
440 try:
451 commit_id = "tip"
452
453 commit_id = self._lookup_commit(commit_id)
454 remote_idx = None
441 if translate_tag:
455 if translate_tag:
442 # Need to call remote to translate id for tagging scenario
456 # Need to call remote to translate id for tagging scenario
443 commit_id = self._remote.get_object(commit_id)["commit_id"]
457 remote_data = self._remote.get_object(commit_id)
458 commit_id = remote_data["commit_id"]
459 remote_idx = remote_data["idx"]
460
461 try:
444 idx = self._commit_ids[commit_id]
462 idx = self._commit_ids[commit_id]
445 except KeyError:
463 except KeyError:
446 raise RepositoryError("Cannot get object with id %s" % commit_id)
464 idx = remote_idx or 0
447
465
448 return GitCommit(self, commit_id, idx, pre_load=pre_load)
466 return GitCommit(self, commit_id, idx, pre_load=pre_load)
449
467
450 def get_commits(
468 def get_commits(
451 self, start_id=None, end_id=None, start_date=None, end_date=None,
469 self, start_id=None, end_id=None, start_date=None, end_date=None,
452 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
453 """
471 """
454 Returns generator of `GitCommit` objects from start to end (both
472 Returns generator of `GitCommit` objects from start to end (both
455 are inclusive), in ascending date order.
473 are inclusive), in ascending date order.
456
474
457 :param start_id: None, str(commit_id)
475 :param start_id: None, str(commit_id)
458 :param end_id: None, str(commit_id)
476 :param end_id: None, str(commit_id)
459 :param start_date: if specified, commits with commit date less than
477 :param start_date: if specified, commits with commit date less than
460 ``start_date`` would be filtered out from returned set
478 ``start_date`` would be filtered out from returned set
461 :param end_date: if specified, commits with commit date greater than
479 :param end_date: if specified, commits with commit date greater than
462 ``end_date`` would be filtered out from returned set
480 ``end_date`` would be filtered out from returned set
463 :param branch_name: if specified, commits not reachable from given
481 :param branch_name: if specified, commits not reachable from given
464 branch would be filtered out from returned set
482 branch would be filtered out from returned set
465 :param show_hidden: Show hidden commits such as obsolete or hidden from
483 :param show_hidden: Show hidden commits such as obsolete or hidden from
466 Mercurial evolve
484 Mercurial evolve
467 :raise BranchDoesNotExistError: If given `branch_name` does not
485 :raise BranchDoesNotExistError: If given `branch_name` does not
468 exist.
486 exist.
469 :raise CommitDoesNotExistError: If commits for given `start` or
487 :raise CommitDoesNotExistError: If commits for given `start` or
470 `end` could not be found.
488 `end` could not be found.
471
489
472 """
490 """
473 if self.is_empty():
491 if self.is_empty():
474 raise EmptyRepositoryError("There are no commits yet")
492 raise EmptyRepositoryError("There are no commits yet")
493
475 self._validate_branch_name(branch_name)
494 self._validate_branch_name(branch_name)
476
495
477 if start_id is not None:
496 if start_id is not None:
478 self._validate_commit_id(start_id)
497 self._validate_commit_id(start_id)
479 if end_id is not None:
498 if end_id is not None:
480 self._validate_commit_id(end_id)
499 self._validate_commit_id(end_id)
481
500
482 start_raw_id = self._get_commit_id(start_id)
501 start_raw_id = self._lookup_commit(start_id)
483 start_pos = self._commit_ids[start_raw_id] if start_id else None
502 start_pos = self._commit_ids[start_raw_id] if start_id else None
484 end_raw_id = self._get_commit_id(end_id)
503 end_raw_id = self._lookup_commit(end_id)
485 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
504 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
486
505
487 if None not in [start_id, end_id] and start_pos > end_pos:
506 if None not in [start_id, end_id] and start_pos > end_pos:
488 raise RepositoryError(
507 raise RepositoryError(
489 "Start commit '%s' cannot be after end commit '%s'" %
508 "Start commit '%s' cannot be after end commit '%s'" %
490 (start_id, end_id))
509 (start_id, end_id))
491
510
492 if end_pos is not None:
511 if end_pos is not None:
493 end_pos += 1
512 end_pos += 1
494
513
495 filter_ = []
514 filter_ = []
496 if branch_name:
515 if branch_name:
497 filter_.append({'branch_name': branch_name})
516 filter_.append({'branch_name': branch_name})
498 if start_date and not end_date:
517 if start_date and not end_date:
499 filter_.append({'since': start_date})
518 filter_.append({'since': start_date})
500 if end_date and not start_date:
519 if end_date and not start_date:
501 filter_.append({'until': end_date})
520 filter_.append({'until': end_date})
502 if start_date and end_date:
521 if start_date and end_date:
503 filter_.append({'since': start_date})
522 filter_.append({'since': start_date})
504 filter_.append({'until': end_date})
523 filter_.append({'until': end_date})
505
524
506 # if start_pos or end_pos:
525 # if start_pos or end_pos:
507 # filter_.append({'start': start_pos})
526 # filter_.append({'start': start_pos})
508 # filter_.append({'end': end_pos})
527 # filter_.append({'end': end_pos})
509
528
510 if filter_:
529 if filter_:
511 revfilters = {
530 revfilters = {
512 'branch_name': branch_name,
531 'branch_name': branch_name,
513 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
532 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
514 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
533 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
515 'start': start_pos,
534 'start': start_pos,
516 'end': end_pos,
535 'end': end_pos,
517 }
536 }
518 commit_ids = self._get_all_commit_ids(filters=revfilters)
537 commit_ids = self._get_all_commit_ids(filters=revfilters)
519
538
520 # pure python stuff, it's slow due to walker walking whole repo
539 # pure python stuff, it's slow due to walker walking whole repo
521 # def get_revs(walker):
540 # def get_revs(walker):
522 # for walker_entry in walker:
541 # for walker_entry in walker:
523 # yield walker_entry.commit.id
542 # yield walker_entry.commit.id
524 # revfilters = {}
543 # revfilters = {}
525 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
544 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
526 else:
545 else:
527 commit_ids = self.commit_ids
546 commit_ids = self.commit_ids
528
547
529 if start_pos or end_pos:
548 if start_pos or end_pos:
530 commit_ids = commit_ids[start_pos: end_pos]
549 commit_ids = commit_ids[start_pos: end_pos]
531
550
532 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
551 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
533 translate_tag=translate_tags)
552 translate_tag=translate_tags)
534
553
535 def get_diff(
554 def get_diff(
536 self, commit1, commit2, path='', ignore_whitespace=False,
555 self, commit1, commit2, path='', ignore_whitespace=False,
537 context=3, path1=None):
556 context=3, path1=None):
538 """
557 """
539 Returns (git like) *diff*, as plain text. Shows changes introduced by
558 Returns (git like) *diff*, as plain text. Shows changes introduced by
540 ``commit2`` since ``commit1``.
559 ``commit2`` since ``commit1``.
541
560
542 :param commit1: Entry point from which diff is shown. Can be
561 :param commit1: Entry point from which diff is shown. Can be
543 ``self.EMPTY_COMMIT`` - in this case, patch showing all
562 ``self.EMPTY_COMMIT`` - in this case, patch showing all
544 the changes since empty state of the repository until ``commit2``
563 the changes since empty state of the repository until ``commit2``
545 :param commit2: Until which commits changes should be shown.
564 :param commit2: Until which commits changes should be shown.
546 :param ignore_whitespace: If set to ``True``, would not show whitespace
565 :param ignore_whitespace: If set to ``True``, would not show whitespace
547 changes. Defaults to ``False``.
566 changes. Defaults to ``False``.
548 :param context: How many lines before/after changed lines should be
567 :param context: How many lines before/after changed lines should be
549 shown. Defaults to ``3``.
568 shown. Defaults to ``3``.
550 """
569 """
551 self._validate_diff_commits(commit1, commit2)
570 self._validate_diff_commits(commit1, commit2)
552 if path1 is not None and path1 != path:
571 if path1 is not None and path1 != path:
553 raise ValueError("Diff of two different paths not supported.")
572 raise ValueError("Diff of two different paths not supported.")
554
573
555 flags = [
574 flags = [
556 '-U%s' % context, '--full-index', '--binary', '-p',
575 '-U%s' % context, '--full-index', '--binary', '-p',
557 '-M', '--abbrev=40']
576 '-M', '--abbrev=40']
558 if ignore_whitespace:
577 if ignore_whitespace:
559 flags.append('-w')
578 flags.append('-w')
560
579
561 if commit1 == self.EMPTY_COMMIT:
580 if commit1 == self.EMPTY_COMMIT:
562 cmd = ['show'] + flags + [commit2.raw_id]
581 cmd = ['show'] + flags + [commit2.raw_id]
563 else:
582 else:
564 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
583 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
565
584
566 if path:
585 if path:
567 cmd.extend(['--', path])
586 cmd.extend(['--', path])
568
587
569 stdout, __ = self.run_git_command(cmd)
588 stdout, __ = self.run_git_command(cmd)
570 # If we used 'show' command, strip first few lines (until actual diff
589 # If we used 'show' command, strip first few lines (until actual diff
571 # starts)
590 # starts)
572 if commit1 == self.EMPTY_COMMIT:
591 if commit1 == self.EMPTY_COMMIT:
573 lines = stdout.splitlines()
592 lines = stdout.splitlines()
574 x = 0
593 x = 0
575 for line in lines:
594 for line in lines:
576 if line.startswith('diff'):
595 if line.startswith('diff'):
577 break
596 break
578 x += 1
597 x += 1
579 # Append new line just like 'diff' command do
598 # Append new line just like 'diff' command do
580 stdout = '\n'.join(lines[x:]) + '\n'
599 stdout = '\n'.join(lines[x:]) + '\n'
581 return GitDiff(stdout)
600 return GitDiff(stdout)
582
601
583 def strip(self, commit_id, branch_name):
602 def strip(self, commit_id, branch_name):
584 commit = self.get_commit(commit_id=commit_id)
603 commit = self.get_commit(commit_id=commit_id)
585 if commit.merge:
604 if commit.merge:
586 raise Exception('Cannot reset to merge commit')
605 raise Exception('Cannot reset to merge commit')
587
606
588 # parent is going to be the new head now
607 # parent is going to be the new head now
589 commit = commit.parents[0]
608 commit = commit.parents[0]
590 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
609 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
591
610
592 self.commit_ids = self._get_all_commit_ids()
611 self.commit_ids = self._get_all_commit_ids()
593 self._rebuild_cache(self.commit_ids)
612 self._rebuild_cache(self.commit_ids)
594
613
595 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
614 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
596 if commit_id1 == commit_id2:
615 if commit_id1 == commit_id2:
597 return commit_id1
616 return commit_id1
598
617
599 if self != repo2:
618 if self != repo2:
600 commits = self._remote.get_missing_revs(
619 commits = self._remote.get_missing_revs(
601 commit_id1, commit_id2, repo2.path)
620 commit_id1, commit_id2, repo2.path)
602 if commits:
621 if commits:
603 commit = repo2.get_commit(commits[-1])
622 commit = repo2.get_commit(commits[-1])
604 if commit.parents:
623 if commit.parents:
605 ancestor_id = commit.parents[0].raw_id
624 ancestor_id = commit.parents[0].raw_id
606 else:
625 else:
607 ancestor_id = None
626 ancestor_id = None
608 else:
627 else:
609 # no commits from other repo, ancestor_id is the commit_id2
628 # no commits from other repo, ancestor_id is the commit_id2
610 ancestor_id = commit_id2
629 ancestor_id = commit_id2
611 else:
630 else:
612 output, __ = self.run_git_command(
631 output, __ = self.run_git_command(
613 ['merge-base', commit_id1, commit_id2])
632 ['merge-base', commit_id1, commit_id2])
614 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
633 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
615
634
616 return ancestor_id
635 return ancestor_id
617
636
618 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
637 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
619 repo1 = self
638 repo1 = self
620 ancestor_id = None
639 ancestor_id = None
621
640
622 if commit_id1 == commit_id2:
641 if commit_id1 == commit_id2:
623 commits = []
642 commits = []
624 elif repo1 != repo2:
643 elif repo1 != repo2:
625 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
644 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
626 repo2.path)
645 repo2.path)
627 commits = [
646 commits = [
628 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
647 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
629 for commit_id in reversed(missing_ids)]
648 for commit_id in reversed(missing_ids)]
630 else:
649 else:
631 output, __ = repo1.run_git_command(
650 output, __ = repo1.run_git_command(
632 ['log', '--reverse', '--pretty=format: %H', '-s',
651 ['log', '--reverse', '--pretty=format: %H', '-s',
633 '%s..%s' % (commit_id1, commit_id2)])
652 '%s..%s' % (commit_id1, commit_id2)])
634 commits = [
653 commits = [
635 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
654 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
636 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
655 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
637
656
638 return commits
657 return commits
639
658
640 @LazyProperty
659 @LazyProperty
641 def in_memory_commit(self):
660 def in_memory_commit(self):
642 """
661 """
643 Returns ``GitInMemoryCommit`` object for this repository.
662 Returns ``GitInMemoryCommit`` object for this repository.
644 """
663 """
645 return GitInMemoryCommit(self)
664 return GitInMemoryCommit(self)
646
665
647 def pull(self, url, commit_ids=None, update_after=False):
666 def pull(self, url, commit_ids=None, update_after=False):
648 """
667 """
649 Pull changes from external location. Pull is different in GIT
668 Pull changes from external location. Pull is different in GIT
650 that fetch since it's doing a checkout
669 that fetch since it's doing a checkout
651
670
652 :param commit_ids: Optional. Can be set to a list of commit ids
671 :param commit_ids: Optional. Can be set to a list of commit ids
653 which shall be pulled from the other repository.
672 which shall be pulled from the other repository.
654 """
673 """
655 refs = None
674 refs = None
656 if commit_ids is not None:
675 if commit_ids is not None:
657 remote_refs = self._remote.get_remote_refs(url)
676 remote_refs = self._remote.get_remote_refs(url)
658 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
677 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
659 self._remote.pull(url, refs=refs, update_after=update_after)
678 self._remote.pull(url, refs=refs, update_after=update_after)
660 self._remote.invalidate_vcs_cache()
679 self._remote.invalidate_vcs_cache()
661
680
662 def fetch(self, url, commit_ids=None):
681 def fetch(self, url, commit_ids=None):
663 """
682 """
664 Fetch all git objects from external location.
683 Fetch all git objects from external location.
665 """
684 """
666 self._remote.sync_fetch(url, refs=commit_ids)
685 self._remote.sync_fetch(url, refs=commit_ids)
667 self._remote.invalidate_vcs_cache()
686 self._remote.invalidate_vcs_cache()
668
687
669 def push(self, url):
688 def push(self, url):
670 refs = None
689 refs = None
671 self._remote.sync_push(url, refs=refs)
690 self._remote.sync_push(url, refs=refs)
672
691
673 def set_refs(self, ref_name, commit_id):
692 def set_refs(self, ref_name, commit_id):
674 self._remote.set_refs(ref_name, commit_id)
693 self._remote.set_refs(ref_name, commit_id)
675
694
676 def remove_ref(self, ref_name):
695 def remove_ref(self, ref_name):
677 self._remote.remove_ref(ref_name)
696 self._remote.remove_ref(ref_name)
678
697
679 def _update_server_info(self):
698 def _update_server_info(self):
680 """
699 """
681 runs gits update-server-info command in this repo instance
700 runs gits update-server-info command in this repo instance
682 """
701 """
683 self._remote.update_server_info()
702 self._remote.update_server_info()
684
703
685 def _current_branch(self):
704 def _current_branch(self):
686 """
705 """
687 Return the name of the current branch.
706 Return the name of the current branch.
688
707
689 It only works for non bare repositories (i.e. repositories with a
708 It only works for non bare repositories (i.e. repositories with a
690 working copy)
709 working copy)
691 """
710 """
692 if self.bare:
711 if self.bare:
693 raise RepositoryError('Bare git repos do not have active branches')
712 raise RepositoryError('Bare git repos do not have active branches')
694
713
695 if self.is_empty():
714 if self.is_empty():
696 return None
715 return None
697
716
698 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
699 return stdout.strip()
718 return stdout.strip()
700
719
701 def _checkout(self, branch_name, create=False, force=False):
720 def _checkout(self, branch_name, create=False, force=False):
702 """
721 """
703 Checkout a branch in the working directory.
722 Checkout a branch in the working directory.
704
723
705 It tries to create the branch if create is True, failing if the branch
724 It tries to create the branch if create is True, failing if the branch
706 already exists.
725 already exists.
707
726
708 It only works for non bare repositories (i.e. repositories with a
727 It only works for non bare repositories (i.e. repositories with a
709 working copy)
728 working copy)
710 """
729 """
711 if self.bare:
730 if self.bare:
712 raise RepositoryError('Cannot checkout branches in a bare git repo')
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
713
732
714 cmd = ['checkout']
733 cmd = ['checkout']
715 if force:
734 if force:
716 cmd.append('-f')
735 cmd.append('-f')
717 if create:
736 if create:
718 cmd.append('-b')
737 cmd.append('-b')
719 cmd.append(branch_name)
738 cmd.append(branch_name)
720 self.run_git_command(cmd, fail_on_stderr=False)
739 self.run_git_command(cmd, fail_on_stderr=False)
721
740
722 def _identify(self):
741 def _identify(self):
723 """
742 """
724 Return the current state of the working directory.
743 Return the current state of the working directory.
725 """
744 """
726 if self.bare:
745 if self.bare:
727 raise RepositoryError('Bare git repos do not have active branches')
746 raise RepositoryError('Bare git repos do not have active branches')
728
747
729 if self.is_empty():
748 if self.is_empty():
730 return None
749 return None
731
750
732 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
751 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
733 return stdout.strip()
752 return stdout.strip()
734
753
735 def _local_clone(self, clone_path, branch_name, source_branch=None):
754 def _local_clone(self, clone_path, branch_name, source_branch=None):
736 """
755 """
737 Create a local clone of the current repo.
756 Create a local clone of the current repo.
738 """
757 """
739 # N.B.(skreft): the --branch option is required as otherwise the shallow
758 # N.B.(skreft): the --branch option is required as otherwise the shallow
740 # clone will only fetch the active branch.
759 # clone will only fetch the active branch.
741 cmd = ['clone', '--branch', branch_name,
760 cmd = ['clone', '--branch', branch_name,
742 self.path, os.path.abspath(clone_path)]
761 self.path, os.path.abspath(clone_path)]
743
762
744 self.run_git_command(cmd, fail_on_stderr=False)
763 self.run_git_command(cmd, fail_on_stderr=False)
745
764
746 # if we get the different source branch, make sure we also fetch it for
765 # if we get the different source branch, make sure we also fetch it for
747 # merge conditions
766 # merge conditions
748 if source_branch and source_branch != branch_name:
767 if source_branch and source_branch != branch_name:
749 # check if the ref exists.
768 # check if the ref exists.
750 shadow_repo = GitRepository(os.path.abspath(clone_path))
769 shadow_repo = GitRepository(os.path.abspath(clone_path))
751 if shadow_repo.get_remote_ref(source_branch):
770 if shadow_repo.get_remote_ref(source_branch):
752 cmd = ['fetch', self.path, source_branch]
771 cmd = ['fetch', self.path, source_branch]
753 self.run_git_command(cmd, fail_on_stderr=False)
772 self.run_git_command(cmd, fail_on_stderr=False)
754
773
755 def _local_fetch(self, repository_path, branch_name, use_origin=False):
774 def _local_fetch(self, repository_path, branch_name, use_origin=False):
756 """
775 """
757 Fetch a branch from a local repository.
776 Fetch a branch from a local repository.
758 """
777 """
759 repository_path = os.path.abspath(repository_path)
778 repository_path = os.path.abspath(repository_path)
760 if repository_path == self.path:
779 if repository_path == self.path:
761 raise ValueError('Cannot fetch from the same repository')
780 raise ValueError('Cannot fetch from the same repository')
762
781
763 if use_origin:
782 if use_origin:
764 branch_name = '+{branch}:refs/heads/{branch}'.format(
783 branch_name = '+{branch}:refs/heads/{branch}'.format(
765 branch=branch_name)
784 branch=branch_name)
766
785
767 cmd = ['fetch', '--no-tags', '--update-head-ok',
786 cmd = ['fetch', '--no-tags', '--update-head-ok',
768 repository_path, branch_name]
787 repository_path, branch_name]
769 self.run_git_command(cmd, fail_on_stderr=False)
788 self.run_git_command(cmd, fail_on_stderr=False)
770
789
771 def _local_reset(self, branch_name):
790 def _local_reset(self, branch_name):
772 branch_name = '{}'.format(branch_name)
791 branch_name = '{}'.format(branch_name)
773 cmd = ['reset', '--hard', branch_name, '--']
792 cmd = ['reset', '--hard', branch_name, '--']
774 self.run_git_command(cmd, fail_on_stderr=False)
793 self.run_git_command(cmd, fail_on_stderr=False)
775
794
776 def _last_fetch_heads(self):
795 def _last_fetch_heads(self):
777 """
796 """
778 Return the last fetched heads that need merging.
797 Return the last fetched heads that need merging.
779
798
780 The algorithm is defined at
799 The algorithm is defined at
781 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
800 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
782 """
801 """
783 if not self.bare:
802 if not self.bare:
784 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
803 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
785 else:
804 else:
786 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
805 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
787
806
788 heads = []
807 heads = []
789 with open(fetch_heads_path) as f:
808 with open(fetch_heads_path) as f:
790 for line in f:
809 for line in f:
791 if ' not-for-merge ' in line:
810 if ' not-for-merge ' in line:
792 continue
811 continue
793 line = re.sub('\t.*', '', line, flags=re.DOTALL)
812 line = re.sub('\t.*', '', line, flags=re.DOTALL)
794 heads.append(line)
813 heads.append(line)
795
814
796 return heads
815 return heads
797
816
798 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
817 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
799 return GitRepository(shadow_repository_path)
818 return GitRepository(shadow_repository_path)
800
819
801 def _local_pull(self, repository_path, branch_name, ff_only=True):
820 def _local_pull(self, repository_path, branch_name, ff_only=True):
802 """
821 """
803 Pull a branch from a local repository.
822 Pull a branch from a local repository.
804 """
823 """
805 if self.bare:
824 if self.bare:
806 raise RepositoryError('Cannot pull into a bare git repository')
825 raise RepositoryError('Cannot pull into a bare git repository')
807 # N.B.(skreft): The --ff-only option is to make sure this is a
826 # N.B.(skreft): The --ff-only option is to make sure this is a
808 # fast-forward (i.e., we are only pulling new changes and there are no
827 # fast-forward (i.e., we are only pulling new changes and there are no
809 # conflicts with our current branch)
828 # conflicts with our current branch)
810 # Additionally, that option needs to go before --no-tags, otherwise git
829 # Additionally, that option needs to go before --no-tags, otherwise git
811 # pull complains about it being an unknown flag.
830 # pull complains about it being an unknown flag.
812 cmd = ['pull']
831 cmd = ['pull']
813 if ff_only:
832 if ff_only:
814 cmd.append('--ff-only')
833 cmd.append('--ff-only')
815 cmd.extend(['--no-tags', repository_path, branch_name])
834 cmd.extend(['--no-tags', repository_path, branch_name])
816 self.run_git_command(cmd, fail_on_stderr=False)
835 self.run_git_command(cmd, fail_on_stderr=False)
817
836
818 def _local_merge(self, merge_message, user_name, user_email, heads):
837 def _local_merge(self, merge_message, user_name, user_email, heads):
819 """
838 """
820 Merge the given head into the checked out branch.
839 Merge the given head into the checked out branch.
821
840
822 It will force a merge commit.
841 It will force a merge commit.
823
842
824 Currently it raises an error if the repo is empty, as it is not possible
843 Currently it raises an error if the repo is empty, as it is not possible
825 to create a merge commit in an empty repo.
844 to create a merge commit in an empty repo.
826
845
827 :param merge_message: The message to use for the merge commit.
846 :param merge_message: The message to use for the merge commit.
828 :param heads: the heads to merge.
847 :param heads: the heads to merge.
829 """
848 """
830 if self.bare:
849 if self.bare:
831 raise RepositoryError('Cannot merge into a bare git repository')
850 raise RepositoryError('Cannot merge into a bare git repository')
832
851
833 if not heads:
852 if not heads:
834 return
853 return
835
854
836 if self.is_empty():
855 if self.is_empty():
837 # TODO(skreft): do somehting more robust in this case.
856 # TODO(skreft): do somehting more robust in this case.
838 raise RepositoryError(
857 raise RepositoryError(
839 'Do not know how to merge into empty repositories yet')
858 'Do not know how to merge into empty repositories yet')
840
859
841 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
860 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
842 # commit message. We also specify the user who is doing the merge.
861 # commit message. We also specify the user who is doing the merge.
843 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
862 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
844 '-c', 'user.email=%s' % safe_str(user_email),
863 '-c', 'user.email=%s' % safe_str(user_email),
845 'merge', '--no-ff', '-m', safe_str(merge_message)]
864 'merge', '--no-ff', '-m', safe_str(merge_message)]
846 cmd.extend(heads)
865 cmd.extend(heads)
847 try:
866 try:
848 output = self.run_git_command(cmd, fail_on_stderr=False)
867 output = self.run_git_command(cmd, fail_on_stderr=False)
849 except RepositoryError:
868 except RepositoryError:
850 # Cleanup any merge leftovers
869 # Cleanup any merge leftovers
851 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
870 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
852 raise
871 raise
853
872
def _local_push(
        self, source_branch, repository_path, target_branch,
        enable_hooks=False, rc_scm_data=None):
    """
    Push the source_branch to the given repository and target_branch.

    Currently it if the target_branch is not master and the target repo is
    empty, the push will work, but then GitRepository won't be able to find
    the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
    pointing to master, which does not exist).

    It does not run the hooks in the target repo.
    """
    # TODO(skreft): deal with the case in which the target repo is empty,
    # and the target_branch is not master.
    target_repo = GitRepository(repository_path)
    checked_out = (
        not target_repo.bare
        and target_repo._current_branch() == target_branch)
    if checked_out:
        # Git refuses pushes into the currently checked out branch, so
        # emulate the push by pulling into the target repository instead.
        target_repo._local_pull(self.path, source_branch)
    else:
        push_cmd = ['push', os.path.abspath(repository_path),
                    '%s:%s' % (source_branch, target_branch)]
        gitenv = {}
        if rc_scm_data:
            gitenv['RC_SCM_DATA'] = rc_scm_data
        if not enable_hooks:
            # signal the hook machinery to skip execution on the target
            gitenv['RC_SKIP_HOOKS'] = '1'
        self.run_git_command(push_cmd, fail_on_stderr=False, extra_env=gitenv)
885
904
886 def _get_new_pr_branch(self, source_branch, target_branch):
905 def _get_new_pr_branch(self, source_branch, target_branch):
887 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
888 pr_branches = []
907 pr_branches = []
889 for branch in self.branches:
908 for branch in self.branches:
890 if branch.startswith(prefix):
909 if branch.startswith(prefix):
891 pr_branches.append(int(branch[len(prefix):]))
910 pr_branches.append(int(branch[len(prefix):]))
892
911
893 if not pr_branches:
912 if not pr_branches:
894 branch_id = 0
913 branch_id = 0
895 else:
914 else:
896 branch_id = max(pr_branches) + 1
915 branch_id = max(pr_branches) + 1
897
916
898 return '%s%d' % (prefix, branch_id)
917 return '%s%d' % (prefix, branch_id)
899
918
def _maybe_prepare_merge_workspace(
        self, repo_id, workspace_id, target_ref, source_ref):
    """
    Ensure the shadow repository used for merge testing exists and
    return its path. The clone is only created on first use; later
    calls reuse the existing workspace.
    """
    shadow_path = self._get_shadow_repository_path(repo_id, workspace_id)
    if not os.path.exists(shadow_path):
        self._local_clone(shadow_path, target_ref.name, source_ref.name)
        log.debug('Prepared shadow repository in %s', shadow_path)

    return shadow_path
911
930
def _merge_repo(self, repo_id, workspace_id, target_ref,
                source_repo, source_ref, merge_message,
                merger_name, merger_email, dry_run=False,
                use_rebase=False, close_branch=False):
    """
    Perform (or, with ``dry_run``, simulate) a merge of ``source_ref``
    into ``target_ref`` using a dedicated shadow repository.

    Returns a ``MergeResponse`` describing whether the merge is possible,
    whether it actually succeeded, the resulting merge reference (if any)
    and a failure reason plus metadata on error.

    NOTE(review): ``use_rebase`` and ``close_branch`` are accepted for
    interface compatibility but are not acted upon in this git
    implementation — presumably handled by callers/other backends.
    """
    log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
              'rebase' if use_rebase else 'merge', dry_run)
    # Bail out early if the caller's view of the target head is stale.
    if target_ref.commit_id != self.branches[target_ref.name]:
        log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                    target_ref.commit_id, self.branches[target_ref.name])
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
            metadata={'target_ref': target_ref})

    shadow_repository_path = self._maybe_prepare_merge_workspace(
        repo_id, workspace_id, target_ref, source_ref)
    shadow_repo = self._get_shadow_instance(shadow_repository_path)

    # checkout source, if it's different. Otherwise we could not
    # fetch proper commits for merge testing
    if source_ref.name != target_ref.name:
        if shadow_repo.get_remote_ref(source_ref.name):
            shadow_repo._checkout(source_ref.name, force=True)

    # checkout target, and fetch changes
    shadow_repo._checkout(target_ref.name, force=True)

    # fetch/reset pull the target, in case it is changed
    # this handles even force changes
    shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
    shadow_repo._local_reset(target_ref.name)

    # Need to reload repo to invalidate the cache, or otherwise we cannot
    # retrieve the last target commit.
    shadow_repo = self._get_shadow_instance(shadow_repository_path)
    if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
        # Shadow repo diverged from the expected target head even after
        # the fetch/reset above — refuse to merge on stale state.
        log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                    target_ref, target_ref.commit_id,
                    shadow_repo.branches[target_ref.name])
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
            metadata={'target_ref': target_ref})

    # calculate new branch
    pr_branch = shadow_repo._get_new_pr_branch(
        source_ref.name, target_ref.name)
    log.debug('using pull-request merge branch: `%s`', pr_branch)
    # checkout to temp branch, and fetch changes
    shadow_repo._checkout(pr_branch, create=True)
    try:
        shadow_repo._local_fetch(source_repo.path, source_ref.name)
    except RepositoryError:
        log.exception('Failure when doing local fetch on '
                      'shadow repo: %s', shadow_repo)
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
            metadata={'source_ref': source_ref})

    merge_ref = None
    merge_failure_reason = MergeFailureReason.NONE
    metadata = {}
    try:
        shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                 [source_ref.commit_id])
        merge_possible = True

        # Need to reload repo to invalidate the cache, or otherwise we
        # cannot retrieve the merge commit.
        shadow_repo = GitRepository(shadow_repository_path)
        merge_commit_id = shadow_repo.branches[pr_branch]

        # Set a reference pointing to the merge commit. This reference may
        # be used to easily identify the last successful merge commit in
        # the shadow repository.
        shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
        merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
    except RepositoryError:
        log.exception('Failure when doing local merge on git shadow repo')
        merge_possible = False
        merge_failure_reason = MergeFailureReason.MERGE_FAILED

    if merge_possible and not dry_run:
        try:
            # Push the merged branch into the real target repository,
            # this time with hooks enabled so normal policies apply.
            shadow_repo._local_push(
                pr_branch, self.path, target_ref.name, enable_hooks=True,
                rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
            merge_succeeded = True
        except RepositoryError:
            log.exception(
                'Failure when doing local push from the shadow '
                'repository to the target repository at %s.', self.path)
            merge_succeeded = False
            merge_failure_reason = MergeFailureReason.PUSH_FAILED
            metadata['target'] = 'git shadow repo'
            metadata['merge_commit'] = pr_branch
    else:
        merge_succeeded = False

    return MergeResponse(
        merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
        metadata=metadata)
@@ -1,97 +1,98 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG inmemory module
22 HG inmemory module
23 """
23 """
24
24
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 from rhodecode.lib.utils import safe_str
26 from rhodecode.lib.utils import safe_str
27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
28 from rhodecode.lib.vcs.exceptions import RepositoryError
28 from rhodecode.lib.vcs.exceptions import RepositoryError
29
29
30
30
class MercurialInMemoryCommit(BaseInMemoryCommit):
    # In-memory commit implementation for Mercurial: stages node changes
    # and materializes them via the remote ``commitctx`` call.

    def commit(self, message, author, parents=None, branch=None, date=None,
               **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created `MercurialCommit`. Updates repository's
        `commit_ids`.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: `datetime.datetime` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: Optional. Branch name as unicode. Will use the backend's
            default if not given.

        :raises `RepositoryError`: if any error occurs while committing
        """
        self.check_integrity(parents)

        if not isinstance(message, unicode) or not isinstance(author, unicode):
            # TODO: johbo: Should be a TypeError
            raise RepositoryError('Given message and author needs to be '
                                  'an <unicode> instance got %r & %r instead'
                                  % (type(message), type(author)))

        if branch is None:
            branch = self.repository.DEFAULT_BRANCH_NAME
        # branch travels to Mercurial via the commit's extra-fields dict
        kwargs['branch'] = safe_str(branch)

        message = safe_str(message)
        author = safe_str(author)

        parent_ids = [p.raw_id if p else None for p in self.parents]

        ENCODING = "UTF-8"

        # Build the payload of added/changed files; binary content is
        # passed through untouched, text is encoded to UTF-8 bytes.
        updated = []
        for node in self.added + self.changed:
            if node.is_binary:
                content = node.content
            else:
                content = node.content.encode(ENCODING)
            updated.append({
                'path': node.path,
                'content': content,
                'mode': node.mode,
            })

        removed = [node.path for node in self.removed]

        date, tz = date_to_timestamp_plus_offset(date)

        commit_id = self.repository._remote.commitctx(
            message=message, parents=parent_ids,
            commit_time=date, commit_timezone=tz, user=author,
            files=self.get_paths(), extra=kwargs, removed=removed,
            updated=updated)
        # Guard against appending a duplicate id; only extend and rebuild
        # the lookup cache when the commit is genuinely new.
        if commit_id not in self.repository.commit_ids:
            self.repository.commit_ids.append(commit_id)
            self.repository._rebuild_cache(self.repository.commit_ids)

        # refresh branch heads, return the new tip and clear staged state
        self.repository.branches = self.repository._get_branches()
        tip = self.repository.get_commit()
        self.reset()
        return tip
@@ -1,941 +1,942 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
35 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
36 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
45 from rhodecode.lib.vcs.compat import configparser
46
46
47 hexlify = binascii.hexlify
47 hexlify = binascii.hexlify
48 nullid = "\0" * 20
48 nullid = "\0" * 20
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class MercurialRepository(BaseRepository):
53 class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
def __init__(self, repo_path, config=None, create=False, src_url=None,
             do_workspace_checkout=False, with_wire=None, bare=False):
    """
    Raises RepositoryError if repository could not be find at the given
    ``repo_path``.

    :param repo_path: local path of the repository
    :param config: config object containing the repo configuration
    :param create=False: if set to True, would try to create repository if
       it does not exist rather than raising exception
    :param src_url=None: would try to clone repository from given location
    :param do_workspace_checkout=False: sets update of working copy after
       making a clone
    :param bare: not used, compatible with other VCS
    """

    self.path = safe_str(os.path.abspath(repo_path))
    # mercurial since 4.4.X requires certain configuration to be present
    # because sometimes we init the repos with config we need to meet
    # special requirements
    self.config = config if config else self.get_default_config(
        default=[('extensions', 'largefiles', '1')])
    self.with_wire = with_wire

    self._init_repo(create, src_url, do_workspace_checkout)

    # caches
    self._commit_ids = {}  # commit_id -> index, filled by _rebuild_cache
87
87
@LazyProperty
def _remote(self):
    # Lazily constructed proxy to the Mercurial backend on the vcsserver;
    # created once per repository instance.
    return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91
91
@LazyProperty
def commit_ids(self):
    """
    Returns list of commit ids, in ascending order. Being lazy
    attribute allows external tools to inject shas from cache.
    """
    commit_ids = self._get_all_commit_ids()
    # prime the id -> index lookup cache alongside the ordered list
    self._rebuild_cache(commit_ids)
    return commit_ids
101
101
102 def _rebuild_cache(self, commit_ids):
102 def _rebuild_cache(self, commit_ids):
103 self._commit_ids = dict((commit_id, index)
103 self._commit_ids = dict((commit_id, index)
104 for index, commit_id in enumerate(commit_ids))
104 for index, commit_id in enumerate(commit_ids))
105
105
@LazyProperty
def branches(self):
    # Active (open) branches only; closed ones live in `branches_closed`.
    return self._get_branches()
109
109
@LazyProperty
def branches_closed(self):
    # Closed branches only (active=False, closed=True).
    return self._get_branches(active=False, closed=True)
113
113
@LazyProperty
def branches_all(self):
    """Union of open and closed branches, keyed by branch name."""
    combined = {}
    combined.update(self.branches)
    combined.update(self.branches_closed)
    return combined
120
120
def _get_branches(self, active=True, closed=False):
    """
    Gets branches for this repository
    Returns only not closed active branches by default

    :param active: return also active branches
    :param closed: return also closed branches
    """
    if self.is_empty():
        return {}

    branch_pairs = [
        (safe_unicode(name), hexlify(sha))
        for name, sha in self._remote.branches(active, closed).items()]

    # ordered by branch name, ascending
    return OrderedDict(
        sorted(branch_pairs, key=lambda pair: pair[0], reverse=False))
140
140
@LazyProperty
def tags(self):
    """
    Gets tags for this repository
    """
    # Lazy so external callers pay the remote round-trip only once.
    return self._get_tags()
147
147
def _get_tags(self):
    """Return tag-name -> hex commit id mapping, newest name first."""
    if self.is_empty():
        return {}

    tag_pairs = [
        (safe_unicode(name), hexlify(sha))
        for name, sha in self._remote.tags().items()]

    # ordered by tag name, descending
    return OrderedDict(
        sorted(tag_pairs, key=lambda pair: pair[0], reverse=True))
159
159
def tag(self, name, user, commit_id=None, message=None, date=None,
        **kwargs):
    """
    Creates and returns a tag for the given ``commit_id``.

    :param name: name for new tag
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param commit_id: commit id for which new tag would be created
    :param message: message of the tag's commit
    :param date: date of tag's commit

    :raises TagAlreadyExistError: if tag with same name already exists
    """
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)
    commit = self.get_commit(commit_id=commit_id)
    # 'local' may be passed via kwargs; default to a global (versioned) tag
    local = kwargs.setdefault('local', False)

    if message is None:
        message = "Added tag %s for commit %s" % (name, commit.short_id)

    date, tz = date_to_timestamp_plus_offset(date)

    self._remote.tag(
        name, commit.raw_id, message, local, user, date, tz)
    # tagging creates a new commit on the remote; drop stale vcs caches
    self._remote.invalidate_vcs_cache()

    # Reinitialize tags
    self.tags = self._get_tags()
    tag_id = self.tags[name]

    return self.get_commit(commit_id=tag_id)
192
192
def remove_tag(self, name, user, message=None, date=None):
    """
    Removes tag with the given `name`.

    :param name: name of the tag to be removed
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message of the tag's removal commit
    :param date: date of tag's removal commit

    :raises TagDoesNotExistError: if tag with given name does not exists
    """
    if name not in self.tags:
        raise TagDoesNotExistError("Tag %s does not exist" % name)
    if message is None:
        message = "Removed tag %s" % name
    # removal is always recorded as a global (non-local) tag change
    local = False

    date, tz = date_to_timestamp_plus_offset(date)

    # tagging with the null id is Mercurial's way to delete a tag
    self._remote.tag(name, nullid, message, local, user, date, tz)
    self._remote.invalidate_vcs_cache()
    # refresh the cached tag mapping after removal
    self.tags = self._get_tags()
215
215
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository.

        Computed lazily and cached on first access; returns the mapping
        built by ``_get_bookmarks``.
        """
        return self._get_bookmarks()
222
222
223 def _get_bookmarks(self):
223 def _get_bookmarks(self):
224 if self.is_empty():
224 if self.is_empty():
225 return {}
225 return {}
226
226
227 def get_name(ctx):
227 def get_name(ctx):
228 return ctx[0]
228 return ctx[0]
229
229
230 _bookmarks = [
230 _bookmarks = [
231 (safe_unicode(n), hexlify(h)) for n, h in
231 (safe_unicode(n), hexlify(h)) for n, h in
232 self._remote.bookmarks().items()]
232 self._remote.bookmarks().items()]
233
233
234 return OrderedDict(sorted(_bookmarks, key=get_name))
234 return OrderedDict(sorted(_bookmarks, key=get_name))
235
235
    def _get_all_commit_ids(self):
        # 'visible' selects the non-hidden repository view — presumably
        # excludes hidden/obsolete changesets (cf. `show_hidden` handling
        # in get_commits); confirm against the remote's implementation.
        return self._remote.get_all_commit_ids('visible')
238
238
239 def get_diff(
239 def get_diff(
240 self, commit1, commit2, path='', ignore_whitespace=False,
240 self, commit1, commit2, path='', ignore_whitespace=False,
241 context=3, path1=None):
241 context=3, path1=None):
242 """
242 """
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 `commit2` since `commit1`.
244 `commit2` since `commit1`.
245
245
246 :param commit1: Entry point from which diff is shown. Can be
246 :param commit1: Entry point from which diff is shown. Can be
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 the changes since empty state of the repository until `commit2`
248 the changes since empty state of the repository until `commit2`
249 :param commit2: Until which commit changes should be shown.
249 :param commit2: Until which commit changes should be shown.
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 changes. Defaults to ``False``.
251 changes. Defaults to ``False``.
252 :param context: How many lines before/after changed lines should be
252 :param context: How many lines before/after changed lines should be
253 shown. Defaults to ``3``.
253 shown. Defaults to ``3``.
254 """
254 """
255 self._validate_diff_commits(commit1, commit2)
255 self._validate_diff_commits(commit1, commit2)
256 if path1 is not None and path1 != path:
256 if path1 is not None and path1 != path:
257 raise ValueError("Diff of two different paths not supported.")
257 raise ValueError("Diff of two different paths not supported.")
258
258
259 if path:
259 if path:
260 file_filter = [self.path, path]
260 file_filter = [self.path, path]
261 else:
261 else:
262 file_filter = None
262 file_filter = None
263
263
264 diff = self._remote.diff(
264 diff = self._remote.diff(
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 opt_git=True, opt_ignorews=ignore_whitespace,
266 opt_git=True, opt_ignorews=ignore_whitespace,
267 context=context)
267 context=context)
268 return MercurialDiff(diff)
268 return MercurialDiff(diff)
269
269
    def strip(self, commit_id, branch=None):
        """
        Strip the given commit from the repository.

        :param commit_id: commit to strip
        :param branch: accepted for interface compatibility; not used here
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # stripping rewrites history - rebuild the cached commit id index
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
276
276
    def verify(self):
        """
        Run repository integrity verification via the remote and return
        its result. The VCS cache is invalidated afterwards.
        """
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify
282
282
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 if commit_id1 == commit_id2:
284 if commit_id1 == commit_id2:
285 return commit_id1
285 return commit_id1
286
286
287 ancestors = self._remote.revs_from_revspec(
287 ancestors = self._remote.revs_from_revspec(
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 other_path=repo2.path)
289 other_path=repo2.path)
290 return repo2[ancestors[0]].raw_id if ancestors else None
290 return repo2[ancestors[0]].raw_id if ancestors else None
291
291
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 if commit_id1 == commit_id2:
293 if commit_id1 == commit_id2:
294 commits = []
294 commits = []
295 else:
295 else:
296 if merge:
296 if merge:
297 indexes = self._remote.revs_from_revspec(
297 indexes = self._remote.revs_from_revspec(
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 else:
300 else:
301 indexes = self._remote.revs_from_revspec(
301 indexes = self._remote.revs_from_revspec(
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 commit_id1, other_path=repo2.path)
303 commit_id1, other_path=repo2.path)
304
304
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 for idx in indexes]
306 for idx in indexes]
307
307
308 return commits
308 return commits
309
309
310 @staticmethod
310 @staticmethod
311 def check_url(url, config):
311 def check_url(url, config):
312 """
312 """
313 Function will check given url and try to verify if it's a valid
313 Function will check given url and try to verify if it's a valid
314 link. Sometimes it may happened that mercurial will issue basic
314 link. Sometimes it may happened that mercurial will issue basic
315 auth request that can cause whole API to hang when used from python
315 auth request that can cause whole API to hang when used from python
316 or other external calls.
316 or other external calls.
317
317
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
318 On failures it'll raise urllib2.HTTPError, exception is also thrown
319 when the return code is non 200
319 when the return code is non 200
320 """
320 """
321 # check first if it's not an local url
321 # check first if it's not an local url
322 if os.path.isdir(url) or url.startswith('file:'):
322 if os.path.isdir(url) or url.startswith('file:'):
323 return True
323 return True
324
324
325 # Request the _remote to verify the url
325 # Request the _remote to verify the url
326 return connection.Hg.check_url(url, config.serialize())
326 return connection.Hg.check_url(url, config.serialize())
327
327
328 @staticmethod
328 @staticmethod
329 def is_valid_repository(path):
329 def is_valid_repository(path):
330 return os.path.isdir(os.path.join(path, '.hg'))
330 return os.path.isdir(os.path.join(path, '.hg'))
331
331
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :param create: create a new repository if none exists at ``self.path``
        :param src_url: optional URL to clone the repository from
        :param do_workspace_checkout: when cloning, also update the
            working copy
        :raises RepositoryError: when `create` is requested but the target
            path already exists
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            # normalize and validate the source url before cloning
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        self._remote.localrepository(create)
361
361
    @LazyProperty
    def in_memory_commit(self):
        """
        Return a (lazily cached) in-memory commit object for this repo.
        """
        return MercurialInMemoryCommit(self)
365
365
    @LazyProperty
    def description(self):
        """
        Repository description read from the ``[web] description`` config
        value, falling back to ``DEFAULT_DESCRIPTION``.
        """
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371
371
    @LazyProperty
    def contact(self):
        """
        Repository contact read from ``[web] contact`` or, failing that,
        ``[ui] username``; falls back to ``DEFAULT_CONTACT``.
        """
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
378
378
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no usable commit (e.g. empty repo) - fall back to the
            # filesystem modification time
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390
390
391 def _get_fs_mtime(self):
391 def _get_fs_mtime(self):
392 # fallback to filesystem
392 # fallback to filesystem
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 st_path = os.path.join(self.path, '.hg', "store")
394 st_path = os.path.join(self.path, '.hg', "store")
395 if os.path.exists(cl_path):
395 if os.path.exists(cl_path):
396 return os.stat(cl_path).st_mtime
396 return os.stat(cl_path).st_mtime
397 else:
397 else:
398 return os.stat(st_path).st_mtime
398 return os.stat(st_path).st_mtime
399
399
400 def _get_url(self, url):
400 def _get_url(self, url):
401 """
401 """
402 Returns normalized url. If schema is not given, would fall
402 Returns normalized url. If schema is not given, would fall
403 to filesystem
403 to filesystem
404 (``file:///``) schema.
404 (``file:///``) schema.
405 """
405 """
406 url = url.encode('utf8')
406 url = url.encode('utf8')
407 if url != 'default' and '://' not in url:
407 if url != 'default' and '://' not in url:
408 url = "file:" + urllib.pathname2url(url)
408 url = "file:" + urllib.pathname2url(url)
409 return url
409 return url
410
410
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        # hook configuration lives in the repository's .hg/.hgrc file
        return os.path.join(self.path, '.hg', '.hgrc')
416
416
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or symbolic name (defaults to "tip"
            when neither id nor idx is given)
        :param commit_idx: numeric commit index; negative values are
            normalized to their positive position
        :param pre_load: optional attributes to pre-load on the commit
        :param translate_tag: accepted for interface compatibility; unused
        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when the lookup fails
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        # cache miss / symbolic name: resolve via the remote
        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit %s does not exist for %s" % (commit_id, self.name)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456
457
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
          exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # resolve start/end ids to positional indexes into commit_ids
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1  # end of range is inclusive

        # build a Mercurial revset expression from the given filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
539
540
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param url: location to pull from; normalized via ``_get_url``
        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()
550
551
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        # for Mercurial a fetch is simply a pull
        return self.pull(url, commit_ids=commit_ids)
556
557
    def push(self, url):
        """
        Push changes to the external location at `url` (normalized first).
        """
        url = self._get_url(url)
        self._remote.sync_push(url)
560
561
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.

        :param clone_path: target path for the clone; hooks are disabled
            and the working copy is updated after cloning
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
567
568
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: when True, perform a clean checkout (discarding
            uncommitted changes, per `hg update --clean` semantics)
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
574
575
    def _identify(self):
        """
        Return the current state of the working directory.
        """
        # strip the trailing '+' that `hg identify` appends for a dirty
        # working directory
        return self._remote.identify().strip().rstrip('+')
580
581
    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.

        :param branch: optionally restrict heads to the given branch
        """
        # the remote returns a space-separated string of head ids
        return self._remote.heads(branch=branch).strip().split(' ')
586
587
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
592
593
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param revision: single revision to push
        :param repository_path: path of the target repository
        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: run hooks in the target repo during the push
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
604
605
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: when True, rebase the source onto the target
            (via a temporary bookmark) instead of creating a merge commit
        :param dry_run: accepted for interface compatibility; not used in
            this method's body
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # temporary bookmark marking the rebased head so we can
                # check it out after the rebase completes
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
660
661
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param close_message: optional commit message; a default one is
            generated from the source ref name when empty
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
682
683
683 def _is_the_same_branch(self, target_ref, source_ref):
684 def _is_the_same_branch(self, target_ref, source_ref):
684 return (
685 return (
685 self._get_branch_name(target_ref) ==
686 self._get_branch_name(target_ref) ==
686 self._get_branch_name(source_ref))
687 self._get_branch_name(source_ref))
687
688
    def _get_branch_name(self, ref):
        """
        Return the branch name for `ref`: the ref's own name for branch
        refs, otherwise the branch of the referenced commit.
        """
        if ref.type == 'branch':
            return ref.name
        return self._remote.ctx_branch(ref.commit_id)
692
693
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Ensure a shadow repository exists for the given workspace and
        return its path; it is cloned locally on first use.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
703
704
704 def _merge_repo(self, repo_id, workspace_id, target_ref,
705 def _merge_repo(self, repo_id, workspace_id, target_ref,
705 source_repo, source_ref, merge_message,
706 source_repo, source_ref, merge_message,
706 merger_name, merger_email, dry_run=False,
707 merger_name, merger_email, dry_run=False,
707 use_rebase=False, close_branch=False):
708 use_rebase=False, close_branch=False):
708
709
709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
710 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
710 'rebase' if use_rebase else 'merge', dry_run)
711 'rebase' if use_rebase else 'merge', dry_run)
711 if target_ref.commit_id not in self._heads():
712 if target_ref.commit_id not in self._heads():
712 return MergeResponse(
713 return MergeResponse(
713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
714 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
714 metadata={'target_ref': target_ref})
715 metadata={'target_ref': target_ref})
715
716
716 try:
717 try:
717 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
718 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
718 heads = '\n,'.join(self._heads(target_ref.name))
719 heads = '\n,'.join(self._heads(target_ref.name))
719 metadata = {
720 metadata = {
720 'target_ref': target_ref,
721 'target_ref': target_ref,
721 'source_ref': source_ref,
722 'source_ref': source_ref,
722 'heads': heads
723 'heads': heads
723 }
724 }
724 return MergeResponse(
725 return MergeResponse(
725 False, False, None,
726 False, False, None,
726 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
727 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
727 metadata=metadata)
728 metadata=metadata)
728 except CommitDoesNotExistError:
729 except CommitDoesNotExistError:
729 log.exception('Failure when looking up branch heads on hg target')
730 log.exception('Failure when looking up branch heads on hg target')
730 return MergeResponse(
731 return MergeResponse(
731 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
732 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
732 metadata={'target_ref': target_ref})
733 metadata={'target_ref': target_ref})
733
734
734 shadow_repository_path = self._maybe_prepare_merge_workspace(
735 shadow_repository_path = self._maybe_prepare_merge_workspace(
735 repo_id, workspace_id, target_ref, source_ref)
736 repo_id, workspace_id, target_ref, source_ref)
736 shadow_repo = self._get_shadow_instance(shadow_repository_path)
737 shadow_repo = self._get_shadow_instance(shadow_repository_path)
737
738
738 log.debug('Pulling in target reference %s', target_ref)
739 log.debug('Pulling in target reference %s', target_ref)
739 self._validate_pull_reference(target_ref)
740 self._validate_pull_reference(target_ref)
740 shadow_repo._local_pull(self.path, target_ref)
741 shadow_repo._local_pull(self.path, target_ref)
741
742
742 try:
743 try:
743 log.debug('Pulling in source reference %s', source_ref)
744 log.debug('Pulling in source reference %s', source_ref)
744 source_repo._validate_pull_reference(source_ref)
745 source_repo._validate_pull_reference(source_ref)
745 shadow_repo._local_pull(source_repo.path, source_ref)
746 shadow_repo._local_pull(source_repo.path, source_ref)
746 except CommitDoesNotExistError:
747 except CommitDoesNotExistError:
747 log.exception('Failure when doing local pull on hg shadow repo')
748 log.exception('Failure when doing local pull on hg shadow repo')
748 return MergeResponse(
749 return MergeResponse(
749 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
750 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
750 metadata={'source_ref': source_ref})
751 metadata={'source_ref': source_ref})
751
752
752 merge_ref = None
753 merge_ref = None
753 merge_commit_id = None
754 merge_commit_id = None
754 close_commit_id = None
755 close_commit_id = None
755 merge_failure_reason = MergeFailureReason.NONE
756 merge_failure_reason = MergeFailureReason.NONE
756 metadata = {}
757 metadata = {}
757
758
758 # enforce that close branch should be used only in case we source from
759 # enforce that close branch should be used only in case we source from
759 # an actual Branch
760 # an actual Branch
760 close_branch = close_branch and source_ref.type == 'branch'
761 close_branch = close_branch and source_ref.type == 'branch'
761
762
762 # don't allow to close branch if source and target are the same
763 # don't allow to close branch if source and target are the same
763 close_branch = close_branch and source_ref.name != target_ref.name
764 close_branch = close_branch and source_ref.name != target_ref.name
764
765
765 needs_push_on_close = False
766 needs_push_on_close = False
766 if close_branch and not use_rebase and not dry_run:
767 if close_branch and not use_rebase and not dry_run:
767 try:
768 try:
768 close_commit_id, needs_push_on_close = shadow_repo._local_close(
769 close_commit_id, needs_push_on_close = shadow_repo._local_close(
769 target_ref, merger_name, merger_email, source_ref)
770 target_ref, merger_name, merger_email, source_ref)
770 merge_possible = True
771 merge_possible = True
771 except RepositoryError:
772 except RepositoryError:
772 log.exception('Failure when doing close branch on '
773 log.exception('Failure when doing close branch on '
773 'shadow repo: %s', shadow_repo)
774 'shadow repo: %s', shadow_repo)
774 merge_possible = False
775 merge_possible = False
775 merge_failure_reason = MergeFailureReason.MERGE_FAILED
776 merge_failure_reason = MergeFailureReason.MERGE_FAILED
776 else:
777 else:
777 merge_possible = True
778 merge_possible = True
778
779
779 needs_push = False
780 needs_push = False
780 if merge_possible:
781 if merge_possible:
781 try:
782 try:
782 merge_commit_id, needs_push = shadow_repo._local_merge(
783 merge_commit_id, needs_push = shadow_repo._local_merge(
783 target_ref, merge_message, merger_name, merger_email,
784 target_ref, merge_message, merger_name, merger_email,
784 source_ref, use_rebase=use_rebase, dry_run=dry_run)
785 source_ref, use_rebase=use_rebase, dry_run=dry_run)
785 merge_possible = True
786 merge_possible = True
786
787
787 # read the state of the close action, if it
788 # read the state of the close action, if it
788 # maybe required a push
789 # maybe required a push
789 needs_push = needs_push or needs_push_on_close
790 needs_push = needs_push or needs_push_on_close
790
791
791 # Set a bookmark pointing to the merge commit. This bookmark
792 # Set a bookmark pointing to the merge commit. This bookmark
792 # may be used to easily identify the last successful merge
793 # may be used to easily identify the last successful merge
793 # commit in the shadow repository.
794 # commit in the shadow repository.
794 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
795 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
795 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
796 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
796 except SubrepoMergeError:
797 except SubrepoMergeError:
797 log.exception(
798 log.exception(
798 'Subrepo merge error during local merge on hg shadow repo.')
799 'Subrepo merge error during local merge on hg shadow repo.')
799 merge_possible = False
800 merge_possible = False
800 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
801 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
801 needs_push = False
802 needs_push = False
802 except RepositoryError:
803 except RepositoryError:
803 log.exception('Failure when doing local merge on hg shadow repo')
804 log.exception('Failure when doing local merge on hg shadow repo')
804 merge_possible = False
805 merge_possible = False
805 merge_failure_reason = MergeFailureReason.MERGE_FAILED
806 merge_failure_reason = MergeFailureReason.MERGE_FAILED
806 needs_push = False
807 needs_push = False
807
808
808 if merge_possible and not dry_run:
809 if merge_possible and not dry_run:
809 if needs_push:
810 if needs_push:
810 # In case the target is a bookmark, update it, so after pushing
811 # In case the target is a bookmark, update it, so after pushing
811 # the bookmarks is also updated in the target.
812 # the bookmarks is also updated in the target.
812 if target_ref.type == 'book':
813 if target_ref.type == 'book':
813 shadow_repo.bookmark(
814 shadow_repo.bookmark(
814 target_ref.name, revision=merge_commit_id)
815 target_ref.name, revision=merge_commit_id)
815 try:
816 try:
816 shadow_repo_with_hooks = self._get_shadow_instance(
817 shadow_repo_with_hooks = self._get_shadow_instance(
817 shadow_repository_path,
818 shadow_repository_path,
818 enable_hooks=True)
819 enable_hooks=True)
819 # This is the actual merge action, we push from shadow
820 # This is the actual merge action, we push from shadow
820 # into origin.
821 # into origin.
821 # Note: the push_branches option will push any new branch
822 # Note: the push_branches option will push any new branch
822 # defined in the source repository to the target. This may
823 # defined in the source repository to the target. This may
823 # be dangerous as branches are permanent in Mercurial.
824 # be dangerous as branches are permanent in Mercurial.
824 # This feature was requested in issue #441.
825 # This feature was requested in issue #441.
825 shadow_repo_with_hooks._local_push(
826 shadow_repo_with_hooks._local_push(
826 merge_commit_id, self.path, push_branches=True,
827 merge_commit_id, self.path, push_branches=True,
827 enable_hooks=True)
828 enable_hooks=True)
828
829
829 # maybe we also need to push the close_commit_id
830 # maybe we also need to push the close_commit_id
830 if close_commit_id:
831 if close_commit_id:
831 shadow_repo_with_hooks._local_push(
832 shadow_repo_with_hooks._local_push(
832 close_commit_id, self.path, push_branches=True,
833 close_commit_id, self.path, push_branches=True,
833 enable_hooks=True)
834 enable_hooks=True)
834 merge_succeeded = True
835 merge_succeeded = True
835 except RepositoryError:
836 except RepositoryError:
836 log.exception(
837 log.exception(
837 'Failure when doing local push from the shadow '
838 'Failure when doing local push from the shadow '
838 'repository to the target repository at %s.', self.path)
839 'repository to the target repository at %s.', self.path)
839 merge_succeeded = False
840 merge_succeeded = False
840 merge_failure_reason = MergeFailureReason.PUSH_FAILED
841 merge_failure_reason = MergeFailureReason.PUSH_FAILED
841 metadata['target'] = 'hg shadow repo'
842 metadata['target'] = 'hg shadow repo'
842 metadata['merge_commit'] = merge_commit_id
843 metadata['merge_commit'] = merge_commit_id
843 else:
844 else:
844 merge_succeeded = True
845 merge_succeeded = True
845 else:
846 else:
846 merge_succeeded = False
847 merge_succeeded = False
847
848
848 return MergeResponse(
849 return MergeResponse(
849 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
850 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
850 metadata=metadata)
851 metadata=metadata)
851
852
852 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
853 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
853 config = self.config.copy()
854 config = self.config.copy()
854 if not enable_hooks:
855 if not enable_hooks:
855 config.clear_section('hooks')
856 config.clear_section('hooks')
856 return MercurialRepository(shadow_repository_path, config)
857 return MercurialRepository(shadow_repository_path, config)
857
858
858 def _validate_pull_reference(self, reference):
859 def _validate_pull_reference(self, reference):
859 if not (reference.name in self.bookmarks or
860 if not (reference.name in self.bookmarks or
860 reference.name in self.branches or
861 reference.name in self.branches or
861 self.get_commit(reference.commit_id)):
862 self.get_commit(reference.commit_id)):
862 raise CommitDoesNotExistError(
863 raise CommitDoesNotExistError(
863 'Unknown branch, bookmark or commit id')
864 'Unknown branch, bookmark or commit id')
864
865
865 def _local_pull(self, repository_path, reference):
866 def _local_pull(self, repository_path, reference):
866 """
867 """
867 Fetch a branch, bookmark or commit from a local repository.
868 Fetch a branch, bookmark or commit from a local repository.
868 """
869 """
869 repository_path = os.path.abspath(repository_path)
870 repository_path = os.path.abspath(repository_path)
870 if repository_path == self.path:
871 if repository_path == self.path:
871 raise ValueError('Cannot pull from the same repository')
872 raise ValueError('Cannot pull from the same repository')
872
873
873 reference_type_to_option_name = {
874 reference_type_to_option_name = {
874 'book': 'bookmark',
875 'book': 'bookmark',
875 'branch': 'branch',
876 'branch': 'branch',
876 }
877 }
877 option_name = reference_type_to_option_name.get(
878 option_name = reference_type_to_option_name.get(
878 reference.type, 'revision')
879 reference.type, 'revision')
879
880
880 if option_name == 'revision':
881 if option_name == 'revision':
881 ref = reference.commit_id
882 ref = reference.commit_id
882 else:
883 else:
883 ref = reference.name
884 ref = reference.name
884
885
885 options = {option_name: [ref]}
886 options = {option_name: [ref]}
886 self._remote.pull_cmd(repository_path, hooks=False, **options)
887 self._remote.pull_cmd(repository_path, hooks=False, **options)
887 self._remote.invalidate_vcs_cache()
888 self._remote.invalidate_vcs_cache()
888
889
889 def bookmark(self, bookmark, revision=None):
890 def bookmark(self, bookmark, revision=None):
890 if isinstance(bookmark, unicode):
891 if isinstance(bookmark, unicode):
891 bookmark = safe_str(bookmark)
892 bookmark = safe_str(bookmark)
892 self._remote.bookmark(bookmark, revision=revision)
893 self._remote.bookmark(bookmark, revision=revision)
893 self._remote.invalidate_vcs_cache()
894 self._remote.invalidate_vcs_cache()
894
895
895 def get_path_permissions(self, username):
896 def get_path_permissions(self, username):
896 hgacl_file = os.path.join(self.path, '.hg/hgacl')
897 hgacl_file = os.path.join(self.path, '.hg/hgacl')
897
898
898 def read_patterns(suffix):
899 def read_patterns(suffix):
899 svalue = None
900 svalue = None
900 for section, option in [
901 for section, option in [
901 ('narrowacl', username + suffix),
902 ('narrowacl', username + suffix),
902 ('narrowacl', 'default' + suffix),
903 ('narrowacl', 'default' + suffix),
903 ('narrowhgacl', username + suffix),
904 ('narrowhgacl', username + suffix),
904 ('narrowhgacl', 'default' + suffix)
905 ('narrowhgacl', 'default' + suffix)
905 ]:
906 ]:
906 try:
907 try:
907 svalue = hgacl.get(section, option)
908 svalue = hgacl.get(section, option)
908 break # stop at the first value we find
909 break # stop at the first value we find
909 except configparser.NoOptionError:
910 except configparser.NoOptionError:
910 pass
911 pass
911 if not svalue:
912 if not svalue:
912 return None
913 return None
913 result = ['/']
914 result = ['/']
914 for pattern in svalue.split():
915 for pattern in svalue.split():
915 result.append(pattern)
916 result.append(pattern)
916 if '*' not in pattern and '?' not in pattern:
917 if '*' not in pattern and '?' not in pattern:
917 result.append(pattern + '/*')
918 result.append(pattern + '/*')
918 return result
919 return result
919
920
920 if os.path.exists(hgacl_file):
921 if os.path.exists(hgacl_file):
921 try:
922 try:
922 hgacl = configparser.RawConfigParser()
923 hgacl = configparser.RawConfigParser()
923 hgacl.read(hgacl_file)
924 hgacl.read(hgacl_file)
924
925
925 includes = read_patterns('.includes')
926 includes = read_patterns('.includes')
926 excludes = read_patterns('.excludes')
927 excludes = read_patterns('.excludes')
927 return BasePathPermissionChecker.create_from_patterns(
928 return BasePathPermissionChecker.create_from_patterns(
928 includes, excludes)
929 includes, excludes)
929 except BaseException as e:
930 except BaseException as e:
930 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
931 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
931 hgacl_file, self.name, e)
932 hgacl_file, self.name, e)
932 raise exceptions.RepositoryRequirementError(msg)
933 raise exceptions.RepositoryRequirementError(msg)
933 else:
934 else:
934 return None
935 return None
935
936
936
937
937 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
938 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
938
939
939 def _commit_factory(self, commit_id):
940 def _commit_factory(self, commit_id):
940 return self.repo.get_commit(
941 return self.repo.get_commit(
941 commit_idx=commit_id, pre_load=self.pre_load)
942 commit_idx=commit_id, pre_load=self.pre_load)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,1745 +1,1742 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid import compat
33 from pyramid import compat
34 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
35
35
36 from rhodecode import events
36 from rhodecode import events
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.compat import OrderedDict
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.markup_renderer import (
42 from rhodecode.lib.markup_renderer import (
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 from rhodecode.lib.vcs.backends.base import (
45 from rhodecode.lib.vcs.backends.base import (
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 from rhodecode.lib.vcs.exceptions import (
48 from rhodecode.lib.vcs.exceptions import (
49 CommitDoesNotExistError, EmptyRepositoryError)
49 CommitDoesNotExistError, EmptyRepositoryError)
50 from rhodecode.model import BaseModel
50 from rhodecode.model import BaseModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.comment import CommentsModel
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 from rhodecode.model.meta import Session
56 from rhodecode.model.meta import Session
57 from rhodecode.model.notification import NotificationModel, \
57 from rhodecode.model.notification import NotificationModel, \
58 EmailNotificationModel
58 EmailNotificationModel
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.settings import VcsSettingsModel
60 from rhodecode.model.settings import VcsSettingsModel
61
61
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
66 # Data structure to hold the response data when updating commits during a pull
66 # Data structure to hold the response data when updating commits during a pull
67 # request update.
67 # request update.
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 'executed', 'reason', 'new', 'old', 'changes',
69 'executed', 'reason', 'new', 'old', 'changes',
70 'source_changed', 'target_changed'])
70 'source_changed', 'target_changed'])
71
71
72
72
73 class PullRequestModel(BaseModel):
73 class PullRequestModel(BaseModel):
74
74
75 cls = PullRequest
75 cls = PullRequest
76
76
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78
78
79 UPDATE_STATUS_MESSAGES = {
79 UPDATE_STATUS_MESSAGES = {
80 UpdateFailureReason.NONE: lazy_ugettext(
80 UpdateFailureReason.NONE: lazy_ugettext(
81 'Pull request update successful.'),
81 'Pull request update successful.'),
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 'Pull request update failed because of an unknown error.'),
83 'Pull request update failed because of an unknown error.'),
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 'No update needed because the source and target have not changed.'),
85 'No update needed because the source and target have not changed.'),
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 'Pull request cannot be updated because the reference type is '
87 'Pull request cannot be updated because the reference type is '
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 'This pull request cannot be updated because the target '
90 'This pull request cannot be updated because the target '
91 'reference is missing.'),
91 'reference is missing.'),
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 'This pull request cannot be updated because the source '
93 'This pull request cannot be updated because the source '
94 'reference is missing.'),
94 'reference is missing.'),
95 }
95 }
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98
98
99 def __get_pull_request(self, pull_request):
99 def __get_pull_request(self, pull_request):
100 return self._get_instance((
100 return self._get_instance((
101 PullRequest, PullRequestVersion), pull_request)
101 PullRequest, PullRequestVersion), pull_request)
102
102
103 def _check_perms(self, perms, pull_request, user, api=False):
103 def _check_perms(self, perms, pull_request, user, api=False):
104 if not api:
104 if not api:
105 return h.HasRepoPermissionAny(*perms)(
105 return h.HasRepoPermissionAny(*perms)(
106 user=user, repo_name=pull_request.target_repo.repo_name)
106 user=user, repo_name=pull_request.target_repo.repo_name)
107 else:
107 else:
108 return h.HasRepoPermissionAnyApi(*perms)(
108 return h.HasRepoPermissionAnyApi(*perms)(
109 user=user, repo_name=pull_request.target_repo.repo_name)
109 user=user, repo_name=pull_request.target_repo.repo_name)
110
110
111 def check_user_read(self, pull_request, user, api=False):
111 def check_user_read(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 return self._check_perms(_perms, pull_request, user, api)
113 return self._check_perms(_perms, pull_request, user, api)
114
114
115 def check_user_merge(self, pull_request, user, api=False):
115 def check_user_merge(self, pull_request, user, api=False):
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 return self._check_perms(_perms, pull_request, user, api)
117 return self._check_perms(_perms, pull_request, user, api)
118
118
119 def check_user_update(self, pull_request, user, api=False):
119 def check_user_update(self, pull_request, user, api=False):
120 owner = user.user_id == pull_request.user_id
120 owner = user.user_id == pull_request.user_id
121 return self.check_user_merge(pull_request, user, api) or owner
121 return self.check_user_merge(pull_request, user, api) or owner
122
122
123 def check_user_delete(self, pull_request, user):
123 def check_user_delete(self, pull_request, user):
124 owner = user.user_id == pull_request.user_id
124 owner = user.user_id == pull_request.user_id
125 _perms = ('repository.admin',)
125 _perms = ('repository.admin',)
126 return self._check_perms(_perms, pull_request, user) or owner
126 return self._check_perms(_perms, pull_request, user) or owner
127
127
128 def check_user_change_status(self, pull_request, user, api=False):
128 def check_user_change_status(self, pull_request, user, api=False):
129 reviewer = user.user_id in [x.user_id for x in
129 reviewer = user.user_id in [x.user_id for x in
130 pull_request.reviewers]
130 pull_request.reviewers]
131 return self.check_user_update(pull_request, user, api) or reviewer
131 return self.check_user_update(pull_request, user, api) or reviewer
132
132
133 def check_user_comment(self, pull_request, user):
133 def check_user_comment(self, pull_request, user):
134 owner = user.user_id == pull_request.user_id
134 owner = user.user_id == pull_request.user_id
135 return self.check_user_read(pull_request, user) or owner
135 return self.check_user_read(pull_request, user) or owner
136
136
137 def get(self, pull_request):
137 def get(self, pull_request):
138 return self.__get_pull_request(pull_request)
138 return self.__get_pull_request(pull_request)
139
139
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 opened_by=None, order_by=None,
141 opened_by=None, order_by=None,
142 order_dir='desc', only_created=True):
142 order_dir='desc', only_created=True):
143 repo = None
143 repo = None
144 if repo_name:
144 if repo_name:
145 repo = self._get_repo(repo_name)
145 repo = self._get_repo(repo_name)
146
146
147 q = PullRequest.query()
147 q = PullRequest.query()
148
148
149 # source or target
149 # source or target
150 if repo and source:
150 if repo and source:
151 q = q.filter(PullRequest.source_repo == repo)
151 q = q.filter(PullRequest.source_repo == repo)
152 elif repo:
152 elif repo:
153 q = q.filter(PullRequest.target_repo == repo)
153 q = q.filter(PullRequest.target_repo == repo)
154
154
155 # closed,opened
155 # closed,opened
156 if statuses:
156 if statuses:
157 q = q.filter(PullRequest.status.in_(statuses))
157 q = q.filter(PullRequest.status.in_(statuses))
158
158
159 # opened by filter
159 # opened by filter
160 if opened_by:
160 if opened_by:
161 q = q.filter(PullRequest.user_id.in_(opened_by))
161 q = q.filter(PullRequest.user_id.in_(opened_by))
162
162
163 # only get those that are in "created" state
163 # only get those that are in "created" state
164 if only_created:
164 if only_created:
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166
166
167 if order_by:
167 if order_by:
168 order_map = {
168 order_map = {
169 'name_raw': PullRequest.pull_request_id,
169 'name_raw': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
170 'id': PullRequest.pull_request_id,
171 'title': PullRequest.title,
171 'title': PullRequest.title,
172 'updated_on_raw': PullRequest.updated_on,
172 'updated_on_raw': PullRequest.updated_on,
173 'target_repo': PullRequest.target_repo_id
173 'target_repo': PullRequest.target_repo_id
174 }
174 }
175 if order_dir == 'asc':
175 if order_dir == 'asc':
176 q = q.order_by(order_map[order_by].asc())
176 q = q.order_by(order_map[order_by].asc())
177 else:
177 else:
178 q = q.order_by(order_map[order_by].desc())
178 q = q.order_by(order_map[order_by].desc())
179
179
180 return q
180 return q
181
181
182 def count_all(self, repo_name, source=False, statuses=None,
182 def count_all(self, repo_name, source=False, statuses=None,
183 opened_by=None):
183 opened_by=None):
184 """
184 """
185 Count the number of pull requests for a specific repository.
185 Count the number of pull requests for a specific repository.
186
186
187 :param repo_name: target or source repo
187 :param repo_name: target or source repo
188 :param source: boolean flag to specify if repo_name refers to source
188 :param source: boolean flag to specify if repo_name refers to source
189 :param statuses: list of pull request statuses
189 :param statuses: list of pull request statuses
190 :param opened_by: author user of the pull request
190 :param opened_by: author user of the pull request
191 :returns: int number of pull requests
191 :returns: int number of pull requests
192 """
192 """
193 q = self._prepare_get_all_query(
193 q = self._prepare_get_all_query(
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195
195
196 return q.count()
196 return q.count()
197
197
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 offset=0, length=None, order_by=None, order_dir='desc'):
199 offset=0, length=None, order_by=None, order_dir='desc'):
200 """
200 """
201 Get all pull requests for a specific repository.
201 Get all pull requests for a specific repository.
202
202
203 :param repo_name: target or source repo
203 :param repo_name: target or source repo
204 :param source: boolean flag to specify if repo_name refers to source
204 :param source: boolean flag to specify if repo_name refers to source
205 :param statuses: list of pull request statuses
205 :param statuses: list of pull request statuses
206 :param opened_by: author user of the pull request
206 :param opened_by: author user of the pull request
207 :param offset: pagination offset
207 :param offset: pagination offset
208 :param length: length of returned list
208 :param length: length of returned list
209 :param order_by: order of the returned list
209 :param order_by: order of the returned list
210 :param order_dir: 'asc' or 'desc' ordering direction
210 :param order_dir: 'asc' or 'desc' ordering direction
211 :returns: list of pull requests
211 :returns: list of pull requests
212 """
212 """
213 q = self._prepare_get_all_query(
213 q = self._prepare_get_all_query(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 order_by=order_by, order_dir=order_dir)
215 order_by=order_by, order_dir=order_dir)
216
216
217 if length:
217 if length:
218 pull_requests = q.limit(length).offset(offset).all()
218 pull_requests = q.limit(length).offset(offset).all()
219 else:
219 else:
220 pull_requests = q.all()
220 pull_requests = q.all()
221
221
222 return pull_requests
222 return pull_requests
223
223
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 opened_by=None):
225 opened_by=None):
226 """
226 """
227 Count the number of pull requests for a specific repository that are
227 Count the number of pull requests for a specific repository that are
228 awaiting review.
228 awaiting review.
229
229
230 :param repo_name: target or source repo
230 :param repo_name: target or source repo
231 :param source: boolean flag to specify if repo_name refers to source
231 :param source: boolean flag to specify if repo_name refers to source
232 :param statuses: list of pull request statuses
232 :param statuses: list of pull request statuses
233 :param opened_by: author user of the pull request
233 :param opened_by: author user of the pull request
234 :returns: int number of pull requests
234 :returns: int number of pull requests
235 """
235 """
236 pull_requests = self.get_awaiting_review(
236 pull_requests = self.get_awaiting_review(
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238
238
239 return len(pull_requests)
239 return len(pull_requests)
240
240
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 opened_by=None, offset=0, length=None,
242 opened_by=None, offset=0, length=None,
243 order_by=None, order_dir='desc'):
243 order_by=None, order_dir='desc'):
244 """
244 """
245 Get all pull requests for a specific repository that are awaiting
245 Get all pull requests for a specific repository that are awaiting
246 review.
246 review.
247
247
248 :param repo_name: target or source repo
248 :param repo_name: target or source repo
249 :param source: boolean flag to specify if repo_name refers to source
249 :param source: boolean flag to specify if repo_name refers to source
250 :param statuses: list of pull request statuses
250 :param statuses: list of pull request statuses
251 :param opened_by: author user of the pull request
251 :param opened_by: author user of the pull request
252 :param offset: pagination offset
252 :param offset: pagination offset
253 :param length: length of returned list
253 :param length: length of returned list
254 :param order_by: order of the returned list
254 :param order_by: order of the returned list
255 :param order_dir: 'asc' or 'desc' ordering direction
255 :param order_dir: 'asc' or 'desc' ordering direction
256 :returns: list of pull requests
256 :returns: list of pull requests
257 """
257 """
258 pull_requests = self.get_all(
258 pull_requests = self.get_all(
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 order_by=order_by, order_dir=order_dir)
260 order_by=order_by, order_dir=order_dir)
261
261
262 _filtered_pull_requests = []
262 _filtered_pull_requests = []
263 for pr in pull_requests:
263 for pr in pull_requests:
264 status = pr.calculated_review_status()
264 status = pr.calculated_review_status()
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 _filtered_pull_requests.append(pr)
267 _filtered_pull_requests.append(pr)
268 if length:
268 if length:
269 return _filtered_pull_requests[offset:offset+length]
269 return _filtered_pull_requests[offset:offset+length]
270 else:
270 else:
271 return _filtered_pull_requests
271 return _filtered_pull_requests
272
272
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 opened_by=None, user_id=None):
274 opened_by=None, user_id=None):
275 """
275 """
276 Count the number of pull requests for a specific repository that are
276 Count the number of pull requests for a specific repository that are
277 awaiting review from a specific user.
277 awaiting review from a specific user.
278
278
279 :param repo_name: target or source repo
279 :param repo_name: target or source repo
280 :param source: boolean flag to specify if repo_name refers to source
280 :param source: boolean flag to specify if repo_name refers to source
281 :param statuses: list of pull request statuses
281 :param statuses: list of pull request statuses
282 :param opened_by: author user of the pull request
282 :param opened_by: author user of the pull request
283 :param user_id: reviewer user of the pull request
283 :param user_id: reviewer user of the pull request
284 :returns: int number of pull requests
284 :returns: int number of pull requests
285 """
285 """
286 pull_requests = self.get_awaiting_my_review(
286 pull_requests = self.get_awaiting_my_review(
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 user_id=user_id)
288 user_id=user_id)
289
289
290 return len(pull_requests)
290 return len(pull_requests)
291
291
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 opened_by=None, user_id=None, offset=0,
293 opened_by=None, user_id=None, offset=0,
294 length=None, order_by=None, order_dir='desc'):
294 length=None, order_by=None, order_dir='desc'):
295 """
295 """
296 Get all pull requests for a specific repository that are awaiting
296 Get all pull requests for a specific repository that are awaiting
297 review from a specific user.
297 review from a specific user.
298
298
299 :param repo_name: target or source repo
299 :param repo_name: target or source repo
300 :param source: boolean flag to specify if repo_name refers to source
300 :param source: boolean flag to specify if repo_name refers to source
301 :param statuses: list of pull request statuses
301 :param statuses: list of pull request statuses
302 :param opened_by: author user of the pull request
302 :param opened_by: author user of the pull request
303 :param user_id: reviewer user of the pull request
303 :param user_id: reviewer user of the pull request
304 :param offset: pagination offset
304 :param offset: pagination offset
305 :param length: length of returned list
305 :param length: length of returned list
306 :param order_by: order of the returned list
306 :param order_by: order of the returned list
307 :param order_dir: 'asc' or 'desc' ordering direction
307 :param order_dir: 'asc' or 'desc' ordering direction
308 :returns: list of pull requests
308 :returns: list of pull requests
309 """
309 """
310 pull_requests = self.get_all(
310 pull_requests = self.get_all(
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 order_by=order_by, order_dir=order_dir)
312 order_by=order_by, order_dir=order_dir)
313
313
314 _my = PullRequestModel().get_not_reviewed(user_id)
314 _my = PullRequestModel().get_not_reviewed(user_id)
315 my_participation = []
315 my_participation = []
316 for pr in pull_requests:
316 for pr in pull_requests:
317 if pr in _my:
317 if pr in _my:
318 my_participation.append(pr)
318 my_participation.append(pr)
319 _filtered_pull_requests = my_participation
319 _filtered_pull_requests = my_participation
320 if length:
320 if length:
321 return _filtered_pull_requests[offset:offset+length]
321 return _filtered_pull_requests[offset:offset+length]
322 else:
322 else:
323 return _filtered_pull_requests
323 return _filtered_pull_requests
324
324
325 def get_not_reviewed(self, user_id):
325 def get_not_reviewed(self, user_id):
326 return [
326 return [
327 x.pull_request for x in PullRequestReviewers.query().filter(
327 x.pull_request for x in PullRequestReviewers.query().filter(
328 PullRequestReviewers.user_id == user_id).all()
328 PullRequestReviewers.user_id == user_id).all()
329 ]
329 ]
330
330
331 def _prepare_participating_query(self, user_id=None, statuses=None,
331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 order_by=None, order_dir='desc'):
332 order_by=None, order_dir='desc'):
333 q = PullRequest.query()
333 q = PullRequest.query()
334 if user_id:
334 if user_id:
335 reviewers_subquery = Session().query(
335 reviewers_subquery = Session().query(
336 PullRequestReviewers.pull_request_id).filter(
336 PullRequestReviewers.pull_request_id).filter(
337 PullRequestReviewers.user_id == user_id).subquery()
337 PullRequestReviewers.user_id == user_id).subquery()
338 user_filter = or_(
338 user_filter = or_(
339 PullRequest.user_id == user_id,
339 PullRequest.user_id == user_id,
340 PullRequest.pull_request_id.in_(reviewers_subquery)
340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 )
341 )
342 q = PullRequest.query().filter(user_filter)
342 q = PullRequest.query().filter(user_filter)
343
343
344 # closed,opened
344 # closed,opened
345 if statuses:
345 if statuses:
346 q = q.filter(PullRequest.status.in_(statuses))
346 q = q.filter(PullRequest.status.in_(statuses))
347
347
348 if order_by:
348 if order_by:
349 order_map = {
349 order_map = {
350 'name_raw': PullRequest.pull_request_id,
350 'name_raw': PullRequest.pull_request_id,
351 'title': PullRequest.title,
351 'title': PullRequest.title,
352 'updated_on_raw': PullRequest.updated_on,
352 'updated_on_raw': PullRequest.updated_on,
353 'target_repo': PullRequest.target_repo_id
353 'target_repo': PullRequest.target_repo_id
354 }
354 }
355 if order_dir == 'asc':
355 if order_dir == 'asc':
356 q = q.order_by(order_map[order_by].asc())
356 q = q.order_by(order_map[order_by].asc())
357 else:
357 else:
358 q = q.order_by(order_map[order_by].desc())
358 q = q.order_by(order_map[order_by].desc())
359
359
360 return q
360 return q
361
361
362 def count_im_participating_in(self, user_id=None, statuses=None):
362 def count_im_participating_in(self, user_id=None, statuses=None):
363 q = self._prepare_participating_query(user_id, statuses=statuses)
363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 return q.count()
364 return q.count()
365
365
366 def get_im_participating_in(
366 def get_im_participating_in(
367 self, user_id=None, statuses=None, offset=0,
367 self, user_id=None, statuses=None, offset=0,
368 length=None, order_by=None, order_dir='desc'):
368 length=None, order_by=None, order_dir='desc'):
369 """
369 """
370 Get all Pull requests that i'm participating in, or i have opened
370 Get all Pull requests that i'm participating in, or i have opened
371 """
371 """
372
372
373 q = self._prepare_participating_query(
373 q = self._prepare_participating_query(
374 user_id, statuses=statuses, order_by=order_by,
374 user_id, statuses=statuses, order_by=order_by,
375 order_dir=order_dir)
375 order_dir=order_dir)
376
376
377 if length:
377 if length:
378 pull_requests = q.limit(length).offset(offset).all()
378 pull_requests = q.limit(length).offset(offset).all()
379 else:
379 else:
380 pull_requests = q.all()
380 pull_requests = q.all()
381
381
382 return pull_requests
382 return pull_requests
383
383
384 def get_versions(self, pull_request):
384 def get_versions(self, pull_request):
385 """
385 """
386 returns version of pull request sorted by ID descending
386 returns version of pull request sorted by ID descending
387 """
387 """
388 return PullRequestVersion.query()\
388 return PullRequestVersion.query()\
389 .filter(PullRequestVersion.pull_request == pull_request)\
389 .filter(PullRequestVersion.pull_request == pull_request)\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 .all()
391 .all()
392
392
393 def get_pr_version(self, pull_request_id, version=None):
393 def get_pr_version(self, pull_request_id, version=None):
394 at_version = None
394 at_version = None
395
395
396 if version and version == 'latest':
396 if version and version == 'latest':
397 pull_request_ver = PullRequest.get(pull_request_id)
397 pull_request_ver = PullRequest.get(pull_request_id)
398 pull_request_obj = pull_request_ver
398 pull_request_obj = pull_request_ver
399 _org_pull_request_obj = pull_request_obj
399 _org_pull_request_obj = pull_request_obj
400 at_version = 'latest'
400 at_version = 'latest'
401 elif version:
401 elif version:
402 pull_request_ver = PullRequestVersion.get_or_404(version)
402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 pull_request_obj = pull_request_ver
403 pull_request_obj = pull_request_ver
404 _org_pull_request_obj = pull_request_ver.pull_request
404 _org_pull_request_obj = pull_request_ver.pull_request
405 at_version = pull_request_ver.pull_request_version_id
405 at_version = pull_request_ver.pull_request_version_id
406 else:
406 else:
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 pull_request_id)
408 pull_request_id)
409
409
410 pull_request_display_obj = PullRequest.get_pr_display_object(
410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 pull_request_obj, _org_pull_request_obj)
411 pull_request_obj, _org_pull_request_obj)
412
412
413 return _org_pull_request_obj, pull_request_obj, \
413 return _org_pull_request_obj, pull_request_obj, \
414 pull_request_display_obj, at_version
414 pull_request_display_obj, at_version
415
415
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request, register its reviewers, set the initial
        review status, and run an initial merge simulation.

        :param created_by: user (or user id) creating the pull request
        :param source_repo: source repository (or name/id)
        :param source_ref: source reference string
        :param target_repo: target repository (or name/id)
        :param target_ref: target reference string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, rules)
            tuples describing the reviewers to attach
        :param title: pull request title
        :param description: optional description text
        :param description_renderer: optional renderer name for description
        :param reviewer_data: opaque reviewer rule data stored on the PR
        :param translator: optional translation function; defaults to the
            current request's translator
        :param auth_user: acting auth user; defaults to the creator
        :returns: the created PullRequest instance
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        # build the pull request in the CREATING state; it is flipped to
        # CREATED only after the merge simulation below succeeds
        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
        Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED):
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
527
527
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 pull_request = self.__get_pull_request(pull_request)
529 pull_request = self.__get_pull_request(pull_request)
530 target_scm = pull_request.target_repo.scm_instance()
530 target_scm = pull_request.target_repo.scm_instance()
531 if action == 'create':
531 if action == 'create':
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 elif action == 'merge':
533 elif action == 'merge':
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 elif action == 'close':
535 elif action == 'close':
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 elif action == 'review_status_change':
537 elif action == 'review_status_change':
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 elif action == 'update':
539 elif action == 'update':
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 elif action == 'comment':
541 elif action == 'comment':
542 # dummy hook ! for comment. We want this function to handle all cases
542 # dummy hook ! for comment. We want this function to handle all cases
543 def trigger_hook(*args, **kwargs):
543 def trigger_hook(*args, **kwargs):
544 pass
544 pass
545 comment = data['comment']
545 comment = data['comment']
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 else:
547 else:
548 return
548 return
549
549
550 trigger_hook(
550 trigger_hook(
551 username=user.username,
551 username=user.username,
552 repo_name=pull_request.target_repo.repo_name,
552 repo_name=pull_request.target_repo.repo_name,
553 repo_alias=target_scm.alias,
553 repo_alias=target_scm.alias,
554 pull_request=pull_request,
554 pull_request=pull_request,
555 data=data)
555 data=data)
556
556
557 def _get_commit_ids(self, pull_request):
557 def _get_commit_ids(self, pull_request):
558 """
558 """
559 Return the commit ids of the merged pull request.
559 Return the commit ids of the merged pull request.
560
560
561 This method is not dealing correctly yet with the lack of autoupdates
561 This method is not dealing correctly yet with the lack of autoupdates
562 nor with the implicit target updates.
562 nor with the implicit target updates.
563 For example: if a commit in the source repo is already in the target it
563 For example: if a commit in the source repo is already in the target it
564 will be reported anyways.
564 will be reported anyways.
565 """
565 """
566 merge_rev = pull_request.merge_rev
566 merge_rev = pull_request.merge_rev
567 if merge_rev is None:
567 if merge_rev is None:
568 raise ValueError('This pull request was not merged yet')
568 raise ValueError('This pull request was not merged yet')
569
569
570 commit_ids = list(pull_request.revisions)
570 commit_ids = list(pull_request.revisions)
571 if merge_rev not in commit_ids:
571 if merge_rev not in commit_ids:
572 commit_ids.append(merge_rev)
572 commit_ids.append(merge_rev)
573
573
574 return commit_ids
574 return commit_ids
575
575
576 def merge_repo(self, pull_request, user, extras):
576 def merge_repo(self, pull_request, user, extras):
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 extras['user_agent'] = 'internal-merge'
578 extras['user_agent'] = 'internal-merge'
579 merge_state = self._merge_pull_request(pull_request, user, extras)
579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 if merge_state.executed:
580 if merge_state.executed:
581 log.debug("Merge was successful, updating the pull request comments.")
581 log.debug("Merge was successful, updating the pull request comments.")
582 self._comment_and_close_pr(pull_request, user, merge_state)
582 self._comment_and_close_pr(pull_request, user, merge_state)
583
583
584 self._log_audit_action(
584 self._log_audit_action(
585 'repo.pull_request.merge',
585 'repo.pull_request.merge',
586 {'merge_state': merge_state.__dict__},
586 {'merge_state': merge_state.__dict__},
587 user, pull_request)
587 user, pull_request)
588
588
589 else:
589 else:
590 log.warn("Merge failed, not updating the pull request.")
590 log.warn("Merge failed, not updating the pull request.")
591 return merge_state
591 return merge_state
592
592
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS-level merge of a pull request, running inside
        a hook callback daemon so server-side hooks fire.

        :param pull_request: pull request to merge
        :param user: user performing the merge (used for author info)
        :param extras: hook extras dict; augmented by the callback daemon
        :param merge_msg: optional merge message template; falls back to
            the configured ``MERGE_MESSAGE_TMPL``
        :returns: merge state object from the VCS backend's ``merge`` call
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the (given or default) template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        # make sure the target ref points at a current commit before merging
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
633
633
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 pull_request.updated_on = datetime.datetime.now()
636 pull_request.updated_on = datetime.datetime.now()
637 close_msg = close_msg or 'Pull request merged and closed'
637 close_msg = close_msg or 'Pull request merged and closed'
638
638
639 CommentsModel().create(
639 CommentsModel().create(
640 text=safe_unicode(close_msg),
640 text=safe_unicode(close_msg),
641 repo=pull_request.target_repo.repo_id,
641 repo=pull_request.target_repo.repo_id,
642 user=user.user_id,
642 user=user.user_id,
643 pull_request=pull_request.pull_request_id,
643 pull_request=pull_request.pull_request_id,
644 f_path=None,
644 f_path=None,
645 line_no=None,
645 line_no=None,
646 closing_pr=True
646 closing_pr=True
647 )
647 )
648
648
649 Session().add(pull_request)
649 Session().add(pull_request)
650 Session().flush()
650 Session().flush()
651 # TODO: paris: replace invalidation with less radical solution
651 # TODO: paris: replace invalidation with less radical solution
652 ScmModel().mark_for_invalidation(
652 ScmModel().mark_for_invalidation(
653 pull_request.target_repo.repo_name)
653 pull_request.target_repo.repo_name)
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655
655
656 def has_valid_update_type(self, pull_request):
656 def has_valid_update_type(self, pull_request):
657 source_ref_type = pull_request.source_ref_parts.type
657 source_ref_type = pull_request.source_ref_parts.type
658 return source_ref_type in self.REF_TYPES
658 return source_ref_type in self.REF_TYPES
659
659
660 def update_commits(self, pull_request):
660 def update_commits(self, pull_request):
661 """
661 """
662 Get the updated list of commits for the pull request
662 Get the updated list of commits for the pull request
663 and return the new pull request version and the list
663 and return the new pull request version and the list
664 of commits processed by this update action
664 of commits processed by this update action
665 """
665 """
666 pull_request = self.__get_pull_request(pull_request)
666 pull_request = self.__get_pull_request(pull_request)
667 source_ref_type = pull_request.source_ref_parts.type
667 source_ref_type = pull_request.source_ref_parts.type
668 source_ref_name = pull_request.source_ref_parts.name
668 source_ref_name = pull_request.source_ref_parts.name
669 source_ref_id = pull_request.source_ref_parts.commit_id
669 source_ref_id = pull_request.source_ref_parts.commit_id
670
670
671 target_ref_type = pull_request.target_ref_parts.type
671 target_ref_type = pull_request.target_ref_parts.type
672 target_ref_name = pull_request.target_ref_parts.name
672 target_ref_name = pull_request.target_ref_parts.name
673 target_ref_id = pull_request.target_ref_parts.commit_id
673 target_ref_id = pull_request.target_ref_parts.commit_id
674
674
675 if not self.has_valid_update_type(pull_request):
675 if not self.has_valid_update_type(pull_request):
676 log.debug("Skipping update of pull request %s due to ref type: %s",
676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 pull_request, source_ref_type)
677 pull_request, source_ref_type)
678 return UpdateResponse(
678 return UpdateResponse(
679 executed=False,
679 executed=False,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 old=pull_request, new=None, changes=None,
681 old=pull_request, new=None, changes=None,
682 source_changed=False, target_changed=False)
682 source_changed=False, target_changed=False)
683
683
684 # source repo
684 # source repo
685 source_repo = pull_request.source_repo.scm_instance()
685 source_repo = pull_request.source_repo.scm_instance()
686 source_repo.count() # cache rebuild
687
686
688 try:
687 try:
689 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 source_commit = source_repo.get_commit(commit_id=source_ref_name)
690 except CommitDoesNotExistError:
689 except CommitDoesNotExistError:
691 return UpdateResponse(
690 return UpdateResponse(
692 executed=False,
691 executed=False,
693 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 reason=UpdateFailureReason.MISSING_SOURCE_REF,
694 old=pull_request, new=None, changes=None,
693 old=pull_request, new=None, changes=None,
695 source_changed=False, target_changed=False)
694 source_changed=False, target_changed=False)
696
695
697 source_changed = source_ref_id != source_commit.raw_id
696 source_changed = source_ref_id != source_commit.raw_id
698
697
699 # target repo
698 # target repo
700 target_repo = pull_request.target_repo.scm_instance()
699 target_repo = pull_request.target_repo.scm_instance()
701 target_repo.count() # cache rebuild
702
700
703 try:
701 try:
704 target_commit = target_repo.get_commit(commit_id=target_ref_name)
702 target_commit = target_repo.get_commit(commit_id=target_ref_name)
705 except CommitDoesNotExistError:
703 except CommitDoesNotExistError:
706 return UpdateResponse(
704 return UpdateResponse(
707 executed=False,
705 executed=False,
708 reason=UpdateFailureReason.MISSING_TARGET_REF,
706 reason=UpdateFailureReason.MISSING_TARGET_REF,
709 old=pull_request, new=None, changes=None,
707 old=pull_request, new=None, changes=None,
710 source_changed=False, target_changed=False)
708 source_changed=False, target_changed=False)
711 target_changed = target_ref_id != target_commit.raw_id
709 target_changed = target_ref_id != target_commit.raw_id
712
710
713 if not (source_changed or target_changed):
711 if not (source_changed or target_changed):
714 log.debug("Nothing changed in pull request %s", pull_request)
712 log.debug("Nothing changed in pull request %s", pull_request)
715 return UpdateResponse(
713 return UpdateResponse(
716 executed=False,
714 executed=False,
717 reason=UpdateFailureReason.NO_CHANGE,
715 reason=UpdateFailureReason.NO_CHANGE,
718 old=pull_request, new=None, changes=None,
716 old=pull_request, new=None, changes=None,
719 source_changed=target_changed, target_changed=source_changed)
717 source_changed=target_changed, target_changed=source_changed)
720
718
721 change_in_found = 'target repo' if target_changed else 'source repo'
719 change_in_found = 'target repo' if target_changed else 'source repo'
722 log.debug('Updating pull request because of change in %s detected',
720 log.debug('Updating pull request because of change in %s detected',
723 change_in_found)
721 change_in_found)
724
722
725 # Finally there is a need for an update, in case of source change
723 # Finally there is a need for an update, in case of source change
726 # we create a new version, else just an update
724 # we create a new version, else just an update
727 if source_changed:
725 if source_changed:
728 pull_request_version = self._create_version_from_snapshot(pull_request)
726 pull_request_version = self._create_version_from_snapshot(pull_request)
729 self._link_comments_to_version(pull_request_version)
727 self._link_comments_to_version(pull_request_version)
730 else:
728 else:
731 try:
729 try:
732 ver = pull_request.versions[-1]
730 ver = pull_request.versions[-1]
733 except IndexError:
731 except IndexError:
734 ver = None
732 ver = None
735
733
736 pull_request.pull_request_version_id = \
734 pull_request.pull_request_version_id = \
737 ver.pull_request_version_id if ver else None
735 ver.pull_request_version_id if ver else None
738 pull_request_version = pull_request
736 pull_request_version = pull_request
739
737
740 try:
738 try:
741 if target_ref_type in self.REF_TYPES:
739 if target_ref_type in self.REF_TYPES:
742 target_commit = target_repo.get_commit(target_ref_name)
740 target_commit = target_repo.get_commit(target_ref_name)
743 else:
741 else:
744 target_commit = target_repo.get_commit(target_ref_id)
742 target_commit = target_repo.get_commit(target_ref_id)
745 except CommitDoesNotExistError:
743 except CommitDoesNotExistError:
746 return UpdateResponse(
744 return UpdateResponse(
747 executed=False,
745 executed=False,
748 reason=UpdateFailureReason.MISSING_TARGET_REF,
746 reason=UpdateFailureReason.MISSING_TARGET_REF,
749 old=pull_request, new=None, changes=None,
747 old=pull_request, new=None, changes=None,
750 source_changed=source_changed, target_changed=target_changed)
748 source_changed=source_changed, target_changed=target_changed)
751
749
752 # re-compute commit ids
750 # re-compute commit ids
753 old_commit_ids = pull_request.revisions
751 old_commit_ids = pull_request.revisions
754 pre_load = ["author", "branch", "date", "message"]
752 pre_load = ["author", "branch", "date", "message"]
755 commit_ranges = target_repo.compare(
753 commit_ranges = target_repo.compare(
756 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
754 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
757 pre_load=pre_load)
755 pre_load=pre_load)
758
756
759 ancestor = source_repo.get_common_ancestor(
757 ancestor = source_repo.get_common_ancestor(
760 source_commit.raw_id, target_commit.raw_id, target_repo)
758 source_commit.raw_id, target_commit.raw_id, target_repo)
761
759
762 pull_request.source_ref = '%s:%s:%s' % (
760 pull_request.source_ref = '%s:%s:%s' % (
763 source_ref_type, source_ref_name, source_commit.raw_id)
761 source_ref_type, source_ref_name, source_commit.raw_id)
764 pull_request.target_ref = '%s:%s:%s' % (
762 pull_request.target_ref = '%s:%s:%s' % (
765 target_ref_type, target_ref_name, ancestor)
763 target_ref_type, target_ref_name, ancestor)
766
764
767 pull_request.revisions = [
765 pull_request.revisions = [
768 commit.raw_id for commit in reversed(commit_ranges)]
766 commit.raw_id for commit in reversed(commit_ranges)]
769 pull_request.updated_on = datetime.datetime.now()
767 pull_request.updated_on = datetime.datetime.now()
770 Session().add(pull_request)
768 Session().add(pull_request)
771 new_commit_ids = pull_request.revisions
769 new_commit_ids = pull_request.revisions
772
770
773 old_diff_data, new_diff_data = self._generate_update_diffs(
771 old_diff_data, new_diff_data = self._generate_update_diffs(
774 pull_request, pull_request_version)
772 pull_request, pull_request_version)
775
773
776 # calculate commit and file changes
774 # calculate commit and file changes
777 changes = self._calculate_commit_id_changes(
775 changes = self._calculate_commit_id_changes(
778 old_commit_ids, new_commit_ids)
776 old_commit_ids, new_commit_ids)
779 file_changes = self._calculate_file_changes(
777 file_changes = self._calculate_file_changes(
780 old_diff_data, new_diff_data)
778 old_diff_data, new_diff_data)
781
779
782 # set comments as outdated if DIFFS changed
780 # set comments as outdated if DIFFS changed
783 CommentsModel().outdate_comments(
781 CommentsModel().outdate_comments(
784 pull_request, old_diff_data=old_diff_data,
782 pull_request, old_diff_data=old_diff_data,
785 new_diff_data=new_diff_data)
783 new_diff_data=new_diff_data)
786
784
787 commit_changes = (changes.added or changes.removed)
785 commit_changes = (changes.added or changes.removed)
788 file_node_changes = (
786 file_node_changes = (
789 file_changes.added or file_changes.modified or file_changes.removed)
787 file_changes.added or file_changes.modified or file_changes.removed)
790 pr_has_changes = commit_changes or file_node_changes
788 pr_has_changes = commit_changes or file_node_changes
791
789
792 # Add an automatic comment to the pull request, in case
790 # Add an automatic comment to the pull request, in case
793 # anything has changed
791 # anything has changed
794 if pr_has_changes:
792 if pr_has_changes:
795 update_comment = CommentsModel().create(
793 update_comment = CommentsModel().create(
796 text=self._render_update_message(changes, file_changes),
794 text=self._render_update_message(changes, file_changes),
797 repo=pull_request.target_repo,
795 repo=pull_request.target_repo,
798 user=pull_request.author,
796 user=pull_request.author,
799 pull_request=pull_request,
797 pull_request=pull_request,
800 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
798 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
801
799
802 # Update status to "Under Review" for added commits
800 # Update status to "Under Review" for added commits
803 for commit_id in changes.added:
801 for commit_id in changes.added:
804 ChangesetStatusModel().set_status(
802 ChangesetStatusModel().set_status(
805 repo=pull_request.source_repo,
803 repo=pull_request.source_repo,
806 status=ChangesetStatus.STATUS_UNDER_REVIEW,
804 status=ChangesetStatus.STATUS_UNDER_REVIEW,
807 comment=update_comment,
805 comment=update_comment,
808 user=pull_request.author,
806 user=pull_request.author,
809 pull_request=pull_request,
807 pull_request=pull_request,
810 revision=commit_id)
808 revision=commit_id)
811
809
812 log.debug(
810 log.debug(
813 'Updated pull request %s, added_ids: %s, common_ids: %s, '
811 'Updated pull request %s, added_ids: %s, common_ids: %s, '
814 'removed_ids: %s', pull_request.pull_request_id,
812 'removed_ids: %s', pull_request.pull_request_id,
815 changes.added, changes.common, changes.removed)
813 changes.added, changes.common, changes.removed)
816 log.debug(
814 log.debug(
817 'Updated pull request with the following file changes: %s',
815 'Updated pull request with the following file changes: %s',
818 file_changes)
816 file_changes)
819
817
820 log.info(
818 log.info(
821 "Updated pull request %s from commit %s to commit %s, "
819 "Updated pull request %s from commit %s to commit %s, "
822 "stored new version %s of this pull request.",
820 "stored new version %s of this pull request.",
823 pull_request.pull_request_id, source_ref_id,
821 pull_request.pull_request_id, source_ref_id,
824 pull_request.source_ref_parts.commit_id,
822 pull_request.source_ref_parts.commit_id,
825 pull_request_version.pull_request_version_id)
823 pull_request_version.pull_request_version_id)
826 Session().commit()
824 Session().commit()
827 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
825 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
828
826
829 return UpdateResponse(
827 return UpdateResponse(
830 executed=True, reason=UpdateFailureReason.NONE,
828 executed=True, reason=UpdateFailureReason.NONE,
831 old=pull_request, new=pull_request_version, changes=changes,
829 old=pull_request, new=pull_request_version, changes=changes,
832 source_changed=source_changed, target_changed=target_changed)
830 source_changed=source_changed, target_changed=target_changed)
833
831
834 def _create_version_from_snapshot(self, pull_request):
832 def _create_version_from_snapshot(self, pull_request):
835 version = PullRequestVersion()
833 version = PullRequestVersion()
836 version.title = pull_request.title
834 version.title = pull_request.title
837 version.description = pull_request.description
835 version.description = pull_request.description
838 version.status = pull_request.status
836 version.status = pull_request.status
839 version.pull_request_state = pull_request.pull_request_state
837 version.pull_request_state = pull_request.pull_request_state
840 version.created_on = datetime.datetime.now()
838 version.created_on = datetime.datetime.now()
841 version.updated_on = pull_request.updated_on
839 version.updated_on = pull_request.updated_on
842 version.user_id = pull_request.user_id
840 version.user_id = pull_request.user_id
843 version.source_repo = pull_request.source_repo
841 version.source_repo = pull_request.source_repo
844 version.source_ref = pull_request.source_ref
842 version.source_ref = pull_request.source_ref
845 version.target_repo = pull_request.target_repo
843 version.target_repo = pull_request.target_repo
846 version.target_ref = pull_request.target_ref
844 version.target_ref = pull_request.target_ref
847
845
848 version._last_merge_source_rev = pull_request._last_merge_source_rev
846 version._last_merge_source_rev = pull_request._last_merge_source_rev
849 version._last_merge_target_rev = pull_request._last_merge_target_rev
847 version._last_merge_target_rev = pull_request._last_merge_target_rev
850 version.last_merge_status = pull_request.last_merge_status
848 version.last_merge_status = pull_request.last_merge_status
851 version.shadow_merge_ref = pull_request.shadow_merge_ref
849 version.shadow_merge_ref = pull_request.shadow_merge_ref
852 version.merge_rev = pull_request.merge_rev
850 version.merge_rev = pull_request.merge_rev
853 version.reviewer_data = pull_request.reviewer_data
851 version.reviewer_data = pull_request.reviewer_data
854
852
855 version.revisions = pull_request.revisions
853 version.revisions = pull_request.revisions
856 version.pull_request = pull_request
854 version.pull_request = pull_request
857 Session().add(version)
855 Session().add(version)
858 Session().flush()
856 Session().flush()
859
857
860 return version
858 return version
861
859
862 def _generate_update_diffs(self, pull_request, pull_request_version):
860 def _generate_update_diffs(self, pull_request, pull_request_version):
863
861
864 diff_context = (
862 diff_context = (
865 self.DIFF_CONTEXT +
863 self.DIFF_CONTEXT +
866 CommentsModel.needed_extra_diff_context())
864 CommentsModel.needed_extra_diff_context())
867 hide_whitespace_changes = False
865 hide_whitespace_changes = False
868 source_repo = pull_request_version.source_repo
866 source_repo = pull_request_version.source_repo
869 source_ref_id = pull_request_version.source_ref_parts.commit_id
867 source_ref_id = pull_request_version.source_ref_parts.commit_id
870 target_ref_id = pull_request_version.target_ref_parts.commit_id
868 target_ref_id = pull_request_version.target_ref_parts.commit_id
871 old_diff = self._get_diff_from_pr_or_version(
869 old_diff = self._get_diff_from_pr_or_version(
872 source_repo, source_ref_id, target_ref_id,
870 source_repo, source_ref_id, target_ref_id,
873 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
871 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
874
872
875 source_repo = pull_request.source_repo
873 source_repo = pull_request.source_repo
876 source_ref_id = pull_request.source_ref_parts.commit_id
874 source_ref_id = pull_request.source_ref_parts.commit_id
877 target_ref_id = pull_request.target_ref_parts.commit_id
875 target_ref_id = pull_request.target_ref_parts.commit_id
878
876
879 new_diff = self._get_diff_from_pr_or_version(
877 new_diff = self._get_diff_from_pr_or_version(
880 source_repo, source_ref_id, target_ref_id,
878 source_repo, source_ref_id, target_ref_id,
881 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
879 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
882
880
883 old_diff_data = diffs.DiffProcessor(old_diff)
881 old_diff_data = diffs.DiffProcessor(old_diff)
884 old_diff_data.prepare()
882 old_diff_data.prepare()
885 new_diff_data = diffs.DiffProcessor(new_diff)
883 new_diff_data = diffs.DiffProcessor(new_diff)
886 new_diff_data.prepare()
884 new_diff_data.prepare()
887
885
888 return old_diff_data, new_diff_data
886 return old_diff_data, new_diff_data
889
887
890 def _link_comments_to_version(self, pull_request_version):
888 def _link_comments_to_version(self, pull_request_version):
891 """
889 """
892 Link all unlinked comments of this pull request to the given version.
890 Link all unlinked comments of this pull request to the given version.
893
891
894 :param pull_request_version: The `PullRequestVersion` to which
892 :param pull_request_version: The `PullRequestVersion` to which
895 the comments shall be linked.
893 the comments shall be linked.
896
894
897 """
895 """
898 pull_request = pull_request_version.pull_request
896 pull_request = pull_request_version.pull_request
899 comments = ChangesetComment.query()\
897 comments = ChangesetComment.query()\
900 .filter(
898 .filter(
901 # TODO: johbo: Should we query for the repo at all here?
899 # TODO: johbo: Should we query for the repo at all here?
902 # Pending decision on how comments of PRs are to be related
900 # Pending decision on how comments of PRs are to be related
903 # to either the source repo, the target repo or no repo at all.
901 # to either the source repo, the target repo or no repo at all.
904 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
902 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
905 ChangesetComment.pull_request == pull_request,
903 ChangesetComment.pull_request == pull_request,
906 ChangesetComment.pull_request_version == None)\
904 ChangesetComment.pull_request_version == None)\
907 .order_by(ChangesetComment.comment_id.asc())
905 .order_by(ChangesetComment.comment_id.asc())
908
906
909 # TODO: johbo: Find out why this breaks if it is done in a bulk
907 # TODO: johbo: Find out why this breaks if it is done in a bulk
910 # operation.
908 # operation.
911 for comment in comments:
909 for comment in comments:
912 comment.pull_request_version_id = (
910 comment.pull_request_version_id = (
913 pull_request_version.pull_request_version_id)
911 pull_request_version.pull_request_version_id)
914 Session().add(comment)
912 Session().add(comment)
915
913
916 def _calculate_commit_id_changes(self, old_ids, new_ids):
914 def _calculate_commit_id_changes(self, old_ids, new_ids):
917 added = [x for x in new_ids if x not in old_ids]
915 added = [x for x in new_ids if x not in old_ids]
918 common = [x for x in new_ids if x in old_ids]
916 common = [x for x in new_ids if x in old_ids]
919 removed = [x for x in old_ids if x not in new_ids]
917 removed = [x for x in old_ids if x not in new_ids]
920 total = new_ids
918 total = new_ids
921 return ChangeTuple(added, common, removed, total)
919 return ChangeTuple(added, common, removed, total)
922
920
923 def _calculate_file_changes(self, old_diff_data, new_diff_data):
921 def _calculate_file_changes(self, old_diff_data, new_diff_data):
924
922
925 old_files = OrderedDict()
923 old_files = OrderedDict()
926 for diff_data in old_diff_data.parsed_diff:
924 for diff_data in old_diff_data.parsed_diff:
927 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
925 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
928
926
929 added_files = []
927 added_files = []
930 modified_files = []
928 modified_files = []
931 removed_files = []
929 removed_files = []
932 for diff_data in new_diff_data.parsed_diff:
930 for diff_data in new_diff_data.parsed_diff:
933 new_filename = diff_data['filename']
931 new_filename = diff_data['filename']
934 new_hash = md5_safe(diff_data['raw_diff'])
932 new_hash = md5_safe(diff_data['raw_diff'])
935
933
936 old_hash = old_files.get(new_filename)
934 old_hash = old_files.get(new_filename)
937 if not old_hash:
935 if not old_hash:
938 # file is not present in old diff, means it's added
936 # file is not present in old diff, means it's added
939 added_files.append(new_filename)
937 added_files.append(new_filename)
940 else:
938 else:
941 if new_hash != old_hash:
939 if new_hash != old_hash:
942 modified_files.append(new_filename)
940 modified_files.append(new_filename)
943 # now remove a file from old, since we have seen it already
941 # now remove a file from old, since we have seen it already
944 del old_files[new_filename]
942 del old_files[new_filename]
945
943
946 # removed files is when there are present in old, but not in NEW,
944 # removed files is when there are present in old, but not in NEW,
947 # since we remove old files that are present in new diff, left-overs
945 # since we remove old files that are present in new diff, left-overs
948 # if any should be the removed files
946 # if any should be the removed files
949 removed_files.extend(old_files.keys())
947 removed_files.extend(old_files.keys())
950
948
951 return FileChangeTuple(added_files, modified_files, removed_files)
949 return FileChangeTuple(added_files, modified_files, removed_files)
952
950
953 def _render_update_message(self, changes, file_changes):
951 def _render_update_message(self, changes, file_changes):
954 """
952 """
955 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
953 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
956 so it's always looking the same disregarding on which default
954 so it's always looking the same disregarding on which default
957 renderer system is using.
955 renderer system is using.
958
956
959 :param changes: changes named tuple
957 :param changes: changes named tuple
960 :param file_changes: file changes named tuple
958 :param file_changes: file changes named tuple
961
959
962 """
960 """
963 new_status = ChangesetStatus.get_status_lbl(
961 new_status = ChangesetStatus.get_status_lbl(
964 ChangesetStatus.STATUS_UNDER_REVIEW)
962 ChangesetStatus.STATUS_UNDER_REVIEW)
965
963
966 changed_files = (
964 changed_files = (
967 file_changes.added + file_changes.modified + file_changes.removed)
965 file_changes.added + file_changes.modified + file_changes.removed)
968
966
969 params = {
967 params = {
970 'under_review_label': new_status,
968 'under_review_label': new_status,
971 'added_commits': changes.added,
969 'added_commits': changes.added,
972 'removed_commits': changes.removed,
970 'removed_commits': changes.removed,
973 'changed_files': changed_files,
971 'changed_files': changed_files,
974 'added_files': file_changes.added,
972 'added_files': file_changes.added,
975 'modified_files': file_changes.modified,
973 'modified_files': file_changes.modified,
976 'removed_files': file_changes.removed,
974 'removed_files': file_changes.removed,
977 }
975 }
978 renderer = RstTemplateRenderer()
976 renderer = RstTemplateRenderer()
979 return renderer.render('pull_request_update.mako', **params)
977 return renderer.render('pull_request_update.mako', **params)
980
978
981 def edit(self, pull_request, title, description, description_renderer, user):
979 def edit(self, pull_request, title, description, description_renderer, user):
982 pull_request = self.__get_pull_request(pull_request)
980 pull_request = self.__get_pull_request(pull_request)
983 old_data = pull_request.get_api_data(with_merge_state=False)
981 old_data = pull_request.get_api_data(with_merge_state=False)
984 if pull_request.is_closed():
982 if pull_request.is_closed():
985 raise ValueError('This pull request is closed')
983 raise ValueError('This pull request is closed')
986 if title:
984 if title:
987 pull_request.title = title
985 pull_request.title = title
988 pull_request.description = description
986 pull_request.description = description
989 pull_request.updated_on = datetime.datetime.now()
987 pull_request.updated_on = datetime.datetime.now()
990 pull_request.description_renderer = description_renderer
988 pull_request.description_renderer = description_renderer
991 Session().add(pull_request)
989 Session().add(pull_request)
992 self._log_audit_action(
990 self._log_audit_action(
993 'repo.pull_request.edit', {'old_data': old_data},
991 'repo.pull_request.edit', {'old_data': old_data},
994 user, pull_request)
992 user, pull_request)
995
993
996 def update_reviewers(self, pull_request, reviewer_data, user):
994 def update_reviewers(self, pull_request, reviewer_data, user):
997 """
995 """
998 Update the reviewers in the pull request
996 Update the reviewers in the pull request
999
997
1000 :param pull_request: the pr to update
998 :param pull_request: the pr to update
1001 :param reviewer_data: list of tuples
999 :param reviewer_data: list of tuples
1002 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1000 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1003 """
1001 """
1004 pull_request = self.__get_pull_request(pull_request)
1002 pull_request = self.__get_pull_request(pull_request)
1005 if pull_request.is_closed():
1003 if pull_request.is_closed():
1006 raise ValueError('This pull request is closed')
1004 raise ValueError('This pull request is closed')
1007
1005
1008 reviewers = {}
1006 reviewers = {}
1009 for user_id, reasons, mandatory, rules in reviewer_data:
1007 for user_id, reasons, mandatory, rules in reviewer_data:
1010 if isinstance(user_id, (int, compat.string_types)):
1008 if isinstance(user_id, (int, compat.string_types)):
1011 user_id = self._get_user(user_id).user_id
1009 user_id = self._get_user(user_id).user_id
1012 reviewers[user_id] = {
1010 reviewers[user_id] = {
1013 'reasons': reasons, 'mandatory': mandatory}
1011 'reasons': reasons, 'mandatory': mandatory}
1014
1012
1015 reviewers_ids = set(reviewers.keys())
1013 reviewers_ids = set(reviewers.keys())
1016 current_reviewers = PullRequestReviewers.query()\
1014 current_reviewers = PullRequestReviewers.query()\
1017 .filter(PullRequestReviewers.pull_request ==
1015 .filter(PullRequestReviewers.pull_request ==
1018 pull_request).all()
1016 pull_request).all()
1019 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1017 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1020
1018
1021 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1019 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1022 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1020 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1023
1021
1024 log.debug("Adding %s reviewers", ids_to_add)
1022 log.debug("Adding %s reviewers", ids_to_add)
1025 log.debug("Removing %s reviewers", ids_to_remove)
1023 log.debug("Removing %s reviewers", ids_to_remove)
1026 changed = False
1024 changed = False
1027 added_audit_reviewers = []
1025 added_audit_reviewers = []
1028 removed_audit_reviewers = []
1026 removed_audit_reviewers = []
1029
1027
1030 for uid in ids_to_add:
1028 for uid in ids_to_add:
1031 changed = True
1029 changed = True
1032 _usr = self._get_user(uid)
1030 _usr = self._get_user(uid)
1033 reviewer = PullRequestReviewers()
1031 reviewer = PullRequestReviewers()
1034 reviewer.user = _usr
1032 reviewer.user = _usr
1035 reviewer.pull_request = pull_request
1033 reviewer.pull_request = pull_request
1036 reviewer.reasons = reviewers[uid]['reasons']
1034 reviewer.reasons = reviewers[uid]['reasons']
1037 # NOTE(marcink): mandatory shouldn't be changed now
1035 # NOTE(marcink): mandatory shouldn't be changed now
1038 # reviewer.mandatory = reviewers[uid]['reasons']
1036 # reviewer.mandatory = reviewers[uid]['reasons']
1039 Session().add(reviewer)
1037 Session().add(reviewer)
1040 added_audit_reviewers.append(reviewer.get_dict())
1038 added_audit_reviewers.append(reviewer.get_dict())
1041
1039
1042 for uid in ids_to_remove:
1040 for uid in ids_to_remove:
1043 changed = True
1041 changed = True
1044 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1042 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1045 # that prevents and fixes cases that we added the same reviewer twice.
1043 # that prevents and fixes cases that we added the same reviewer twice.
1046 # this CAN happen due to the lack of DB checks
1044 # this CAN happen due to the lack of DB checks
1047 reviewers = PullRequestReviewers.query()\
1045 reviewers = PullRequestReviewers.query()\
1048 .filter(PullRequestReviewers.user_id == uid,
1046 .filter(PullRequestReviewers.user_id == uid,
1049 PullRequestReviewers.pull_request == pull_request)\
1047 PullRequestReviewers.pull_request == pull_request)\
1050 .all()
1048 .all()
1051
1049
1052 for obj in reviewers:
1050 for obj in reviewers:
1053 added_audit_reviewers.append(obj.get_dict())
1051 added_audit_reviewers.append(obj.get_dict())
1054 Session().delete(obj)
1052 Session().delete(obj)
1055
1053
1056 if changed:
1054 if changed:
1057 Session().expire_all()
1055 Session().expire_all()
1058 pull_request.updated_on = datetime.datetime.now()
1056 pull_request.updated_on = datetime.datetime.now()
1059 Session().add(pull_request)
1057 Session().add(pull_request)
1060
1058
1061 # finally store audit logs
1059 # finally store audit logs
1062 for user_data in added_audit_reviewers:
1060 for user_data in added_audit_reviewers:
1063 self._log_audit_action(
1061 self._log_audit_action(
1064 'repo.pull_request.reviewer.add', {'data': user_data},
1062 'repo.pull_request.reviewer.add', {'data': user_data},
1065 user, pull_request)
1063 user, pull_request)
1066 for user_data in removed_audit_reviewers:
1064 for user_data in removed_audit_reviewers:
1067 self._log_audit_action(
1065 self._log_audit_action(
1068 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1066 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1069 user, pull_request)
1067 user, pull_request)
1070
1068
1071 self.notify_reviewers(pull_request, ids_to_add)
1069 self.notify_reviewers(pull_request, ids_to_add)
1072 return ids_to_add, ids_to_remove
1070 return ids_to_add, ids_to_remove
1073
1071
1074 def get_url(self, pull_request, request=None, permalink=False):
1072 def get_url(self, pull_request, request=None, permalink=False):
1075 if not request:
1073 if not request:
1076 request = get_current_request()
1074 request = get_current_request()
1077
1075
1078 if permalink:
1076 if permalink:
1079 return request.route_url(
1077 return request.route_url(
1080 'pull_requests_global',
1078 'pull_requests_global',
1081 pull_request_id=pull_request.pull_request_id,)
1079 pull_request_id=pull_request.pull_request_id,)
1082 else:
1080 else:
1083 return request.route_url('pullrequest_show',
1081 return request.route_url('pullrequest_show',
1084 repo_name=safe_str(pull_request.target_repo.repo_name),
1082 repo_name=safe_str(pull_request.target_repo.repo_name),
1085 pull_request_id=pull_request.pull_request_id,)
1083 pull_request_id=pull_request.pull_request_id,)
1086
1084
1087 def get_shadow_clone_url(self, pull_request, request=None):
1085 def get_shadow_clone_url(self, pull_request, request=None):
1088 """
1086 """
1089 Returns qualified url pointing to the shadow repository. If this pull
1087 Returns qualified url pointing to the shadow repository. If this pull
1090 request is closed there is no shadow repository and ``None`` will be
1088 request is closed there is no shadow repository and ``None`` will be
1091 returned.
1089 returned.
1092 """
1090 """
1093 if pull_request.is_closed():
1091 if pull_request.is_closed():
1094 return None
1092 return None
1095 else:
1093 else:
1096 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1094 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1097 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1095 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1098
1096
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create an in-app notification (and email) for the given reviewer
        user ids about ``pull_request``.

        :param pull_request: the PullRequest the reviewers were added to
        :param reviewers_ids: iterable of user ids to notify; no-op if empty
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics: list of (raw_id, message) for every revision,
        # resolved against the source repo
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself; only subject and
        # plaintext body are needed here, the html parts are rendered later
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1157
1155
1158 def delete(self, pull_request, user):
1156 def delete(self, pull_request, user):
1159 pull_request = self.__get_pull_request(pull_request)
1157 pull_request = self.__get_pull_request(pull_request)
1160 old_data = pull_request.get_api_data(with_merge_state=False)
1158 old_data = pull_request.get_api_data(with_merge_state=False)
1161 self._cleanup_merge_workspace(pull_request)
1159 self._cleanup_merge_workspace(pull_request)
1162 self._log_audit_action(
1160 self._log_audit_action(
1163 'repo.pull_request.delete', {'old_data': old_data},
1161 'repo.pull_request.delete', {'old_data': old_data},
1164 user, pull_request)
1162 user, pull_request)
1165 Session().delete(pull_request)
1163 Session().delete(pull_request)
1166
1164
    def close_pull_request(self, pull_request, user):
        """
        Close ``pull_request``: remove its merge workspace, set the CLOSED
        status, trigger the 'close' hook and write an audit-log entry for
        ``user``.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        # hook fires after status flip so handlers see the CLOSED state
        self.trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1179
1177
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close ``pull_request`` while leaving a status-changing comment.

        The resulting changeset status is APPROVED only when the calculated
        review status of the PR is approved; otherwise REJECTED.

        :param message: optional comment text; a default "Closing with
            status change ..." message is used when not given
        :param auth_user: acting (authenticated) user for the comment
        :return: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1236
1234
    def merge_status(self, pull_request, translator=None,
                     force_shadow_repo_refresh=False):
        """
        Compute whether ``pull_request`` can be merged server-side.

        :param translator: optional translation function; falls back to the
            current request's translator
        :param force_shadow_repo_refresh: when True, always re-run the
            dry-run merge instead of using the cached merge state
        :return: tuple of (bool merge_possible, message)
        """
        _ = translator or get_current_request().translate

        # cheap preconditions first, before touching the vcs backend
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(
                pull_request,
                force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", resp)
            status = resp.possible, resp.merge_status_message
        except NotImplementedError:
            # backend without merge support (e.g. svn)
            status = False, _('Pull request merging is not supported.')

        return status
1261
1259
1262 def _check_repo_requirements(self, target, source, translator):
1260 def _check_repo_requirements(self, target, source, translator):
1263 """
1261 """
1264 Check if `target` and `source` have compatible requirements.
1262 Check if `target` and `source` have compatible requirements.
1265
1263
1266 Currently this is just checking for largefiles.
1264 Currently this is just checking for largefiles.
1267 """
1265 """
1268 _ = translator
1266 _ = translator
1269 target_has_largefiles = self._has_largefiles(target)
1267 target_has_largefiles = self._has_largefiles(target)
1270 source_has_largefiles = self._has_largefiles(source)
1268 source_has_largefiles = self._has_largefiles(source)
1271 merge_possible = True
1269 merge_possible = True
1272 message = u''
1270 message = u''
1273
1271
1274 if target_has_largefiles != source_has_largefiles:
1272 if target_has_largefiles != source_has_largefiles:
1275 merge_possible = False
1273 merge_possible = False
1276 if source_has_largefiles:
1274 if source_has_largefiles:
1277 message = _(
1275 message = _(
1278 'Target repository large files support is disabled.')
1276 'Target repository large files support is disabled.')
1279 else:
1277 else:
1280 message = _(
1278 message = _(
1281 'Source repository large files support is disabled.')
1279 'Source repository large files support is disabled.')
1282
1280
1283 return merge_possible, message
1281 return merge_possible, message
1284
1282
1285 def _has_largefiles(self, repo):
1283 def _has_largefiles(self, repo):
1286 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1284 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1287 'extensions', 'largefiles')
1285 'extensions', 'largefiles')
1288 return largefiles_ui and largefiles_ui[0].active
1286 return largefiles_ui and largefiles_ui[0].active
1289
1287
1290 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1288 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1291 """
1289 """
1292 Try to merge the pull request and return the merge status.
1290 Try to merge the pull request and return the merge status.
1293 """
1291 """
1294 log.debug(
1292 log.debug(
1295 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1293 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1296 pull_request.pull_request_id, force_shadow_repo_refresh)
1294 pull_request.pull_request_id, force_shadow_repo_refresh)
1297 target_vcs = pull_request.target_repo.scm_instance()
1295 target_vcs = pull_request.target_repo.scm_instance()
1298 # Refresh the target reference.
1296 # Refresh the target reference.
1299 try:
1297 try:
1300 target_ref = self._refresh_reference(
1298 target_ref = self._refresh_reference(
1301 pull_request.target_ref_parts, target_vcs)
1299 pull_request.target_ref_parts, target_vcs)
1302 except CommitDoesNotExistError:
1300 except CommitDoesNotExistError:
1303 merge_state = MergeResponse(
1301 merge_state = MergeResponse(
1304 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1302 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1305 metadata={'target_ref': pull_request.target_ref_parts})
1303 metadata={'target_ref': pull_request.target_ref_parts})
1306 return merge_state
1304 return merge_state
1307
1305
1308 target_locked = pull_request.target_repo.locked
1306 target_locked = pull_request.target_repo.locked
1309 if target_locked and target_locked[0]:
1307 if target_locked and target_locked[0]:
1310 locked_by = 'user:{}'.format(target_locked[0])
1308 locked_by = 'user:{}'.format(target_locked[0])
1311 log.debug("The target repository is locked by %s.", locked_by)
1309 log.debug("The target repository is locked by %s.", locked_by)
1312 merge_state = MergeResponse(
1310 merge_state = MergeResponse(
1313 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1311 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1314 metadata={'locked_by': locked_by})
1312 metadata={'locked_by': locked_by})
1315 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1313 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1316 pull_request, target_ref):
1314 pull_request, target_ref):
1317 log.debug("Refreshing the merge status of the repository.")
1315 log.debug("Refreshing the merge status of the repository.")
1318 merge_state = self._refresh_merge_state(
1316 merge_state = self._refresh_merge_state(
1319 pull_request, target_vcs, target_ref)
1317 pull_request, target_vcs, target_ref)
1320 else:
1318 else:
1321 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1319 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1322 metadata = {
1320 metadata = {
1323 'target_ref': pull_request.target_ref_parts,
1321 'target_ref': pull_request.target_ref_parts,
1324 'source_ref': pull_request.source_ref_parts,
1322 'source_ref': pull_request.source_ref_parts,
1325 }
1323 }
1326 if not possible and target_ref.type == 'branch':
1324 if not possible and target_ref.type == 'branch':
1327 # NOTE(marcink): case for mercurial multiple heads on branch
1325 # NOTE(marcink): case for mercurial multiple heads on branch
1328 heads = target_vcs._heads(target_ref.name)
1326 heads = target_vcs._heads(target_ref.name)
1329 if len(heads) != 1:
1327 if len(heads) != 1:
1330 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1328 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1331 metadata.update({
1329 metadata.update({
1332 'heads': heads
1330 'heads': heads
1333 })
1331 })
1334 merge_state = MergeResponse(
1332 merge_state = MergeResponse(
1335 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1333 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1336
1334
1337 return merge_state
1335 return merge_state
1338
1336
1339 def _refresh_reference(self, reference, vcs_repository):
1337 def _refresh_reference(self, reference, vcs_repository):
1340 if reference.type in self.UPDATABLE_REF_TYPES:
1338 if reference.type in self.UPDATABLE_REF_TYPES:
1341 name_or_id = reference.name
1339 name_or_id = reference.name
1342 else:
1340 else:
1343 name_or_id = reference.commit_id
1341 name_or_id = reference.commit_id
1344
1342
1345 vcs_repository.count() # cache rebuild
1346 refreshed_commit = vcs_repository.get_commit(name_or_id)
1343 refreshed_commit = vcs_repository.get_commit(name_or_id)
1347 refreshed_reference = Reference(
1344 refreshed_reference = Reference(
1348 reference.type, reference.name, refreshed_commit.raw_id)
1345 reference.type, reference.name, refreshed_commit.raw_id)
1349 return refreshed_reference
1346 return refreshed_reference
1350
1347
1351 def _needs_merge_state_refresh(self, pull_request, target_reference):
1348 def _needs_merge_state_refresh(self, pull_request, target_reference):
1352 return not(
1349 return not(
1353 pull_request.revisions and
1350 pull_request.revisions and
1354 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1351 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1355 target_reference.commit_id == pull_request._last_merge_target_rev)
1352 target_reference.commit_id == pull_request._last_merge_target_rev)
1356
1353
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge of ``pull_request`` into ``target_reference`` and
        persist the resulting merge state on the pull request.

        :return: MergeResponse from the vcs backend
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # remember which source/target revisions this state was computed
            # for, so _needs_merge_state_refresh can detect staleness
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1380
1377
1381 def _workspace_id(self, pull_request):
1378 def _workspace_id(self, pull_request):
1382 workspace_id = 'pr-%s' % pull_request.pull_request_id
1379 workspace_id = 'pr-%s' % pull_request.pull_request_id
1383 return workspace_id
1380 return workspace_id
1384
1381
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        """
        Build a dict describing ``repo`` (owner, name, link, description)
        plus its selectable refs, including a select2-ready grouped variant,
        for the pull request source/target selectors.

        :param commit_id: commit that must appear in the refs and be selected
        :param branch: branch that must appear and be selected
        :param bookmark: bookmark that must appear and be selected
        """
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        # reshape refs into select2's {text, children:[{id, text}]} groups
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            # description trimmed to its first line
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1416
1413
1417 def generate_pullrequest_title(self, source, source_ref, target):
1414 def generate_pullrequest_title(self, source, source_ref, target):
1418 return u'{source}#{at_ref} to {target}'.format(
1415 return u'{source}#{at_ref} to {target}'.format(
1419 source=source,
1416 source=source,
1420 at_ref=source_ref,
1417 at_ref=source_ref,
1421 target=target,
1418 target=target,
1422 )
1419 )
1423
1420
1424 def _cleanup_merge_workspace(self, pull_request):
1421 def _cleanup_merge_workspace(self, pull_request):
1425 # Merging related cleanup
1422 # Merging related cleanup
1426 repo_id = pull_request.target_repo.repo_id
1423 repo_id = pull_request.target_repo.repo_id
1427 target_scm = pull_request.target_repo.scm_instance()
1424 target_scm = pull_request.target_repo.scm_instance()
1428 workspace_id = self._workspace_id(pull_request)
1425 workspace_id = self._workspace_id(pull_request)
1429
1426
1430 try:
1427 try:
1431 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1428 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1432 except NotImplementedError:
1429 except NotImplementedError:
1433 pass
1430 pass
1434
1431
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :return: tuple of (groups, selected) where groups is a list of
            ([(ref_key, ref_name), ...], group_label) pairs and selected is
            the ``type:name:commit_id`` key of the default ref, or None
        :raises CommitDoesNotExistError: when an explicit ref was requested
            but no matching ref exists
        :raises EmptyRepositoryError: when the repo has no commits at all
        """
        _ = translator or get_current_request().translate

        # normalize to the expected str/unicode types
        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref_key format: "<type>:<name>:<commit_id>"
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                # first ref matching the requested commit/branch/bookmark wins
                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was asked for but never matched above
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # fall back to the default branch tip
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1500
1497
1501 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1498 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1502 hide_whitespace_changes, diff_context):
1499 hide_whitespace_changes, diff_context):
1503
1500
1504 return self._get_diff_from_pr_or_version(
1501 return self._get_diff_from_pr_or_version(
1505 source_repo, source_ref_id, target_ref_id,
1502 source_repo, source_ref_id, target_ref_id,
1506 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1503 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1507
1504
1508 def _get_diff_from_pr_or_version(
1505 def _get_diff_from_pr_or_version(
1509 self, source_repo, source_ref_id, target_ref_id,
1506 self, source_repo, source_ref_id, target_ref_id,
1510 hide_whitespace_changes, diff_context):
1507 hide_whitespace_changes, diff_context):
1511
1508
1512 target_commit = source_repo.get_commit(
1509 target_commit = source_repo.get_commit(
1513 commit_id=safe_str(target_ref_id))
1510 commit_id=safe_str(target_ref_id))
1514 source_commit = source_repo.get_commit(
1511 source_commit = source_repo.get_commit(
1515 commit_id=safe_str(source_ref_id))
1512 commit_id=safe_str(source_ref_id))
1516 if isinstance(source_repo, Repository):
1513 if isinstance(source_repo, Repository):
1517 vcs_repo = source_repo.scm_instance()
1514 vcs_repo = source_repo.scm_instance()
1518 else:
1515 else:
1519 vcs_repo = source_repo
1516 vcs_repo = source_repo
1520
1517
1521 # TODO: johbo: In the context of an update, we cannot reach
1518 # TODO: johbo: In the context of an update, we cannot reach
1522 # the old commit anymore with our normal mechanisms. It needs
1519 # the old commit anymore with our normal mechanisms. It needs
1523 # some sort of special support in the vcs layer to avoid this
1520 # some sort of special support in the vcs layer to avoid this
1524 # workaround.
1521 # workaround.
1525 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1522 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1526 vcs_repo.alias == 'git'):
1523 vcs_repo.alias == 'git'):
1527 source_commit.raw_id = safe_str(source_ref_id)
1524 source_commit.raw_id = safe_str(source_ref_id)
1528
1525
1529 log.debug('calculating diff between '
1526 log.debug('calculating diff between '
1530 'source_ref:%s and target_ref:%s for repo `%s`',
1527 'source_ref:%s and target_ref:%s for repo `%s`',
1531 target_ref_id, source_ref_id,
1528 target_ref_id, source_ref_id,
1532 safe_unicode(vcs_repo.path))
1529 safe_unicode(vcs_repo.path))
1533
1530
1534 vcs_diff = vcs_repo.get_diff(
1531 vcs_diff = vcs_repo.get_diff(
1535 commit1=target_commit, commit2=source_commit,
1532 commit1=target_commit, commit2=source_commit,
1536 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1533 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1537 return vcs_diff
1534 return vcs_diff
1538
1535
1539 def _is_merge_enabled(self, pull_request):
1536 def _is_merge_enabled(self, pull_request):
1540 return self._get_general_setting(
1537 return self._get_general_setting(
1541 pull_request, 'rhodecode_pr_merge_enabled')
1538 pull_request, 'rhodecode_pr_merge_enabled')
1542
1539
1543 def _use_rebase_for_merging(self, pull_request):
1540 def _use_rebase_for_merging(self, pull_request):
1544 repo_type = pull_request.target_repo.repo_type
1541 repo_type = pull_request.target_repo.repo_type
1545 if repo_type == 'hg':
1542 if repo_type == 'hg':
1546 return self._get_general_setting(
1543 return self._get_general_setting(
1547 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1544 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1548 elif repo_type == 'git':
1545 elif repo_type == 'git':
1549 return self._get_general_setting(
1546 return self._get_general_setting(
1550 pull_request, 'rhodecode_git_use_rebase_for_merging')
1547 pull_request, 'rhodecode_git_use_rebase_for_merging')
1551
1548
1552 return False
1549 return False
1553
1550
1554 def _close_branch_before_merging(self, pull_request):
1551 def _close_branch_before_merging(self, pull_request):
1555 repo_type = pull_request.target_repo.repo_type
1552 repo_type = pull_request.target_repo.repo_type
1556 if repo_type == 'hg':
1553 if repo_type == 'hg':
1557 return self._get_general_setting(
1554 return self._get_general_setting(
1558 pull_request, 'rhodecode_hg_close_branch_before_merging')
1555 pull_request, 'rhodecode_hg_close_branch_before_merging')
1559 elif repo_type == 'git':
1556 elif repo_type == 'git':
1560 return self._get_general_setting(
1557 return self._get_general_setting(
1561 pull_request, 'rhodecode_git_close_branch_before_merging')
1558 pull_request, 'rhodecode_git_close_branch_before_merging')
1562
1559
1563 return False
1560 return False
1564
1561
1565 def _get_general_setting(self, pull_request, settings_key, default=False):
1562 def _get_general_setting(self, pull_request, settings_key, default=False):
1566 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1563 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1567 settings = settings_model.get_general_settings()
1564 settings = settings_model.get_general_settings()
1568 return settings.get(settings_key, default)
1565 return settings.get(settings_key, default)
1569
1566
1570 def _log_audit_action(self, action, action_data, user, pull_request):
1567 def _log_audit_action(self, action, action_data, user, pull_request):
1571 audit_logger.store(
1568 audit_logger.store(
1572 action=action,
1569 action=action,
1573 action_data=action_data,
1570 action_data=action_data,
1574 user=user,
1571 user=user,
1575 repo=pull_request.target_repo)
1572 repo=pull_request.target_repo)
1576
1573
1577 def get_reviewer_functions(self):
1574 def get_reviewer_functions(self):
1578 """
1575 """
1579 Fetches functions for validation and fetching default reviewers.
1576 Fetches functions for validation and fetching default reviewers.
1580 If available we use the EE package, else we fallback to CE
1577 If available we use the EE package, else we fallback to CE
1581 package functions
1578 package functions
1582 """
1579 """
1583 try:
1580 try:
1584 from rc_reviewers.utils import get_default_reviewers_data
1581 from rc_reviewers.utils import get_default_reviewers_data
1585 from rc_reviewers.utils import validate_default_reviewers
1582 from rc_reviewers.utils import validate_default_reviewers
1586 except ImportError:
1583 except ImportError:
1587 from rhodecode.apps.repository.utils import get_default_reviewers_data
1584 from rhodecode.apps.repository.utils import get_default_reviewers_data
1588 from rhodecode.apps.repository.utils import validate_default_reviewers
1585 from rhodecode.apps.repository.utils import validate_default_reviewers
1589
1586
1590 return get_default_reviewers_data, validate_default_reviewers
1587 return get_default_reviewers_data, validate_default_reviewers
1591
1588
1592
1589
1593 class MergeCheck(object):
1590 class MergeCheck(object):
1594 """
1591 """
1595 Perform Merge Checks and returns a check object which stores information
1592 Perform Merge Checks and returns a check object which stores information
1596 about merge errors, and merge conditions
1593 about merge errors, and merge conditions
1597 """
1594 """
1598 TODO_CHECK = 'todo'
1595 TODO_CHECK = 'todo'
1599 PERM_CHECK = 'perm'
1596 PERM_CHECK = 'perm'
1600 REVIEW_CHECK = 'review'
1597 REVIEW_CHECK = 'review'
1601 MERGE_CHECK = 'merge'
1598 MERGE_CHECK = 'merge'
1602
1599
1603 def __init__(self):
1600 def __init__(self):
1604 self.review_status = None
1601 self.review_status = None
1605 self.merge_possible = None
1602 self.merge_possible = None
1606 self.merge_msg = ''
1603 self.merge_msg = ''
1607 self.failed = None
1604 self.failed = None
1608 self.errors = []
1605 self.errors = []
1609 self.error_details = OrderedDict()
1606 self.error_details = OrderedDict()
1610
1607
1611 def push_error(self, error_type, message, error_key, details):
1608 def push_error(self, error_type, message, error_key, details):
1612 self.failed = True
1609 self.failed = True
1613 self.errors.append([error_type, message])
1610 self.errors.append([error_type, message])
1614 self.error_details[error_key] = dict(
1611 self.error_details[error_key] = dict(
1615 details=details,
1612 details=details,
1616 error_type=error_type,
1613 error_type=error_type,
1617 message=message
1614 message=message
1618 )
1615 )
1619
1616
1620 @classmethod
1617 @classmethod
1621 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1618 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1622 force_shadow_repo_refresh=False):
1619 force_shadow_repo_refresh=False):
1623 _ = translator
1620 _ = translator
1624 merge_check = cls()
1621 merge_check = cls()
1625
1622
1626 # permissions to merge
1623 # permissions to merge
1627 user_allowed_to_merge = PullRequestModel().check_user_merge(
1624 user_allowed_to_merge = PullRequestModel().check_user_merge(
1628 pull_request, auth_user)
1625 pull_request, auth_user)
1629 if not user_allowed_to_merge:
1626 if not user_allowed_to_merge:
1630 log.debug("MergeCheck: cannot merge, approval is pending.")
1627 log.debug("MergeCheck: cannot merge, approval is pending.")
1631
1628
1632 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1629 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1633 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1630 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1634 if fail_early:
1631 if fail_early:
1635 return merge_check
1632 return merge_check
1636
1633
1637 # permission to merge into the target branch
1634 # permission to merge into the target branch
1638 target_commit_id = pull_request.target_ref_parts.commit_id
1635 target_commit_id = pull_request.target_ref_parts.commit_id
1639 if pull_request.target_ref_parts.type == 'branch':
1636 if pull_request.target_ref_parts.type == 'branch':
1640 branch_name = pull_request.target_ref_parts.name
1637 branch_name = pull_request.target_ref_parts.name
1641 else:
1638 else:
1642 # for mercurial we can always figure out the branch from the commit
1639 # for mercurial we can always figure out the branch from the commit
1643 # in case of bookmark
1640 # in case of bookmark
1644 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1641 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1645 branch_name = target_commit.branch
1642 branch_name = target_commit.branch
1646
1643
1647 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1644 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1648 pull_request.target_repo.repo_name, branch_name)
1645 pull_request.target_repo.repo_name, branch_name)
1649 if branch_perm and branch_perm == 'branch.none':
1646 if branch_perm and branch_perm == 'branch.none':
1650 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1647 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1651 branch_name, rule)
1648 branch_name, rule)
1652 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1649 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1653 if fail_early:
1650 if fail_early:
1654 return merge_check
1651 return merge_check
1655
1652
1656 # review status, must be always present
1653 # review status, must be always present
1657 review_status = pull_request.calculated_review_status()
1654 review_status = pull_request.calculated_review_status()
1658 merge_check.review_status = review_status
1655 merge_check.review_status = review_status
1659
1656
1660 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1657 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1661 if not status_approved:
1658 if not status_approved:
1662 log.debug("MergeCheck: cannot merge, approval is pending.")
1659 log.debug("MergeCheck: cannot merge, approval is pending.")
1663
1660
1664 msg = _('Pull request reviewer approval is pending.')
1661 msg = _('Pull request reviewer approval is pending.')
1665
1662
1666 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1663 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1667
1664
1668 if fail_early:
1665 if fail_early:
1669 return merge_check
1666 return merge_check
1670
1667
1671 # left over TODOs
1668 # left over TODOs
1672 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1669 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1673 if todos:
1670 if todos:
1674 log.debug("MergeCheck: cannot merge, {} "
1671 log.debug("MergeCheck: cannot merge, {} "
1675 "unresolved TODOs left.".format(len(todos)))
1672 "unresolved TODOs left.".format(len(todos)))
1676
1673
1677 if len(todos) == 1:
1674 if len(todos) == 1:
1678 msg = _('Cannot merge, {} TODO still not resolved.').format(
1675 msg = _('Cannot merge, {} TODO still not resolved.').format(
1679 len(todos))
1676 len(todos))
1680 else:
1677 else:
1681 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1678 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1682 len(todos))
1679 len(todos))
1683
1680
1684 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1681 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1685
1682
1686 if fail_early:
1683 if fail_early:
1687 return merge_check
1684 return merge_check
1688
1685
1689 # merge possible, here is the filesystem simulation + shadow repo
1686 # merge possible, here is the filesystem simulation + shadow repo
1690 merge_status, msg = PullRequestModel().merge_status(
1687 merge_status, msg = PullRequestModel().merge_status(
1691 pull_request, translator=translator,
1688 pull_request, translator=translator,
1692 force_shadow_repo_refresh=force_shadow_repo_refresh)
1689 force_shadow_repo_refresh=force_shadow_repo_refresh)
1693 merge_check.merge_possible = merge_status
1690 merge_check.merge_possible = merge_status
1694 merge_check.merge_msg = msg
1691 merge_check.merge_msg = msg
1695 if not merge_status:
1692 if not merge_status:
1696 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1693 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1697 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1694 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1698
1695
1699 if fail_early:
1696 if fail_early:
1700 return merge_check
1697 return merge_check
1701
1698
1702 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1699 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1703 return merge_check
1700 return merge_check
1704
1701
1705 @classmethod
1702 @classmethod
1706 def get_merge_conditions(cls, pull_request, translator):
1703 def get_merge_conditions(cls, pull_request, translator):
1707 _ = translator
1704 _ = translator
1708 merge_details = {}
1705 merge_details = {}
1709
1706
1710 model = PullRequestModel()
1707 model = PullRequestModel()
1711 use_rebase = model._use_rebase_for_merging(pull_request)
1708 use_rebase = model._use_rebase_for_merging(pull_request)
1712
1709
1713 if use_rebase:
1710 if use_rebase:
1714 merge_details['merge_strategy'] = dict(
1711 merge_details['merge_strategy'] = dict(
1715 details={},
1712 details={},
1716 message=_('Merge strategy: rebase')
1713 message=_('Merge strategy: rebase')
1717 )
1714 )
1718 else:
1715 else:
1719 merge_details['merge_strategy'] = dict(
1716 merge_details['merge_strategy'] = dict(
1720 details={},
1717 details={},
1721 message=_('Merge strategy: explicit merge commit')
1718 message=_('Merge strategy: explicit merge commit')
1722 )
1719 )
1723
1720
1724 close_branch = model._close_branch_before_merging(pull_request)
1721 close_branch = model._close_branch_before_merging(pull_request)
1725 if close_branch:
1722 if close_branch:
1726 repo_type = pull_request.target_repo.repo_type
1723 repo_type = pull_request.target_repo.repo_type
1727 close_msg = ''
1724 close_msg = ''
1728 if repo_type == 'hg':
1725 if repo_type == 'hg':
1729 close_msg = _('Source branch will be closed after merge.')
1726 close_msg = _('Source branch will be closed after merge.')
1730 elif repo_type == 'git':
1727 elif repo_type == 'git':
1731 close_msg = _('Source branch will be deleted after merge.')
1728 close_msg = _('Source branch will be deleted after merge.')
1732
1729
1733 merge_details['close_branch'] = dict(
1730 merge_details['close_branch'] = dict(
1734 details={},
1731 details={},
1735 message=close_msg
1732 message=close_msg
1736 )
1733 )
1737
1734
1738 return merge_details
1735 return merge_details
1739
1736
1740
1737
1741 ChangeTuple = collections.namedtuple(
1738 ChangeTuple = collections.namedtuple(
1742 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1739 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1743
1740
1744 FileChangeTuple = collections.namedtuple(
1741 FileChangeTuple = collections.namedtuple(
1745 'FileChangeTuple', ['added', 'modified', 'removed'])
1742 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,196 +1,195 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import stat
22 import stat
23 import sys
23 import sys
24
24
25 import pytest
25 import pytest
26 from mock import Mock, patch, DEFAULT
26 from mock import Mock, patch, DEFAULT
27
27
28 import rhodecode
28 import rhodecode
29 from rhodecode.model import db, scm
29 from rhodecode.model import db, scm
30 from rhodecode.tests import no_newline_id_generator
30 from rhodecode.tests import no_newline_id_generator
31
31
32
32
33 def test_scm_instance_config(backend):
33 def test_scm_instance_config(backend):
34 repo = backend.create_repo()
34 repo = backend.create_repo()
35 with patch.multiple('rhodecode.model.db.Repository',
35 with patch.multiple('rhodecode.model.db.Repository',
36 _get_instance=DEFAULT,
36 _get_instance=DEFAULT,
37 _get_instance_cached=DEFAULT) as mocks:
37 _get_instance_cached=DEFAULT) as mocks:
38 repo.scm_instance()
38 repo.scm_instance()
39 mocks['_get_instance'].assert_called_with(
39 mocks['_get_instance'].assert_called_with(
40 config=None, cache=False)
40 config=None, cache=False)
41
41
42 config = {'some': 'value'}
42 config = {'some': 'value'}
43 repo.scm_instance(config=config)
43 repo.scm_instance(config=config)
44 mocks['_get_instance'].assert_called_with(
44 mocks['_get_instance'].assert_called_with(
45 config=config, cache=False)
45 config=config, cache=False)
46
46
47 with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
47 with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
48 repo.scm_instance(config=config)
48 repo.scm_instance(config=config)
49 mocks['_get_instance_cached'].assert_called()
49 mocks['_get_instance_cached'].assert_called()
50
50
51
51
52 def test__get_instance_config(backend):
52 def test__get_instance_config(backend):
53 repo = backend.create_repo()
53 repo = backend.create_repo()
54 vcs_class = Mock()
54 vcs_class = Mock()
55 with patch.multiple('rhodecode.lib.vcs.backends',
55 with patch.multiple('rhodecode.lib.vcs.backends',
56 get_scm=DEFAULT,
56 get_scm=DEFAULT,
57 get_backend=DEFAULT) as mocks:
57 get_backend=DEFAULT) as mocks:
58 mocks['get_scm'].return_value = backend.alias
58 mocks['get_scm'].return_value = backend.alias
59 mocks['get_backend'].return_value = vcs_class
59 mocks['get_backend'].return_value = vcs_class
60 with patch('rhodecode.model.db.Repository._config') as config_mock:
60 with patch('rhodecode.model.db.Repository._config') as config_mock:
61 repo._get_instance()
61 repo._get_instance()
62 vcs_class.assert_called_with(
62 vcs_class.assert_called_with(
63 repo_path=repo.repo_full_path, config=config_mock,
63 repo_path=repo.repo_full_path, config=config_mock,
64 create=False, with_wire={'cache': True})
64 create=False, with_wire={'cache': True})
65
65
66 new_config = {'override': 'old_config'}
66 new_config = {'override': 'old_config'}
67 repo._get_instance(config=new_config)
67 repo._get_instance(config=new_config)
68 vcs_class.assert_called_with(
68 vcs_class.assert_called_with(
69 repo_path=repo.repo_full_path, config=new_config, create=False,
69 repo_path=repo.repo_full_path, config=new_config, create=False,
70 with_wire={'cache': True})
70 with_wire={'cache': True})
71
71
72
72
73 def test_mark_for_invalidation_config(backend):
73 def test_mark_for_invalidation_config(backend):
74 repo = backend.create_repo()
74 repo = backend.create_repo()
75 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
75 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
76 scm.ScmModel().mark_for_invalidation(repo.repo_name)
76 scm.ScmModel().mark_for_invalidation(repo.repo_name)
77 _, kwargs = _mock.call_args
77 _, kwargs = _mock.call_args
78 assert kwargs['config'].__dict__ == repo._config.__dict__
78 assert kwargs['config'].__dict__ == repo._config.__dict__
79
79
80
80
81 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
81 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
82 commits = [{'message': 'A'}, {'message': 'B'}]
82 commits = [{'message': 'A'}, {'message': 'B'}]
83 repo = backend.create_repo(commits=commits)
83 repo = backend.create_repo(commits=commits)
84 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
84 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
85 assert repo.changeset_cache['revision'] == 1
85 assert repo.changeset_cache['revision'] == 1
86
86
87
87
88 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
88 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
89 repo = backend.create_repo()
89 repo = backend.create_repo()
90 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
90 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
91 assert repo.changeset_cache['revision'] == -1
91 assert repo.changeset_cache['revision'] == -1
92
92
93
93
94 def test_strip_with_multiple_heads(backend_hg):
94 def test_strip_with_multiple_heads(backend_hg):
95 commits = [
95 commits = [
96 {'message': 'A'},
96 {'message': 'A'},
97 {'message': 'a'},
97 {'message': 'a'},
98 {'message': 'b'},
98 {'message': 'b'},
99 {'message': 'B', 'parents': ['A']},
99 {'message': 'B', 'parents': ['A']},
100 {'message': 'a1'},
100 {'message': 'a1'},
101 ]
101 ]
102 repo = backend_hg.create_repo(commits=commits)
102 repo = backend_hg.create_repo(commits=commits)
103 commit_ids = backend_hg.commit_ids
103 commit_ids = backend_hg.commit_ids
104
104
105 model = scm.ScmModel()
105 model = scm.ScmModel()
106 model.strip(repo, commit_ids['b'], branch=None)
106 model.strip(repo, commit_ids['b'], branch=None)
107
107
108 vcs_repo = repo.scm_instance()
108 vcs_repo = repo.scm_instance()
109 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
109 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
110 assert len(rest_commit_ids) == 4
110 assert len(rest_commit_ids) == 4
111 assert commit_ids['b'] not in rest_commit_ids
111 assert commit_ids['b'] not in rest_commit_ids
112
112
113
113
114 def test_strip_with_single_heads(backend_hg):
114 def test_strip_with_single_heads(backend_hg):
115 commits = [
115 commits = [
116 {'message': 'A'},
116 {'message': 'A'},
117 {'message': 'a'},
117 {'message': 'a'},
118 {'message': 'b'},
118 {'message': 'b'},
119 ]
119 ]
120 repo = backend_hg.create_repo(commits=commits)
120 repo = backend_hg.create_repo(commits=commits)
121 commit_ids = backend_hg.commit_ids
121 commit_ids = backend_hg.commit_ids
122
122
123 model = scm.ScmModel()
123 model = scm.ScmModel()
124 model.strip(repo, commit_ids['b'], branch=None)
124 model.strip(repo, commit_ids['b'], branch=None)
125
125
126 vcs_repo = repo.scm_instance()
126 vcs_repo = repo.scm_instance()
127 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
127 rest_commit_ids = [c.raw_id for c in vcs_repo.get_commits()]
128 assert len(rest_commit_ids) == 2
128 assert len(rest_commit_ids) == 2
129 assert commit_ids['b'] not in rest_commit_ids
129 assert commit_ids['b'] not in rest_commit_ids
130
130
131
131
132 def test_get_nodes_returns_unicode_flat(backend_random):
132 def test_get_nodes_returns_unicode_flat(backend):
133 repo = backend_random.repo
133 repo = backend.repo
134 directories, files = scm.ScmModel().get_nodes(
134 commit_id = repo.get_commit(commit_idx=0).raw_id
135 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
135 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=True)
136 flat=True)
137 assert_contains_only_unicode(directories)
136 assert_contains_only_unicode(directories)
138 assert_contains_only_unicode(files)
137 assert_contains_only_unicode(files)
139
138
140
139
141 def test_get_nodes_returns_unicode_non_flat(backend_random):
140 def test_get_nodes_returns_unicode_non_flat(backend):
142 repo = backend_random.repo
141 repo = backend.repo
143 directories, files = scm.ScmModel().get_nodes(
142 commit_id = repo.get_commit(commit_idx=0).raw_id
144 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
143
145 flat=False)
144 directories, files = scm.ScmModel().get_nodes(repo.repo_name, commit_id, flat=False)
146 # johbo: Checking only the names for now, since that is the critical
145 # johbo: Checking only the names for now, since that is the critical
147 # part.
146 # part.
148 assert_contains_only_unicode([d['name'] for d in directories])
147 assert_contains_only_unicode([d['name'] for d in directories])
149 assert_contains_only_unicode([f['name'] for f in files])
148 assert_contains_only_unicode([f['name'] for f in files])
150
149
151
150
152 def test_get_nodes_max_file_bytes(backend_random):
151 def test_get_nodes_max_file_bytes(backend_random):
153 repo = backend_random.repo
152 repo = backend_random.repo
154 max_file_bytes = 10
153 max_file_bytes = 10
155 directories, files = scm.ScmModel().get_nodes(
154 directories, files = scm.ScmModel().get_nodes(
156 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
155 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
157 extended_info=True, flat=False)
156 extended_info=True, flat=False)
158 assert any(file['content'] and len(file['content']) > max_file_bytes
157 assert any(file['content'] and len(file['content']) > max_file_bytes
159 for file in files)
158 for file in files)
160
159
161 directories, files = scm.ScmModel().get_nodes(
160 directories, files = scm.ScmModel().get_nodes(
162 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
161 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
163 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
162 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
164 assert all(
163 assert all(
165 file['content'] is None if file['size'] > max_file_bytes else True
164 file['content'] is None if file['size'] > max_file_bytes else True
166 for file in files)
165 for file in files)
167
166
168
167
169 def assert_contains_only_unicode(structure):
168 def assert_contains_only_unicode(structure):
170 assert structure
169 assert structure
171 for value in structure:
170 for value in structure:
172 assert isinstance(value, unicode)
171 assert isinstance(value, unicode)
173
172
174
173
175 @pytest.mark.backends("hg", "git")
174 @pytest.mark.backends("hg", "git")
176 def test_get_non_unicode_reference(backend):
175 def test_get_non_unicode_reference(backend):
177 model = scm.ScmModel()
176 model = scm.ScmModel()
178 non_unicode_list = ["AdΔ±nΔ±".decode("cp1254")]
177 non_unicode_list = ["AdΔ±nΔ±".decode("cp1254")]
179
178
180 def scm_instance():
179 def scm_instance():
181 return Mock(
180 return Mock(
182 branches=non_unicode_list, bookmarks=non_unicode_list,
181 branches=non_unicode_list, bookmarks=non_unicode_list,
183 tags=non_unicode_list, alias=backend.alias)
182 tags=non_unicode_list, alias=backend.alias)
184
183
185 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
184 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
186 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
185 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
187 if backend.alias == 'hg':
186 if backend.alias == 'hg':
188 valid_choices = [
187 valid_choices = [
189 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
188 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
190 u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
189 u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
191 else:
190 else:
192 valid_choices = [
191 valid_choices = [
193 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
192 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
194 u'tag:Ad\xc4\xb1n\xc4\xb1']
193 u'tag:Ad\xc4\xb1n\xc4\xb1']
195
194
196 assert choices == valid_choices
195 assert choices == valid_choices
@@ -1,257 +1,256 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import shutil
22 import shutil
23 import datetime
23 import datetime
24
24
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import Config
28 from rhodecode.lib.vcs.backends.base import Config
29 from rhodecode.lib.vcs.nodes import FileNode
29 from rhodecode.lib.vcs.nodes import FileNode
30 from rhodecode.tests import get_new_dir
30 from rhodecode.tests import get_new_dir
31 from rhodecode.tests.utils import check_skip_backends, check_xfail_backends
31 from rhodecode.tests.utils import check_skip_backends, check_xfail_backends
32
32
33
33
34 @pytest.fixture()
34 @pytest.fixture()
35 def vcs_repository_support(
35 def vcs_repository_support(
36 request, backend_alias, baseapp, _vcs_repo_container):
36 request, backend_alias, baseapp, _vcs_repo_container):
37 """
37 """
38 Provide a test repository for the test run.
38 Provide a test repository for the test run.
39
39
40 Depending on the value of `recreate_repo_per_test` a new repo for each
40 Depending on the value of `recreate_repo_per_test` a new repo for each
41 test will be created.
41 test will be created.
42
42
43 The parameter `--backends` can be used to limit this fixture to specific
43 The parameter `--backends` can be used to limit this fixture to specific
44 backend implementations.
44 backend implementations.
45 """
45 """
46 cls = request.cls
46 cls = request.cls
47
47
48 check_skip_backends(request.node, backend_alias)
48 check_skip_backends(request.node, backend_alias)
49 check_xfail_backends(request.node, backend_alias)
49 check_xfail_backends(request.node, backend_alias)
50
50
51 if _should_create_repo_per_test(cls):
51 if _should_create_repo_per_test(cls):
52 _vcs_repo_container = _create_vcs_repo_container(request)
52 _vcs_repo_container = _create_vcs_repo_container(request)
53
53
54 repo = _vcs_repo_container.get_repo(cls, backend_alias=backend_alias)
54 repo = _vcs_repo_container.get_repo(cls, backend_alias=backend_alias)
55
55
56 # TODO: johbo: Supporting old test class api, think about removing this
56 # TODO: johbo: Supporting old test class api, think about removing this
57 cls.repo = repo
57 cls.repo = repo
58 cls.repo_path = repo.path
58 cls.repo_path = repo.path
59 cls.default_branch = repo.DEFAULT_BRANCH_NAME
59 cls.default_branch = repo.DEFAULT_BRANCH_NAME
60 cls.Backend = cls.backend_class = repo.__class__
60 cls.Backend = cls.backend_class = repo.__class__
61 cls.imc = repo.in_memory_commit
61 cls.imc = repo.in_memory_commit
62
62
63 return backend_alias, repo
63 return backend_alias, repo
64
64
65
65
66 @pytest.fixture(scope='class')
66 @pytest.fixture(scope='class')
67 def _vcs_repo_container(request):
67 def _vcs_repo_container(request):
68 """
68 """
69 Internal fixture intended to help support class based scoping on demand.
69 Internal fixture intended to help support class based scoping on demand.
70 """
70 """
71 return _create_vcs_repo_container(request)
71 return _create_vcs_repo_container(request)
72
72
73
73
74 def _create_vcs_repo_container(request):
74 def _create_vcs_repo_container(request):
75 repo_container = VcsRepoContainer()
75 repo_container = VcsRepoContainer()
76 if not request.config.getoption('--keep-tmp-path'):
76 if not request.config.getoption('--keep-tmp-path'):
77 request.addfinalizer(repo_container.cleanup)
77 request.addfinalizer(repo_container.cleanup)
78 return repo_container
78 return repo_container
79
79
80
80
81 class VcsRepoContainer(object):
81 class VcsRepoContainer(object):
82
82
83 def __init__(self):
83 def __init__(self):
84 self._cleanup_paths = []
84 self._cleanup_paths = []
85 self._repos = {}
85 self._repos = {}
86
86
87 def get_repo(self, test_class, backend_alias):
87 def get_repo(self, test_class, backend_alias):
88 if backend_alias not in self._repos:
88 if backend_alias not in self._repos:
89 repo = _create_empty_repository(test_class, backend_alias)
89 repo = _create_empty_repository(test_class, backend_alias)
90
90
91 self._cleanup_paths.append(repo.path)
91 self._cleanup_paths.append(repo.path)
92 self._repos[backend_alias] = repo
92 self._repos[backend_alias] = repo
93 return self._repos[backend_alias]
93 return self._repos[backend_alias]
94
94
95 def cleanup(self):
95 def cleanup(self):
96 for repo_path in reversed(self._cleanup_paths):
96 for repo_path in reversed(self._cleanup_paths):
97 shutil.rmtree(repo_path)
97 shutil.rmtree(repo_path)
98
98
99
99
100 def _should_create_repo_per_test(cls):
100 def _should_create_repo_per_test(cls):
101 return getattr(cls, 'recreate_repo_per_test', False)
101 return getattr(cls, 'recreate_repo_per_test', False)
102
102
103
103
104 def _create_empty_repository(cls, backend_alias=None):
104 def _create_empty_repository(cls, backend_alias=None):
105 Backend = get_backend(backend_alias or cls.backend_alias)
105 Backend = get_backend(backend_alias or cls.backend_alias)
106 repo_path = get_new_dir(str(time.time()))
106 repo_path = get_new_dir(str(time.time()))
107 repo = Backend(repo_path, create=True)
107 repo = Backend(repo_path, create=True)
108 if hasattr(cls, '_get_commits'):
108 if hasattr(cls, '_get_commits'):
109 commits = cls._get_commits()
109 commits = cls._get_commits()
110 cls.tip = _add_commits_to_repo(repo, commits)
110 cls.tip = _add_commits_to_repo(repo, commits)
111
111
112 return repo
112 return repo
113
113
114
114
115 @pytest.fixture
115 @pytest.fixture
116 def config():
116 def config():
117 """
117 """
118 Instance of a repository config.
118 Instance of a repository config.
119
119
120 The instance contains only one value:
120 The instance contains only one value:
121
121
122 - Section: "section-a"
122 - Section: "section-a"
123 - Key: "a-1"
123 - Key: "a-1"
124 - Value: "value-a-1"
124 - Value: "value-a-1"
125
125
126 The intended usage is for cases where a config instance is needed but no
126 The intended usage is for cases where a config instance is needed but no
127 specific content is required.
127 specific content is required.
128 """
128 """
129 config = Config()
129 config = Config()
130 config.set('section-a', 'a-1', 'value-a-1')
130 config.set('section-a', 'a-1', 'value-a-1')
131 return config
131 return config
132
132
133
133
134 def _add_commits_to_repo(repo, commits):
134 def _add_commits_to_repo(repo, commits):
135 imc = repo.in_memory_commit
135 imc = repo.in_memory_commit
136 tip = None
136 tip = None
137
137
138 for commit in commits:
138 for commit in commits:
139 for node in commit.get('added', []):
139 for node in commit.get('added', []):
140 imc.add(FileNode(node.path, content=node.content))
140 imc.add(FileNode(node.path, content=node.content))
141 for node in commit.get('changed', []):
141 for node in commit.get('changed', []):
142 imc.change(FileNode(node.path, content=node.content))
142 imc.change(FileNode(node.path, content=node.content))
143 for node in commit.get('removed', []):
143 for node in commit.get('removed', []):
144 imc.remove(FileNode(node.path))
144 imc.remove(FileNode(node.path))
145
145
146 tip = imc.commit(
146 tip = imc.commit(
147 message=unicode(commit['message']),
147 message=unicode(commit['message']),
148 author=unicode(commit['author']),
148 author=unicode(commit['author']),
149 date=commit['date'],
149 date=commit['date'],
150 branch=commit.get('branch'))
150 branch=commit.get('branch'))
151
152 return tip
151 return tip
153
152
154
153
155 @pytest.fixture
154 @pytest.fixture
156 def vcs_repo(request, backend_alias):
155 def vcs_repo(request, backend_alias):
157 Backend = get_backend(backend_alias)
156 Backend = get_backend(backend_alias)
158 repo_path = get_new_dir(str(time.time()))
157 repo_path = get_new_dir(str(time.time()))
159 repo = Backend(repo_path, create=True)
158 repo = Backend(repo_path, create=True)
160
159
161 @request.addfinalizer
160 @request.addfinalizer
162 def cleanup():
161 def cleanup():
163 shutil.rmtree(repo_path)
162 shutil.rmtree(repo_path)
164
163
165 return repo
164 return repo
166
165
167
166
168 @pytest.fixture
167 @pytest.fixture
169 def generate_repo_with_commits(vcs_repo):
168 def generate_repo_with_commits(vcs_repo):
170 """
169 """
171 Creates a fabric to generate N comits with some file nodes on a randomly
170 Creates a fabric to generate N comits with some file nodes on a randomly
172 generated repository
171 generated repository
173 """
172 """
174
173
175 def commit_generator(num):
174 def commit_generator(num):
176 start_date = datetime.datetime(2010, 1, 1, 20)
175 start_date = datetime.datetime(2010, 1, 1, 20)
177 for x in xrange(num):
176 for x in xrange(num):
178 yield {
177 yield {
179 'message': 'Commit %d' % x,
178 'message': 'Commit %d' % x,
180 'author': 'Joe Doe <joe.doe@example.com>',
179 'author': 'Joe Doe <joe.doe@example.com>',
181 'date': start_date + datetime.timedelta(hours=12 * x),
180 'date': start_date + datetime.timedelta(hours=12 * x),
182 'added': [
181 'added': [
183 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
182 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
184 ],
183 ],
185 'modified': [
184 'modified': [
186 FileNode('file_%d.txt' % x,
185 FileNode('file_%d.txt' % x,
187 content='Foobar %d modified' % (x-1)),
186 content='Foobar %d modified' % (x-1)),
188 ]
187 ]
189 }
188 }
190
189
191 def commit_maker(num=5):
190 def commit_maker(num=5):
192 _add_commits_to_repo(vcs_repo, commit_generator(num))
191 _add_commits_to_repo(vcs_repo, commit_generator(num))
193 return vcs_repo
192 return vcs_repo
194
193
195 return commit_maker
194 return commit_maker
196
195
197
196
198 @pytest.fixture
197 @pytest.fixture
199 def hg_repo(request, vcs_repo):
198 def hg_repo(request, vcs_repo):
200 repo = vcs_repo
199 repo = vcs_repo
201
200
202 commits = repo._get_commits()
201 commits = repo._get_commits()
203 _add_commits_to_repo(repo, commits)
202 _add_commits_to_repo(repo, commits)
204
203
205 return repo
204 return repo
206
205
207
206
208 @pytest.fixture
207 @pytest.fixture
209 def hg_commit(hg_repo):
208 def hg_commit(hg_repo):
210 return hg_repo.get_commit()
209 return hg_repo.get_commit()
211
210
212
211
213 class BackendTestMixin(object):
212 class BackendTestMixin(object):
214 """
213 """
215 This is a backend independent test case class which should be created
214 This is a backend independent test case class which should be created
216 with ``type`` method.
215 with ``type`` method.
217
216
218 It is required to set following attributes at subclass:
217 It is required to set following attributes at subclass:
219
218
220 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
219 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
221 - ``repo_path``: path to the repository which would be created for set of
220 - ``repo_path``: path to the repository which would be created for set of
222 tests
221 tests
223 - ``recreate_repo_per_test``: If set to ``False``, repo would NOT be
222 - ``recreate_repo_per_test``: If set to ``False``, repo would NOT be
224 created
223 created
225 before every single test. Defaults to ``True``.
224 before every single test. Defaults to ``True``.
226 """
225 """
227 recreate_repo_per_test = True
226 recreate_repo_per_test = True
228
227
229 @classmethod
228 @classmethod
230 def _get_commits(cls):
229 def _get_commits(cls):
231 commits = [
230 commits = [
232 {
231 {
233 'message': u'Initial commit',
232 'message': u'Initial commit',
234 'author': u'Joe Doe <joe.doe@example.com>',
233 'author': u'Joe Doe <joe.doe@example.com>',
235 'date': datetime.datetime(2010, 1, 1, 20),
234 'date': datetime.datetime(2010, 1, 1, 20),
236 'added': [
235 'added': [
237 FileNode('foobar', content='Foobar'),
236 FileNode('foobar', content='Foobar'),
238 FileNode('foobar2', content='Foobar II'),
237 FileNode('foobar2', content='Foobar II'),
239 FileNode('foo/bar/baz', content='baz here!'),
238 FileNode('foo/bar/baz', content='baz here!'),
240 ],
239 ],
241 },
240 },
242 {
241 {
243 'message': u'Changes...',
242 'message': u'Changes...',
244 'author': u'Jane Doe <jane.doe@example.com>',
243 'author': u'Jane Doe <jane.doe@example.com>',
245 'date': datetime.datetime(2010, 1, 1, 21),
244 'date': datetime.datetime(2010, 1, 1, 21),
246 'added': [
245 'added': [
247 FileNode('some/new.txt', content='news...'),
246 FileNode('some/new.txt', content='news...'),
248 ],
247 ],
249 'changed': [
248 'changed': [
250 FileNode('foobar', 'Foobar I'),
249 FileNode('foobar', 'Foobar I'),
251 ],
250 ],
252 'removed': [],
251 'removed': [],
253 },
252 },
254 ]
253 ]
255 return commits
254 return commits
256
255
257
256
@@ -1,593 +1,592 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import time
22 import time
23
23
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.vcs.backends.base import (
26 from rhodecode.lib.vcs.backends.base import (
27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
28 from rhodecode.lib.vcs.exceptions import (
28 from rhodecode.lib.vcs.exceptions import (
29 BranchDoesNotExistError, CommitDoesNotExistError,
29 BranchDoesNotExistError, CommitDoesNotExistError,
30 RepositoryError, EmptyRepositoryError)
30 RepositoryError, EmptyRepositoryError)
31 from rhodecode.lib.vcs.nodes import (
31 from rhodecode.lib.vcs.nodes import (
32 FileNode, AddedFileNodesGenerator,
32 FileNode, AddedFileNodesGenerator,
33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
34 from rhodecode.tests import get_new_dir
34 from rhodecode.tests import get_new_dir
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
36
36
37
37
38 class TestBaseChangeset:
38 class TestBaseChangeset:
39
39
40 def test_is_deprecated(self):
40 def test_is_deprecated(self):
41 from rhodecode.lib.vcs.backends.base import BaseChangeset
41 from rhodecode.lib.vcs.backends.base import BaseChangeset
42 pytest.deprecated_call(BaseChangeset)
42 pytest.deprecated_call(BaseChangeset)
43
43
44
44
45 class TestEmptyCommit(object):
45 class TestEmptyCommit(object):
46
46
47 def test_branch_without_alias_returns_none(self):
47 def test_branch_without_alias_returns_none(self):
48 commit = EmptyCommit()
48 commit = EmptyCommit()
49 assert commit.branch is None
49 assert commit.branch is None
50
50
51
51
52 @pytest.mark.usefixtures("vcs_repository_support")
52 @pytest.mark.usefixtures("vcs_repository_support")
53 class TestCommitsInNonEmptyRepo(BackendTestMixin):
53 class TestCommitsInNonEmptyRepo(BackendTestMixin):
54 recreate_repo_per_test = True
54 recreate_repo_per_test = True
55
55
56 @classmethod
56 @classmethod
57 def _get_commits(cls):
57 def _get_commits(cls):
58 start_date = datetime.datetime(2010, 1, 1, 20)
58 start_date = datetime.datetime(2010, 1, 1, 20)
59 for x in xrange(5):
59 for x in xrange(5):
60 yield {
60 yield {
61 'message': 'Commit %d' % x,
61 'message': 'Commit %d' % x,
62 'author': 'Joe Doe <joe.doe@example.com>',
62 'author': 'Joe Doe <joe.doe@example.com>',
63 'date': start_date + datetime.timedelta(hours=12 * x),
63 'date': start_date + datetime.timedelta(hours=12 * x),
64 'added': [
64 'added': [
65 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
65 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
66 ],
66 ],
67 }
67 }
68
68
69 def test_walk_returns_empty_list_in_case_of_file(self):
69 def test_walk_returns_empty_list_in_case_of_file(self):
70 result = list(self.tip.walk('file_0.txt'))
70 result = list(self.tip.walk('file_0.txt'))
71 assert result == []
71 assert result == []
72
72
73 @pytest.mark.backends("git", "hg")
73 @pytest.mark.backends("git", "hg")
74 def test_new_branch(self):
74 def test_new_branch(self):
75 self.imc.add(FileNode('docs/index.txt',
75 self.imc.add(FileNode('docs/index.txt',
76 content='Documentation\n'))
76 content='Documentation\n'))
77 foobar_tip = self.imc.commit(
77 foobar_tip = self.imc.commit(
78 message=u'New branch: foobar',
78 message=u'New branch: foobar',
79 author=u'joe',
79 author=u'joe',
80 branch='foobar',
80 branch='foobar',
81 )
81 )
82 assert 'foobar' in self.repo.branches
82 assert 'foobar' in self.repo.branches
83 assert foobar_tip.branch == 'foobar'
83 assert foobar_tip.branch == 'foobar'
84 # 'foobar' should be the only branch that contains the new commit
84 # 'foobar' should be the only branch that contains the new commit
85 branch = self.repo.branches.values()
85 branch = self.repo.branches.values()
86 assert branch[0] != branch[1]
86 assert branch[0] != branch[1]
87
87
88 @pytest.mark.backends("git", "hg")
88 @pytest.mark.backends("git", "hg")
89 def test_new_head_in_default_branch(self):
89 def test_new_head_in_default_branch(self):
90 tip = self.repo.get_commit()
90 tip = self.repo.get_commit()
91 self.imc.add(FileNode('docs/index.txt',
91 self.imc.add(FileNode('docs/index.txt',
92 content='Documentation\n'))
92 content='Documentation\n'))
93 foobar_tip = self.imc.commit(
93 foobar_tip = self.imc.commit(
94 message=u'New branch: foobar',
94 message=u'New branch: foobar',
95 author=u'joe',
95 author=u'joe',
96 branch='foobar',
96 branch='foobar',
97 parents=[tip],
97 parents=[tip],
98 )
98 )
99 self.imc.change(FileNode('docs/index.txt',
99 self.imc.change(FileNode('docs/index.txt',
100 content='Documentation\nand more...\n'))
100 content='Documentation\nand more...\n'))
101 newtip = self.imc.commit(
101 newtip = self.imc.commit(
102 message=u'At default branch',
102 message=u'At default branch',
103 author=u'joe',
103 author=u'joe',
104 branch=foobar_tip.branch,
104 branch=foobar_tip.branch,
105 parents=[foobar_tip],
105 parents=[foobar_tip],
106 )
106 )
107
107
108 newest_tip = self.imc.commit(
108 newest_tip = self.imc.commit(
109 message=u'Merged with %s' % foobar_tip.raw_id,
109 message=u'Merged with %s' % foobar_tip.raw_id,
110 author=u'joe',
110 author=u'joe',
111 branch=self.backend_class.DEFAULT_BRANCH_NAME,
111 branch=self.backend_class.DEFAULT_BRANCH_NAME,
112 parents=[newtip, foobar_tip],
112 parents=[newtip, foobar_tip],
113 )
113 )
114
114
115 assert newest_tip.branch == self.backend_class.DEFAULT_BRANCH_NAME
115 assert newest_tip.branch == self.backend_class.DEFAULT_BRANCH_NAME
116
116
117 @pytest.mark.backends("git", "hg")
117 @pytest.mark.backends("git", "hg")
118 def test_get_commits_respects_branch_name(self):
118 def test_get_commits_respects_branch_name(self):
119 """
119 """
120 * e1930d0 (HEAD, master) Back in default branch
120 * e1930d0 (HEAD, master) Back in default branch
121 | * e1930d0 (docs) New Branch: docs2
121 | * e1930d0 (docs) New Branch: docs2
122 | * dcc14fa New branch: docs
122 | * dcc14fa New branch: docs
123 |/
123 |/
124 * e63c41a Initial commit
124 * e63c41a Initial commit
125 ...
125 ...
126 * 624d3db Commit 0
126 * 624d3db Commit 0
127
127
128 :return:
128 :return:
129 """
129 """
130 DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
130 DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
131 TEST_BRANCH = 'docs'
131 TEST_BRANCH = 'docs'
132 org_tip = self.repo.get_commit()
132 org_tip = self.repo.get_commit()
133
133
134 self.imc.add(FileNode('readme.txt', content='Document\n'))
134 self.imc.add(FileNode('readme.txt', content='Document\n'))
135 initial = self.imc.commit(
135 initial = self.imc.commit(
136 message=u'Initial commit',
136 message=u'Initial commit',
137 author=u'joe',
137 author=u'joe',
138 parents=[org_tip],
138 parents=[org_tip],
139 branch=DEFAULT_BRANCH,)
139 branch=DEFAULT_BRANCH,)
140
140
141 self.imc.add(FileNode('newdoc.txt', content='foobar\n'))
141 self.imc.add(FileNode('newdoc.txt', content='foobar\n'))
142 docs_branch_commit1 = self.imc.commit(
142 docs_branch_commit1 = self.imc.commit(
143 message=u'New branch: docs',
143 message=u'New branch: docs',
144 author=u'joe',
144 author=u'joe',
145 parents=[initial],
145 parents=[initial],
146 branch=TEST_BRANCH,)
146 branch=TEST_BRANCH,)
147
147
148 self.imc.add(FileNode('newdoc2.txt', content='foobar2\n'))
148 self.imc.add(FileNode('newdoc2.txt', content='foobar2\n'))
149 docs_branch_commit2 = self.imc.commit(
149 docs_branch_commit2 = self.imc.commit(
150 message=u'New branch: docs2',
150 message=u'New branch: docs2',
151 author=u'joe',
151 author=u'joe',
152 parents=[docs_branch_commit1],
152 parents=[docs_branch_commit1],
153 branch=TEST_BRANCH,)
153 branch=TEST_BRANCH,)
154
154
155 self.imc.add(FileNode('newfile', content='hello world\n'))
155 self.imc.add(FileNode('newfile', content='hello world\n'))
156 self.imc.commit(
156 self.imc.commit(
157 message=u'Back in default branch',
157 message=u'Back in default branch',
158 author=u'joe',
158 author=u'joe',
159 parents=[initial],
159 parents=[initial],
160 branch=DEFAULT_BRANCH,)
160 branch=DEFAULT_BRANCH,)
161
161
162 default_branch_commits = self.repo.get_commits(
162 default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
163 branch_name=DEFAULT_BRANCH)
164 assert docs_branch_commit1 not in list(default_branch_commits)
163 assert docs_branch_commit1 not in list(default_branch_commits)
165 assert docs_branch_commit2 not in list(default_branch_commits)
164 assert docs_branch_commit2 not in list(default_branch_commits)
166
165
167 docs_branch_commits = self.repo.get_commits(
166 docs_branch_commits = self.repo.get_commits(
168 start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
167 start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
169 branch_name=TEST_BRANCH)
168 branch_name=TEST_BRANCH)
170 assert docs_branch_commit1 in list(docs_branch_commits)
169 assert docs_branch_commit1 in list(docs_branch_commits)
171 assert docs_branch_commit2 in list(docs_branch_commits)
170 assert docs_branch_commit2 in list(docs_branch_commits)
172
171
173 @pytest.mark.backends("svn")
172 @pytest.mark.backends("svn")
174 def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
173 def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
175 repo = vcsbackend_svn['svn-simple-layout']
174 repo = vcsbackend_svn['svn-simple-layout']
176 commits = repo.get_commits(branch_name='trunk')
175 commits = repo.get_commits(branch_name='trunk')
177 commit_indexes = [c.idx for c in commits]
176 commit_indexes = [c.idx for c in commits]
178 assert commit_indexes == [1, 2, 3, 7, 12, 15]
177 assert commit_indexes == [1, 2, 3, 7, 12, 15]
179
178
180 def test_get_commit_by_branch(self):
179 def test_get_commit_by_branch(self):
181 for branch, commit_id in self.repo.branches.iteritems():
180 for branch, commit_id in self.repo.branches.iteritems():
182 assert commit_id == self.repo.get_commit(branch).raw_id
181 assert commit_id == self.repo.get_commit(branch).raw_id
183
182
184 def test_get_commit_by_tag(self):
183 def test_get_commit_by_tag(self):
185 for tag, commit_id in self.repo.tags.iteritems():
184 for tag, commit_id in self.repo.tags.iteritems():
186 assert commit_id == self.repo.get_commit(tag).raw_id
185 assert commit_id == self.repo.get_commit(tag).raw_id
187
186
188 def test_get_commit_parents(self):
187 def test_get_commit_parents(self):
189 repo = self.repo
188 repo = self.repo
190 for test_idx in [1, 2, 3]:
189 for test_idx in [1, 2, 3]:
191 commit = repo.get_commit(commit_idx=test_idx - 1)
190 commit = repo.get_commit(commit_idx=test_idx - 1)
192 assert [commit] == repo.get_commit(commit_idx=test_idx).parents
191 assert [commit] == repo.get_commit(commit_idx=test_idx).parents
193
192
194 def test_get_commit_children(self):
193 def test_get_commit_children(self):
195 repo = self.repo
194 repo = self.repo
196 for test_idx in [1, 2, 3]:
195 for test_idx in [1, 2, 3]:
197 commit = repo.get_commit(commit_idx=test_idx + 1)
196 commit = repo.get_commit(commit_idx=test_idx + 1)
198 assert [commit] == repo.get_commit(commit_idx=test_idx).children
197 assert [commit] == repo.get_commit(commit_idx=test_idx).children
199
198
200
199
201 @pytest.mark.usefixtures("vcs_repository_support")
200 @pytest.mark.usefixtures("vcs_repository_support")
202 class TestCommits(BackendTestMixin):
201 class TestCommits(BackendTestMixin):
203 recreate_repo_per_test = False
202 recreate_repo_per_test = False
204
203
205 @classmethod
204 @classmethod
206 def _get_commits(cls):
205 def _get_commits(cls):
207 start_date = datetime.datetime(2010, 1, 1, 20)
206 start_date = datetime.datetime(2010, 1, 1, 20)
208 for x in xrange(5):
207 for x in xrange(5):
209 yield {
208 yield {
210 'message': u'Commit %d' % x,
209 'message': u'Commit %d' % x,
211 'author': u'Joe Doe <joe.doe@example.com>',
210 'author': u'Joe Doe <joe.doe@example.com>',
212 'date': start_date + datetime.timedelta(hours=12 * x),
211 'date': start_date + datetime.timedelta(hours=12 * x),
213 'added': [
212 'added': [
214 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
213 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
215 ],
214 ],
216 }
215 }
217
216
218 def test_simple(self):
217 def test_simple(self):
219 tip = self.repo.get_commit()
218 tip = self.repo.get_commit()
220 assert tip.date, datetime.datetime(2010, 1, 3 == 20)
219 assert tip.date, datetime.datetime(2010, 1, 3 == 20)
221
220
222 def test_simple_serialized_commit(self):
221 def test_simple_serialized_commit(self):
223 tip = self.repo.get_commit()
222 tip = self.repo.get_commit()
224 # json.dumps(tip) uses .__json__() method
223 # json.dumps(tip) uses .__json__() method
225 data = tip.__json__()
224 data = tip.__json__()
226 assert 'branch' in data
225 assert 'branch' in data
227 assert data['revision']
226 assert data['revision']
228
227
229 def test_retrieve_tip(self):
228 def test_retrieve_tip(self):
230 tip = self.repo.get_commit('tip')
229 tip = self.repo.get_commit('tip')
231 assert tip == self.repo.get_commit()
230 assert tip == self.repo.get_commit()
232
231
233 def test_invalid(self):
232 def test_invalid(self):
234 with pytest.raises(CommitDoesNotExistError):
233 with pytest.raises(CommitDoesNotExistError):
235 self.repo.get_commit(commit_idx=123456789)
234 self.repo.get_commit(commit_idx=123456789)
236
235
237 def test_idx(self):
236 def test_idx(self):
238 commit = self.repo[0]
237 commit = self.repo[0]
239 assert commit.idx == 0
238 assert commit.idx == 0
240
239
241 def test_negative_idx(self):
240 def test_negative_idx(self):
242 commit = self.repo.get_commit(commit_idx=-1)
241 commit = self.repo.get_commit(commit_idx=-1)
243 assert commit.idx >= 0
242 assert commit.idx >= 0
244
243
245 def test_revision_is_deprecated(self):
244 def test_revision_is_deprecated(self):
246 def get_revision(commit):
245 def get_revision(commit):
247 return commit.revision
246 return commit.revision
248
247
249 commit = self.repo[0]
248 commit = self.repo[0]
250 pytest.deprecated_call(get_revision, commit)
249 pytest.deprecated_call(get_revision, commit)
251
250
252 def test_size(self):
251 def test_size(self):
253 tip = self.repo.get_commit()
252 tip = self.repo.get_commit()
254 size = 5 * len('Foobar N') # Size of 5 files
253 size = 5 * len('Foobar N') # Size of 5 files
255 assert tip.size == size
254 assert tip.size == size
256
255
257 def test_size_at_commit(self):
256 def test_size_at_commit(self):
258 tip = self.repo.get_commit()
257 tip = self.repo.get_commit()
259 size = 5 * len('Foobar N') # Size of 5 files
258 size = 5 * len('Foobar N') # Size of 5 files
260 assert self.repo.size_at_commit(tip.raw_id) == size
259 assert self.repo.size_at_commit(tip.raw_id) == size
261
260
262 def test_size_at_first_commit(self):
261 def test_size_at_first_commit(self):
263 commit = self.repo[0]
262 commit = self.repo[0]
264 size = len('Foobar N') # Size of 1 file
263 size = len('Foobar N') # Size of 1 file
265 assert self.repo.size_at_commit(commit.raw_id) == size
264 assert self.repo.size_at_commit(commit.raw_id) == size
266
265
267 def test_author(self):
266 def test_author(self):
268 tip = self.repo.get_commit()
267 tip = self.repo.get_commit()
269 assert_text_equal(tip.author, u'Joe Doe <joe.doe@example.com>')
268 assert_text_equal(tip.author, u'Joe Doe <joe.doe@example.com>')
270
269
271 def test_author_name(self):
270 def test_author_name(self):
272 tip = self.repo.get_commit()
271 tip = self.repo.get_commit()
273 assert_text_equal(tip.author_name, u'Joe Doe')
272 assert_text_equal(tip.author_name, u'Joe Doe')
274
273
275 def test_author_email(self):
274 def test_author_email(self):
276 tip = self.repo.get_commit()
275 tip = self.repo.get_commit()
277 assert_text_equal(tip.author_email, u'joe.doe@example.com')
276 assert_text_equal(tip.author_email, u'joe.doe@example.com')
278
277
279 def test_message(self):
278 def test_message(self):
280 tip = self.repo.get_commit()
279 tip = self.repo.get_commit()
281 assert_text_equal(tip.message, u'Commit 4')
280 assert_text_equal(tip.message, u'Commit 4')
282
281
283 def test_diff(self):
282 def test_diff(self):
284 tip = self.repo.get_commit()
283 tip = self.repo.get_commit()
285 diff = tip.diff()
284 diff = tip.diff()
286 assert "+Foobar 4" in diff.raw
285 assert "+Foobar 4" in diff.raw
287
286
288 def test_prev(self):
287 def test_prev(self):
289 tip = self.repo.get_commit()
288 tip = self.repo.get_commit()
290 prev_commit = tip.prev()
289 prev_commit = tip.prev()
291 assert prev_commit.message == 'Commit 3'
290 assert prev_commit.message == 'Commit 3'
292
291
293 def test_prev_raises_on_first_commit(self):
292 def test_prev_raises_on_first_commit(self):
294 commit = self.repo.get_commit(commit_idx=0)
293 commit = self.repo.get_commit(commit_idx=0)
295 with pytest.raises(CommitDoesNotExistError):
294 with pytest.raises(CommitDoesNotExistError):
296 commit.prev()
295 commit.prev()
297
296
298 def test_prev_works_on_second_commit_issue_183(self):
297 def test_prev_works_on_second_commit_issue_183(self):
299 commit = self.repo.get_commit(commit_idx=1)
298 commit = self.repo.get_commit(commit_idx=1)
300 prev_commit = commit.prev()
299 prev_commit = commit.prev()
301 assert prev_commit.idx == 0
300 assert prev_commit.idx == 0
302
301
303 def test_next(self):
302 def test_next(self):
304 commit = self.repo.get_commit(commit_idx=2)
303 commit = self.repo.get_commit(commit_idx=2)
305 next_commit = commit.next()
304 next_commit = commit.next()
306 assert next_commit.message == 'Commit 3'
305 assert next_commit.message == 'Commit 3'
307
306
308 def test_next_raises_on_tip(self):
307 def test_next_raises_on_tip(self):
309 commit = self.repo.get_commit()
308 commit = self.repo.get_commit()
310 with pytest.raises(CommitDoesNotExistError):
309 with pytest.raises(CommitDoesNotExistError):
311 commit.next()
310 commit.next()
312
311
313 def test_get_path_commit(self):
312 def test_get_path_commit(self):
314 commit = self.repo.get_commit()
313 commit = self.repo.get_commit()
315 commit.get_path_commit('file_4.txt')
314 commit.get_path_commit('file_4.txt')
316 assert commit.message == 'Commit 4'
315 assert commit.message == 'Commit 4'
317
316
318 def test_get_filenodes_generator(self):
317 def test_get_filenodes_generator(self):
319 tip = self.repo.get_commit()
318 tip = self.repo.get_commit()
320 filepaths = [node.path for node in tip.get_filenodes_generator()]
319 filepaths = [node.path for node in tip.get_filenodes_generator()]
321 assert filepaths == ['file_%d.txt' % x for x in xrange(5)]
320 assert filepaths == ['file_%d.txt' % x for x in xrange(5)]
322
321
323 def test_get_file_annotate(self):
322 def test_get_file_annotate(self):
324 file_added_commit = self.repo.get_commit(commit_idx=3)
323 file_added_commit = self.repo.get_commit(commit_idx=3)
325 annotations = list(file_added_commit.get_file_annotate('file_3.txt'))
324 annotations = list(file_added_commit.get_file_annotate('file_3.txt'))
326
325
327 line_no, commit_id, commit_loader, line = annotations[0]
326 line_no, commit_id, commit_loader, line = annotations[0]
328
327
329 assert line_no == 1
328 assert line_no == 1
330 assert commit_id == file_added_commit.raw_id
329 assert commit_id == file_added_commit.raw_id
331 assert commit_loader() == file_added_commit
330 assert commit_loader() == file_added_commit
332 assert 'Foobar 3' in line
331 assert 'Foobar 3' in line
333
332
334 def test_get_file_annotate_does_not_exist(self):
333 def test_get_file_annotate_does_not_exist(self):
335 file_added_commit = self.repo.get_commit(commit_idx=2)
334 file_added_commit = self.repo.get_commit(commit_idx=2)
336 # TODO: Should use a specific exception class here?
335 # TODO: Should use a specific exception class here?
337 with pytest.raises(Exception):
336 with pytest.raises(Exception):
338 list(file_added_commit.get_file_annotate('file_3.txt'))
337 list(file_added_commit.get_file_annotate('file_3.txt'))
339
338
340 def test_get_file_annotate_tip(self):
339 def test_get_file_annotate_tip(self):
341 tip = self.repo.get_commit()
340 tip = self.repo.get_commit()
342 commit = self.repo.get_commit(commit_idx=3)
341 commit = self.repo.get_commit(commit_idx=3)
343 expected_values = list(commit.get_file_annotate('file_3.txt'))
342 expected_values = list(commit.get_file_annotate('file_3.txt'))
344 annotations = list(tip.get_file_annotate('file_3.txt'))
343 annotations = list(tip.get_file_annotate('file_3.txt'))
345
344
346 # Note: Skip index 2 because the loader function is not the same
345 # Note: Skip index 2 because the loader function is not the same
347 for idx in (0, 1, 3):
346 for idx in (0, 1, 3):
348 assert annotations[0][idx] == expected_values[0][idx]
347 assert annotations[0][idx] == expected_values[0][idx]
349
348
350 def test_get_commits_is_ordered_by_date(self):
349 def test_get_commits_is_ordered_by_date(self):
351 commits = self.repo.get_commits()
350 commits = self.repo.get_commits()
352 assert isinstance(commits, CollectionGenerator)
351 assert isinstance(commits, CollectionGenerator)
353 assert len(commits) == 0 or len(commits) != 0
352 assert len(commits) == 0 or len(commits) != 0
354 commits = list(commits)
353 commits = list(commits)
355 ordered_by_date = sorted(commits, key=lambda commit: commit.date)
354 ordered_by_date = sorted(commits, key=lambda commit: commit.date)
356 assert commits == ordered_by_date
355 assert commits == ordered_by_date
357
356
358 def test_get_commits_respects_start(self):
357 def test_get_commits_respects_start(self):
359 second_id = self.repo.commit_ids[1]
358 second_id = self.repo.commit_ids[1]
360 commits = self.repo.get_commits(start_id=second_id)
359 commits = self.repo.get_commits(start_id=second_id)
361 assert isinstance(commits, CollectionGenerator)
360 assert isinstance(commits, CollectionGenerator)
362 commits = list(commits)
361 commits = list(commits)
363 assert len(commits) == 4
362 assert len(commits) == 4
364
363
365 def test_get_commits_includes_start_commit(self):
364 def test_get_commits_includes_start_commit(self):
366 second_id = self.repo.commit_ids[1]
365 second_id = self.repo.commit_ids[1]
367 commits = self.repo.get_commits(start_id=second_id)
366 commits = self.repo.get_commits(start_id=second_id)
368 assert isinstance(commits, CollectionGenerator)
367 assert isinstance(commits, CollectionGenerator)
369 commits = list(commits)
368 commits = list(commits)
370 assert commits[0].raw_id == second_id
369 assert commits[0].raw_id == second_id
371
370
372 def test_get_commits_respects_end(self):
371 def test_get_commits_respects_end(self):
373 second_id = self.repo.commit_ids[1]
372 second_id = self.repo.commit_ids[1]
374 commits = self.repo.get_commits(end_id=second_id)
373 commits = self.repo.get_commits(end_id=second_id)
375 assert isinstance(commits, CollectionGenerator)
374 assert isinstance(commits, CollectionGenerator)
376 commits = list(commits)
375 commits = list(commits)
377 assert commits[-1].raw_id == second_id
376 assert commits[-1].raw_id == second_id
378 assert len(commits) == 2
377 assert len(commits) == 2
379
378
380 def test_get_commits_respects_both_start_and_end(self):
379 def test_get_commits_respects_both_start_and_end(self):
381 second_id = self.repo.commit_ids[1]
380 second_id = self.repo.commit_ids[1]
382 third_id = self.repo.commit_ids[2]
381 third_id = self.repo.commit_ids[2]
383 commits = self.repo.get_commits(start_id=second_id, end_id=third_id)
382 commits = self.repo.get_commits(start_id=second_id, end_id=third_id)
384 assert isinstance(commits, CollectionGenerator)
383 assert isinstance(commits, CollectionGenerator)
385 commits = list(commits)
384 commits = list(commits)
386 assert len(commits) == 2
385 assert len(commits) == 2
387
386
388 def test_get_commits_on_empty_repo_raises_EmptyRepository_error(self):
387 def test_get_commits_on_empty_repo_raises_EmptyRepository_error(self):
389 repo_path = get_new_dir(str(time.time()))
388 repo_path = get_new_dir(str(time.time()))
390 repo = self.Backend(repo_path, create=True)
389 repo = self.Backend(repo_path, create=True)
391
390
392 with pytest.raises(EmptyRepositoryError):
391 with pytest.raises(EmptyRepositoryError):
393 list(repo.get_commits(start_id='foobar'))
392 list(repo.get_commits(start_id='foobar'))
394
393
395 def test_get_commits_respects_hidden(self):
394 def test_get_commits_respects_hidden(self):
396 commits = self.repo.get_commits(show_hidden=True)
395 commits = self.repo.get_commits(show_hidden=True)
397 assert isinstance(commits, CollectionGenerator)
396 assert isinstance(commits, CollectionGenerator)
398 assert len(commits) == 5
397 assert len(commits) == 5
399
398
400 def test_get_commits_includes_end_commit(self):
399 def test_get_commits_includes_end_commit(self):
401 second_id = self.repo.commit_ids[1]
400 second_id = self.repo.commit_ids[1]
402 commits = self.repo.get_commits(end_id=second_id)
401 commits = self.repo.get_commits(end_id=second_id)
403 assert isinstance(commits, CollectionGenerator)
402 assert isinstance(commits, CollectionGenerator)
404 assert len(commits) == 2
403 assert len(commits) == 2
405 commits = list(commits)
404 commits = list(commits)
406 assert commits[-1].raw_id == second_id
405 assert commits[-1].raw_id == second_id
407
406
408 def test_get_commits_respects_start_date(self):
407 def test_get_commits_respects_start_date(self):
409 start_date = datetime.datetime(2010, 1, 2)
408 start_date = datetime.datetime(2010, 1, 2)
410 commits = self.repo.get_commits(start_date=start_date)
409 commits = self.repo.get_commits(start_date=start_date)
411 assert isinstance(commits, CollectionGenerator)
410 assert isinstance(commits, CollectionGenerator)
412 # Should be 4 commits after 2010-01-02 00:00:00
411 # Should be 4 commits after 2010-01-02 00:00:00
413 assert len(commits) == 4
412 assert len(commits) == 4
414 for c in commits:
413 for c in commits:
415 assert c.date >= start_date
414 assert c.date >= start_date
416
415
417 def test_get_commits_respects_start_date_with_branch(self):
416 def test_get_commits_respects_start_date_with_branch(self):
418 start_date = datetime.datetime(2010, 1, 2)
417 start_date = datetime.datetime(2010, 1, 2)
419 commits = self.repo.get_commits(
418 commits = self.repo.get_commits(
420 start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
419 start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
421 assert isinstance(commits, CollectionGenerator)
420 assert isinstance(commits, CollectionGenerator)
422 # Should be 4 commits after 2010-01-02 00:00:00
421 # Should be 4 commits after 2010-01-02 00:00:00
423 assert len(commits) == 4
422 assert len(commits) == 4
424 for c in commits:
423 for c in commits:
425 assert c.date >= start_date
424 assert c.date >= start_date
426
425
427 def test_get_commits_respects_start_date_and_end_date(self):
426 def test_get_commits_respects_start_date_and_end_date(self):
428 start_date = datetime.datetime(2010, 1, 2)
427 start_date = datetime.datetime(2010, 1, 2)
429 end_date = datetime.datetime(2010, 1, 3)
428 end_date = datetime.datetime(2010, 1, 3)
430 commits = self.repo.get_commits(start_date=start_date,
429 commits = self.repo.get_commits(start_date=start_date,
431 end_date=end_date)
430 end_date=end_date)
432 assert isinstance(commits, CollectionGenerator)
431 assert isinstance(commits, CollectionGenerator)
433 assert len(commits) == 2
432 assert len(commits) == 2
434 for c in commits:
433 for c in commits:
435 assert c.date >= start_date
434 assert c.date >= start_date
436 assert c.date <= end_date
435 assert c.date <= end_date
437
436
438 def test_get_commits_respects_end_date(self):
437 def test_get_commits_respects_end_date(self):
439 end_date = datetime.datetime(2010, 1, 2)
438 end_date = datetime.datetime(2010, 1, 2)
440 commits = self.repo.get_commits(end_date=end_date)
439 commits = self.repo.get_commits(end_date=end_date)
441 assert isinstance(commits, CollectionGenerator)
440 assert isinstance(commits, CollectionGenerator)
442 assert len(commits) == 1
441 assert len(commits) == 1
443 for c in commits:
442 for c in commits:
444 assert c.date <= end_date
443 assert c.date <= end_date
445
444
446 def test_get_commits_respects_reverse(self):
445 def test_get_commits_respects_reverse(self):
447 commits = self.repo.get_commits() # no longer reverse support
446 commits = self.repo.get_commits() # no longer reverse support
448 assert isinstance(commits, CollectionGenerator)
447 assert isinstance(commits, CollectionGenerator)
449 assert len(commits) == 5
448 assert len(commits) == 5
450 commit_ids = reversed([c.raw_id for c in commits])
449 commit_ids = reversed([c.raw_id for c in commits])
451 assert list(commit_ids) == list(reversed(self.repo.commit_ids))
450 assert list(commit_ids) == list(reversed(self.repo.commit_ids))
452
451
453 def test_get_commits_slice_generator(self):
452 def test_get_commits_slice_generator(self):
454 commits = self.repo.get_commits(
453 commits = self.repo.get_commits(
455 branch_name=self.repo.DEFAULT_BRANCH_NAME)
454 branch_name=self.repo.DEFAULT_BRANCH_NAME)
456 assert isinstance(commits, CollectionGenerator)
455 assert isinstance(commits, CollectionGenerator)
457 commit_slice = list(commits[1:3])
456 commit_slice = list(commits[1:3])
458 assert len(commit_slice) == 2
457 assert len(commit_slice) == 2
459
458
460 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
459 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
461 with pytest.raises(CommitDoesNotExistError):
460 with pytest.raises(CommitDoesNotExistError):
462 list(self.repo.get_commits(start_id='foobar'))
461 list(self.repo.get_commits(start_id='foobar'))
463
462
464 def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
463 def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
465 with pytest.raises(CommitDoesNotExistError):
464 with pytest.raises(CommitDoesNotExistError):
466 list(self.repo.get_commits(end_id='foobar'))
465 list(self.repo.get_commits(end_id='foobar'))
467
466
468 def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
467 def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
469 with pytest.raises(BranchDoesNotExistError):
468 with pytest.raises(BranchDoesNotExistError):
470 list(self.repo.get_commits(branch_name='foobar'))
469 list(self.repo.get_commits(branch_name='foobar'))
471
470
472 def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
471 def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
473 start_id = self.repo.commit_ids[-1]
472 start_id = self.repo.commit_ids[-1]
474 end_id = self.repo.commit_ids[0]
473 end_id = self.repo.commit_ids[0]
475 with pytest.raises(RepositoryError):
474 with pytest.raises(RepositoryError):
476 list(self.repo.get_commits(start_id=start_id, end_id=end_id))
475 list(self.repo.get_commits(start_id=start_id, end_id=end_id))
477
476
478 def test_get_commits_raises_for_numerical_ids(self):
477 def test_get_commits_raises_for_numerical_ids(self):
479 with pytest.raises(TypeError):
478 with pytest.raises(TypeError):
480 self.repo.get_commits(start_id=1, end_id=2)
479 self.repo.get_commits(start_id=1, end_id=2)
481
480
482 def test_commit_equality(self):
481 def test_commit_equality(self):
483 commit1 = self.repo.get_commit(self.repo.commit_ids[0])
482 commit1 = self.repo.get_commit(self.repo.commit_ids[0])
484 commit2 = self.repo.get_commit(self.repo.commit_ids[1])
483 commit2 = self.repo.get_commit(self.repo.commit_ids[1])
485
484
486 assert commit1 == commit1
485 assert commit1 == commit1
487 assert commit2 == commit2
486 assert commit2 == commit2
488 assert commit1 != commit2
487 assert commit1 != commit2
489 assert commit2 != commit1
488 assert commit2 != commit1
490 assert commit1 != None
489 assert commit1 != None
491 assert None != commit1
490 assert None != commit1
492 assert 1 != commit1
491 assert 1 != commit1
493 assert 'string' != commit1
492 assert 'string' != commit1
494
493
495
494
496 @pytest.mark.parametrize("filename, expected", [
495 @pytest.mark.parametrize("filename, expected", [
497 ("README.rst", False),
496 ("README.rst", False),
498 ("README", True),
497 ("README", True),
499 ])
498 ])
500 def test_commit_is_link(vcsbackend, filename, expected):
499 def test_commit_is_link(vcsbackend, filename, expected):
501 commit = vcsbackend.repo.get_commit()
500 commit = vcsbackend.repo.get_commit()
502 link_status = commit.is_link(filename)
501 link_status = commit.is_link(filename)
503 assert link_status is expected
502 assert link_status is expected
504
503
505
504
506 @pytest.mark.usefixtures("vcs_repository_support")
505 @pytest.mark.usefixtures("vcs_repository_support")
507 class TestCommitsChanges(BackendTestMixin):
506 class TestCommitsChanges(BackendTestMixin):
508 recreate_repo_per_test = False
507 recreate_repo_per_test = False
509
508
510 @classmethod
509 @classmethod
511 def _get_commits(cls):
510 def _get_commits(cls):
512 return [
511 return [
513 {
512 {
514 'message': u'Initial',
513 'message': u'Initial',
515 'author': u'Joe Doe <joe.doe@example.com>',
514 'author': u'Joe Doe <joe.doe@example.com>',
516 'date': datetime.datetime(2010, 1, 1, 20),
515 'date': datetime.datetime(2010, 1, 1, 20),
517 'added': [
516 'added': [
518 FileNode('foo/bar', content='foo'),
517 FileNode('foo/bar', content='foo'),
519 FileNode('foo/baΕ‚', content='foo'),
518 FileNode('foo/baΕ‚', content='foo'),
520 FileNode('foobar', content='foo'),
519 FileNode('foobar', content='foo'),
521 FileNode('qwe', content='foo'),
520 FileNode('qwe', content='foo'),
522 ],
521 ],
523 },
522 },
524 {
523 {
525 'message': u'Massive changes',
524 'message': u'Massive changes',
526 'author': u'Joe Doe <joe.doe@example.com>',
525 'author': u'Joe Doe <joe.doe@example.com>',
527 'date': datetime.datetime(2010, 1, 1, 22),
526 'date': datetime.datetime(2010, 1, 1, 22),
528 'added': [FileNode('fallout', content='War never changes')],
527 'added': [FileNode('fallout', content='War never changes')],
529 'changed': [
528 'changed': [
530 FileNode('foo/bar', content='baz'),
529 FileNode('foo/bar', content='baz'),
531 FileNode('foobar', content='baz'),
530 FileNode('foobar', content='baz'),
532 ],
531 ],
533 'removed': [FileNode('qwe')],
532 'removed': [FileNode('qwe')],
534 },
533 },
535 ]
534 ]
536
535
537 def test_initial_commit(self, local_dt_to_utc):
536 def test_initial_commit(self, local_dt_to_utc):
538 commit = self.repo.get_commit(commit_idx=0)
537 commit = self.repo.get_commit(commit_idx=0)
539 assert set(commit.added) == set([
538 assert set(commit.added) == set([
540 commit.get_node('foo/bar'),
539 commit.get_node('foo/bar'),
541 commit.get_node('foo/baΕ‚'),
540 commit.get_node('foo/baΕ‚'),
542 commit.get_node('foobar'),
541 commit.get_node('foobar'),
543 commit.get_node('qwe'),
542 commit.get_node('qwe'),
544 ])
543 ])
545 assert set(commit.changed) == set()
544 assert set(commit.changed) == set()
546 assert set(commit.removed) == set()
545 assert set(commit.removed) == set()
547 assert set(commit.affected_files) == set(
546 assert set(commit.affected_files) == set(
548 ['foo/bar', 'foo/baΕ‚', 'foobar', 'qwe'])
547 ['foo/bar', 'foo/baΕ‚', 'foobar', 'qwe'])
549 assert commit.date == local_dt_to_utc(
548 assert commit.date == local_dt_to_utc(
550 datetime.datetime(2010, 1, 1, 20, 0))
549 datetime.datetime(2010, 1, 1, 20, 0))
551
550
552 def test_head_added(self):
551 def test_head_added(self):
553 commit = self.repo.get_commit()
552 commit = self.repo.get_commit()
554 assert isinstance(commit.added, AddedFileNodesGenerator)
553 assert isinstance(commit.added, AddedFileNodesGenerator)
555 assert set(commit.added) == set([commit.get_node('fallout')])
554 assert set(commit.added) == set([commit.get_node('fallout')])
556 assert isinstance(commit.changed, ChangedFileNodesGenerator)
555 assert isinstance(commit.changed, ChangedFileNodesGenerator)
557 assert set(commit.changed) == set([
556 assert set(commit.changed) == set([
558 commit.get_node('foo/bar'),
557 commit.get_node('foo/bar'),
559 commit.get_node('foobar'),
558 commit.get_node('foobar'),
560 ])
559 ])
561 assert isinstance(commit.removed, RemovedFileNodesGenerator)
560 assert isinstance(commit.removed, RemovedFileNodesGenerator)
562 assert len(commit.removed) == 1
561 assert len(commit.removed) == 1
563 assert list(commit.removed)[0].path == 'qwe'
562 assert list(commit.removed)[0].path == 'qwe'
564
563
565 def test_get_filemode(self):
564 def test_get_filemode(self):
566 commit = self.repo.get_commit()
565 commit = self.repo.get_commit()
567 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')
566 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')
568
567
569 def test_get_filemode_non_ascii(self):
568 def test_get_filemode_non_ascii(self):
570 commit = self.repo.get_commit()
569 commit = self.repo.get_commit()
571 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ‚')
570 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ‚')
572 assert FILEMODE_DEFAULT == commit.get_file_mode(u'foo/baΕ‚')
571 assert FILEMODE_DEFAULT == commit.get_file_mode(u'foo/baΕ‚')
573
572
574 def test_get_path_history(self):
573 def test_get_path_history(self):
575 commit = self.repo.get_commit()
574 commit = self.repo.get_commit()
576 history = commit.get_path_history('foo/bar')
575 history = commit.get_path_history('foo/bar')
577 assert len(history) == 2
576 assert len(history) == 2
578
577
579 def test_get_path_history_with_limit(self):
578 def test_get_path_history_with_limit(self):
580 commit = self.repo.get_commit()
579 commit = self.repo.get_commit()
581 history = commit.get_path_history('foo/bar', limit=1)
580 history = commit.get_path_history('foo/bar', limit=1)
582 assert len(history) == 1
581 assert len(history) == 1
583
582
584 def test_get_path_history_first_commit(self):
583 def test_get_path_history_first_commit(self):
585 commit = self.repo[0]
584 commit = self.repo[0]
586 history = commit.get_path_history('foo/bar')
585 history = commit.get_path_history('foo/bar')
587 assert len(history) == 1
586 assert len(history) == 1
588
587
589
588
590 def assert_text_equal(expected, given):
589 def assert_text_equal(expected, given):
591 assert expected == given
590 assert expected == given
592 assert isinstance(expected, unicode)
591 assert isinstance(expected, unicode)
593 assert isinstance(given, unicode)
592 assert isinstance(given, unicode)
@@ -1,1288 +1,1288 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25 import shutil
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.utils import make_db_config
30 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
31 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
32 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 RepositoryError, VCSError, NodeDoesNotExistError)
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39
39
40
40
41 pytestmark = pytest.mark.backends("git")
41 pytestmark = pytest.mark.backends("git")
42
42
43
43
44 def repo_path_generator():
44 def repo_path_generator():
45 """
45 """
46 Return a different path to be used for cloning repos.
46 Return a different path to be used for cloning repos.
47 """
47 """
48 i = 0
48 i = 0
49 while True:
49 while True:
50 i += 1
50 i += 1
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
52
52
53
53
54 REPO_PATH_GENERATOR = repo_path_generator()
54 REPO_PATH_GENERATOR = repo_path_generator()
55
55
56
56
57 class TestGitRepository:
57 class TestGitRepository:
58
58
59 # pylint: disable=protected-access
59 # pylint: disable=protected-access
60
60
61 def __check_for_existing_repo(self):
61 def __check_for_existing_repo(self):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 self.fail('Cannot test git clone repo as location %s already '
63 self.fail('Cannot test git clone repo as location %s already '
64 'exists. You should manually remove it first.'
64 'exists. You should manually remove it first.'
65 % TEST_GIT_REPO_CLONE)
65 % TEST_GIT_REPO_CLONE)
66
66
67 @pytest.fixture(autouse=True)
67 @pytest.fixture(autouse=True)
68 def prepare(self, request, baseapp):
68 def prepare(self, request, baseapp):
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70
70
71 def get_clone_repo(self):
71 def get_clone_repo(self):
72 """
72 """
73 Return a non bare clone of the base repo.
73 Return a non bare clone of the base repo.
74 """
74 """
75 clone_path = next(REPO_PATH_GENERATOR)
75 clone_path = next(REPO_PATH_GENERATOR)
76 repo_clone = GitRepository(
76 repo_clone = GitRepository(
77 clone_path, create=True, src_url=self.repo.path, bare=False)
77 clone_path, create=True, src_url=self.repo.path, bare=False)
78
78
79 return repo_clone
79 return repo_clone
80
80
81 def get_empty_repo(self, bare=False):
81 def get_empty_repo(self, bare=False):
82 """
82 """
83 Return a non bare empty repo.
83 Return a non bare empty repo.
84 """
84 """
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86
86
87 def test_wrong_repo_path(self):
87 def test_wrong_repo_path(self):
88 wrong_repo_path = '/tmp/errorrepo_git'
88 wrong_repo_path = '/tmp/errorrepo_git'
89 with pytest.raises(RepositoryError):
89 with pytest.raises(RepositoryError):
90 GitRepository(wrong_repo_path)
90 GitRepository(wrong_repo_path)
91
91
92 def test_repo_clone(self):
92 def test_repo_clone(self):
93 self.__check_for_existing_repo()
93 self.__check_for_existing_repo()
94 repo = GitRepository(TEST_GIT_REPO)
94 repo = GitRepository(TEST_GIT_REPO)
95 repo_clone = GitRepository(
95 repo_clone = GitRepository(
96 TEST_GIT_REPO_CLONE,
96 TEST_GIT_REPO_CLONE,
97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 # Checking hashes of commits should be enough
99 # Checking hashes of commits should be enough
100 for commit in repo.get_commits():
100 for commit in repo.get_commits():
101 raw_id = commit.raw_id
101 raw_id = commit.raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103
103
104 def test_repo_clone_without_create(self):
104 def test_repo_clone_without_create(self):
105 with pytest.raises(RepositoryError):
105 with pytest.raises(RepositoryError):
106 GitRepository(
106 GitRepository(
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108
108
109 def test_repo_clone_with_update(self):
109 def test_repo_clone_with_update(self):
110 repo = GitRepository(TEST_GIT_REPO)
110 repo = GitRepository(TEST_GIT_REPO)
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 repo_clone = GitRepository(
112 repo_clone = GitRepository(
113 clone_path,
113 clone_path,
114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116
116
117 # check if current workdir was updated
117 # check if current workdir was updated
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 assert os.path.isfile(fpath)
119 assert os.path.isfile(fpath)
120
120
121 def test_repo_clone_without_update(self):
121 def test_repo_clone_without_update(self):
122 repo = GitRepository(TEST_GIT_REPO)
122 repo = GitRepository(TEST_GIT_REPO)
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 repo_clone = GitRepository(
124 repo_clone = GitRepository(
125 clone_path,
125 clone_path,
126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 # check if current workdir was *NOT* updated
128 # check if current workdir was *NOT* updated
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 # Make sure it's not bare repo
130 # Make sure it's not bare repo
131 assert not repo_clone.bare
131 assert not repo_clone.bare
132 assert not os.path.isfile(fpath)
132 assert not os.path.isfile(fpath)
133
133
134 def test_repo_clone_into_bare_repo(self):
134 def test_repo_clone_into_bare_repo(self):
135 repo = GitRepository(TEST_GIT_REPO)
135 repo = GitRepository(TEST_GIT_REPO)
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 repo_clone = GitRepository(
137 repo_clone = GitRepository(
138 clone_path, create=True, src_url=repo.path, bare=True)
138 clone_path, create=True, src_url=repo.path, bare=True)
139 assert repo_clone.bare
139 assert repo_clone.bare
140
140
141 def test_create_repo_is_not_bare_by_default(self):
141 def test_create_repo_is_not_bare_by_default(self):
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 assert not repo.bare
143 assert not repo.bare
144
144
145 def test_create_bare_repo(self):
145 def test_create_bare_repo(self):
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 assert repo.bare
147 assert repo.bare
148
148
    def test_update_server_info(self):
        """Smoke test: _update_server_info() must run without raising."""
        self.repo._update_server_info()
151
151
152 def test_fetch(self, vcsbackend_git):
152 def test_fetch(self, vcsbackend_git):
153 # Note: This is a git specific part of the API, it's only implemented
153 # Note: This is a git specific part of the API, it's only implemented
154 # by the git backend.
154 # by the git backend.
155 source_repo = vcsbackend_git.repo
155 source_repo = vcsbackend_git.repo
156 target_repo = vcsbackend_git.create_repo(bare=True)
156 target_repo = vcsbackend_git.create_repo(bare=True)
157 target_repo.fetch(source_repo.path)
157 target_repo.fetch(source_repo.path)
158 # Note: Get a fresh instance, avoids caching trouble
158 # Note: Get a fresh instance, avoids caching trouble
159 target_repo = vcsbackend_git.backend(target_repo.path)
159 target_repo = vcsbackend_git.backend(target_repo.path)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161
161
    def test_commit_ids(self):
        """A known subset of shas must all be present in repo.commit_ids."""
        # there are 112 commits (by now)
        # so we can assume they would be available from now on
        subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
                  '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
                  'fa6600f6848800641328adbf7811fd2372c02ab2',
                  '102607b09cdd60e2793929c4f90478be29f85a17',
                  '49d3fd156b6f7db46313fac355dca1a0b94a0017',
                  '2d1028c054665b962fa3d307adfc923ddd528038',
                  'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
                  'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
                  'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
                  '8430a588b43b5d6da365400117c89400326e7992',
                  'd955cd312c17b02143c04fa1099a352b04368118',
                  'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
                  'add63e382e4aabc9e1afdc4bdc24506c269b7618',
                  'f298fe1189f1b69779a4423f40b48edf92a703fc',
                  'bd9b619eb41994cac43d67cf4ccc8399c1125808',
                  '6e125e7c890379446e98980d8ed60fba87d0f6d1',
                  'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
                  '0b05e4ed56c802098dfc813cbe779b2f49e92500',
                  '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
                  '45223f8f114c64bf4d6f853e3c35a369a6305520',
                  'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
                  'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
                  '27d48942240f5b91dfda77accd2caac94708cc7d',
                  '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
                  'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
        assert subset.issubset(set(self.repo.commit_ids))
191
191
192 def test_slicing(self):
192 def test_slicing(self):
193 # 4 1 5 10 95
193 # 4 1 5 10 95
194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 (10, 20, 10), (5, 100, 95)]:
195 (10, 20, 10), (5, 100, 95)]:
196 commit_ids = list(self.repo[sfrom:sto])
196 commit_ids = list(self.repo[sfrom:sto])
197 assert len(commit_ids) == size
197 assert len(commit_ids) == size
198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200
200
201 def test_branches(self):
201 def test_branches(self):
202 # TODO: Need more tests here
202 # TODO: Need more tests here
203 # Removed (those are 'remotes' branches for cloned repo)
203 # Removed (those are 'remotes' branches for cloned repo)
204 # assert 'master' in self.repo.branches
204 # assert 'master' in self.repo.branches
205 # assert 'gittree' in self.repo.branches
205 # assert 'gittree' in self.repo.branches
206 # assert 'web-branch' in self.repo.branches
206 # assert 'web-branch' in self.repo.branches
207 for __, commit_id in self.repo.branches.items():
207 for __, commit_id in self.repo.branches.items():
208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209
209
210 def test_tags(self):
210 def test_tags(self):
211 # TODO: Need more tests here
211 # TODO: Need more tests here
212 assert 'v0.1.1' in self.repo.tags
212 assert 'v0.1.1' in self.repo.tags
213 assert 'v0.1.2' in self.repo.tags
213 assert 'v0.1.2' in self.repo.tags
214 for __, commit_id in self.repo.tags.items():
214 for __, commit_id in self.repo.tags.items():
215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216
216
217 def _test_single_commit_cache(self, commit_id):
217 def _test_single_commit_cache(self, commit_id):
218 commit = self.repo.get_commit(commit_id)
218 commit = self.repo.get_commit(commit_id)
219 assert commit_id in self.repo.commits
219 assert commit_id in self.repo.commits
220 assert commit is self.repo.commits[commit_id]
220 assert commit is self.repo.commits[commit_id]
221
221
    def test_initial_commit(self):
        """The very first commit exposes message, author and its file tree."""
        commit_id = self.repo.commit_ids[0]
        init_commit = self.repo.get_commit(commit_id)
        init_author = init_commit.author

        assert init_commit.message == 'initial import\n'
        assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
        # On the initial commit, author and committer are the same person.
        assert init_author == init_commit.committer
        for path in ('vcs/__init__.py',
                     'vcs/backends/BaseRepository.py',
                     'vcs/backends/__init__.py'):
            assert isinstance(init_commit.get_node(path), FileNode)
        for path in ('', 'vcs', 'vcs/backends'):
            assert isinstance(init_commit.get_node(path), DirNode)

        with pytest.raises(NodeDoesNotExistError):
            init_commit.get_node(path='foobar')

        # A trailing slash must still resolve to the directory node.
        node = init_commit.get_node('vcs/')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node('vcs')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.DIR

        node = init_commit.get_node('vcs/__init__.py')
        assert hasattr(node, 'kind')
        assert node.kind == NodeKind.FILE
251
251
252 def test_not_existing_commit(self):
252 def test_not_existing_commit(self):
253 with pytest.raises(RepositoryError):
253 with pytest.raises(RepositoryError):
254 self.repo.get_commit('f' * 40)
254 self.repo.get_commit('f' * 40)
255
255
    def test_commit10(self):
        """Commit #10 must contain README.rst with this exact content."""
        commit10 = self.repo.get_commit(self.repo.commit_ids[9])
        README = """===
VCS
===

Various Version Control System management abstraction layer for Python.

Introduction
------------

TODO: To be written...

"""
        node = commit10.get_node('README.rst')
        assert node.kind == NodeKind.FILE
        assert node.content == README
274
274
275 def test_head(self):
275 def test_head(self):
276 assert self.repo.head == self.repo.get_commit().raw_id
276 assert self.repo.head == self.repo.get_commit().raw_id
277
277
278 def test_checkout_with_create(self):
278 def test_checkout_with_create(self):
279 repo_clone = self.get_clone_repo()
279 repo_clone = self.get_clone_repo()
280
280
281 new_branch = 'new_branch'
281 new_branch = 'new_branch'
282 assert repo_clone._current_branch() == 'master'
282 assert repo_clone._current_branch() == 'master'
283 assert set(repo_clone.branches) == {'master'}
283 assert set(repo_clone.branches) == {'master'}
284 repo_clone._checkout(new_branch, create=True)
284 repo_clone._checkout(new_branch, create=True)
285
285
286 # Branches is a lazy property so we need to recrete the Repo object.
286 # Branches is a lazy property so we need to recrete the Repo object.
287 repo_clone = GitRepository(repo_clone.path)
287 repo_clone = GitRepository(repo_clone.path)
288 assert set(repo_clone.branches) == {'master', new_branch}
288 assert set(repo_clone.branches) == {'master', new_branch}
289 assert repo_clone._current_branch() == new_branch
289 assert repo_clone._current_branch() == new_branch
290
290
291 def test_checkout(self):
291 def test_checkout(self):
292 repo_clone = self.get_clone_repo()
292 repo_clone = self.get_clone_repo()
293
293
294 repo_clone._checkout('new_branch', create=True)
294 repo_clone._checkout('new_branch', create=True)
295 repo_clone._checkout('master')
295 repo_clone._checkout('master')
296
296
297 assert repo_clone._current_branch() == 'master'
297 assert repo_clone._current_branch() == 'master'
298
298
299 def test_checkout_same_branch(self):
299 def test_checkout_same_branch(self):
300 repo_clone = self.get_clone_repo()
300 repo_clone = self.get_clone_repo()
301
301
302 repo_clone._checkout('master')
302 repo_clone._checkout('master')
303 assert repo_clone._current_branch() == 'master'
303 assert repo_clone._current_branch() == 'master'
304
304
305 def test_checkout_branch_already_exists(self):
305 def test_checkout_branch_already_exists(self):
306 repo_clone = self.get_clone_repo()
306 repo_clone = self.get_clone_repo()
307
307
308 with pytest.raises(RepositoryError):
308 with pytest.raises(RepositoryError):
309 repo_clone._checkout('master', create=True)
309 repo_clone._checkout('master', create=True)
310
310
311 def test_checkout_bare_repo(self):
311 def test_checkout_bare_repo(self):
312 with pytest.raises(RepositoryError):
312 with pytest.raises(RepositoryError):
313 self.repo._checkout('master')
313 self.repo._checkout('master')
314
314
315 def test_current_branch_bare_repo(self):
315 def test_current_branch_bare_repo(self):
316 with pytest.raises(RepositoryError):
316 with pytest.raises(RepositoryError):
317 self.repo._current_branch()
317 self.repo._current_branch()
318
318
319 def test_current_branch_empty_repo(self):
319 def test_current_branch_empty_repo(self):
320 repo = self.get_empty_repo()
320 repo = self.get_empty_repo()
321 assert repo._current_branch() is None
321 assert repo._current_branch() is None
322
322
323 def test_local_clone(self):
323 def test_local_clone(self):
324 clone_path = next(REPO_PATH_GENERATOR)
324 clone_path = next(REPO_PATH_GENERATOR)
325 self.repo._local_clone(clone_path, 'master')
325 self.repo._local_clone(clone_path, 'master')
326 repo_clone = GitRepository(clone_path)
326 repo_clone = GitRepository(clone_path)
327
327
328 assert self.repo.commit_ids == repo_clone.commit_ids
328 assert self.repo.commit_ids == repo_clone.commit_ids
329
329
    def test_local_clone_with_specific_branch(self):
        """Cloning a single branch only brings that branch's history."""
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo, rooted two commits behind tip.
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        clone_path = next(REPO_PATH_GENERATOR)
        source_repo._local_clone(clone_path, 'new_branch')
        repo_clone = GitRepository(clone_path)

        # History must stop at new_branch_commit (index -3, inclusive),
        # i.e. [:-3 + 1] == everything up to and including that commit.
        assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids

        clone_path = next(REPO_PATH_GENERATOR)
        source_repo._local_clone(clone_path, 'master')
        repo_clone = GitRepository(clone_path)

        # Cloning master brings the full history.
        assert source_repo.commit_ids == repo_clone.commit_ids
349
349
350 def test_local_clone_fails_if_target_exists(self):
350 def test_local_clone_fails_if_target_exists(self):
351 with pytest.raises(RepositoryError):
351 with pytest.raises(RepositoryError):
352 self.repo._local_clone(self.repo.path, 'master')
352 self.repo._local_clone(self.repo.path, 'master')
353
353
    def test_local_fetch(self):
        """_local_fetch() records the head of the branch just fetched."""
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo, rooted two commits behind tip.
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        target_repo._local_fetch(source_repo.path, 'new_branch')
        assert target_repo._last_fetch_heads() == [new_branch_commit]

        # A second fetch of another branch replaces the recorded heads.
        target_repo._local_fetch(source_repo.path, 'master')
        assert target_repo._last_fetch_heads() == [master_commit]
369
369
370 def test_local_fetch_from_bare_repo(self):
370 def test_local_fetch_from_bare_repo(self):
371 target_repo = self.get_empty_repo()
371 target_repo = self.get_empty_repo()
372 target_repo._local_fetch(self.repo.path, 'master')
372 target_repo._local_fetch(self.repo.path, 'master')
373
373
374 master_commit = self.repo.commit_ids[-1]
374 master_commit = self.repo.commit_ids[-1]
375 assert target_repo._last_fetch_heads() == [master_commit]
375 assert target_repo._last_fetch_heads() == [master_commit]
376
376
377 def test_local_fetch_from_same_repo(self):
377 def test_local_fetch_from_same_repo(self):
378 with pytest.raises(ValueError):
378 with pytest.raises(ValueError):
379 self.repo._local_fetch(self.repo.path, 'master')
379 self.repo._local_fetch(self.repo.path, 'master')
380
380
381 def test_local_fetch_branch_does_not_exist(self):
381 def test_local_fetch_branch_does_not_exist(self):
382 target_repo = self.get_empty_repo()
382 target_repo = self.get_empty_repo()
383
383
384 with pytest.raises(RepositoryError):
384 with pytest.raises(RepositoryError):
385 target_repo._local_fetch(self.repo.path, 'new_branch')
385 target_repo._local_fetch(self.repo.path, 'new_branch')
386
386
    def test_local_pull(self):
        """_local_pull() moves HEAD to the pulled branch's head commit."""
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        target_repo._local_pull(source_repo.path, 'new_branch')
        # Re-instantiate to bypass lazily cached properties.
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == new_branch_commit

        target_repo._local_pull(source_repo.path, 'master')
        target_repo = GitRepository(target_repo.path)
        assert target_repo.head == master_commit
404
404
405 def test_local_pull_in_bare_repo(self):
405 def test_local_pull_in_bare_repo(self):
406 with pytest.raises(RepositoryError):
406 with pytest.raises(RepositoryError):
407 self.repo._local_pull(self.repo.path, 'master')
407 self.repo._local_pull(self.repo.path, 'master')
408
408
    def test_local_merge(self):
        """_local_merge() produces a merge commit with the given message and
        author, and leaves no intermediate merge state behind."""
        target_repo = self.get_empty_repo()
        source_repo = self.get_clone_repo()

        # Create a new branch in source repo
        master_commit = source_repo.commit_ids[-1]
        new_branch_commit = source_repo.commit_ids[-3]
        source_repo._checkout(new_branch_commit)
        source_repo._checkout('new_branch', create=True)

        # This is required as one cannot do a -ff-only merge in an empty repo.
        target_repo._local_pull(source_repo.path, 'new_branch')

        target_repo._local_fetch(source_repo.path, 'master')
        merge_message = 'Merge message\n\nDescription:...'
        user_name = 'Albert Einstein'
        user_email = 'albert@einstein.com'
        target_repo._local_merge(merge_message, user_name, user_email,
                                 target_repo._last_fetch_heads())

        # Re-instantiate to bypass lazily cached properties.
        target_repo = GitRepository(target_repo.path)
        # The merged-in master head is the second parent line's tip.
        assert target_repo.commit_ids[-2] == master_commit
        last_commit = target_repo.get_commit(target_repo.head)
        assert last_commit.message.strip() == merge_message
        assert last_commit.author == '%s <%s>' % (user_name, user_email)

        # The merge must be concluded: no MERGE_HEAD may be left over.
        assert not os.path.exists(
            os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437
437
    def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
        """A conflicting merge raises and leaves no partial merge state."""
        target_repo = vcsbackend_git.create_repo(number_of_commits=1)
        # Seed a file guaranteed to conflict with the incoming branch.
        vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')

        target_repo._local_fetch(self.repo.path, 'master')
        with pytest.raises(RepositoryError):
            target_repo._local_merge(
                'merge_message', 'user name', 'user@name.com',
                target_repo._last_fetch_heads())

        # Check we are not left in an intermediate merge state
        assert not os.path.exists(
            os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451
451
    def test_local_merge_into_empty_repo(self):
        """Merging into an empty repo must raise (no -ff-only merge there)."""
        target_repo = self.get_empty_repo()

        # This is required as one cannot do a -ff-only merge in an empty repo.
        target_repo._local_fetch(self.repo.path, 'master')
        with pytest.raises(RepositoryError):
            target_repo._local_merge(
                'merge_message', 'user name', 'user@name.com',
                target_repo._last_fetch_heads())
461
461
462 def test_local_merge_in_bare_repo(self):
462 def test_local_merge_in_bare_repo(self):
463 with pytest.raises(RepositoryError):
463 with pytest.raises(RepositoryError):
464 self.repo._local_merge(
464 self.repo._local_merge(
465 'merge_message', 'user name', 'user@name.com', None)
465 'merge_message', 'user name', 'user@name.com', None)
466
466
    def test_local_push_non_bare(self):
        """_local_push() into a non-bare repo creates the pushed branch."""
        target_repo = self.get_empty_repo()

        pushed_branch = 'pushed_branch'
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        # Re-instantiate to bypass lazily cached properties.
        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
481
481
    def test_local_push_bare(self):
        """_local_push() into a bare repo creates the pushed branch."""
        target_repo = self.get_empty_repo(bare=True)

        pushed_branch = 'pushed_branch'
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches. (Bare repos keep HEAD at the repo root.)
        with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        # Re-instantiate to bypass lazily cached properties.
        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
496
496
    def test_local_push_non_bare_target_branch_is_checked_out(self):
        """Pushing to a branch that is checked out in the target still works."""
        target_repo = self.get_clone_repo()

        pushed_branch = 'pushed_branch'
        # Create a new branch in source repo
        new_branch_commit = target_repo.commit_ids[-3]
        target_repo._checkout(new_branch_commit)
        target_repo._checkout(pushed_branch, create=True)

        self.repo._local_push('master', target_repo.path, pushed_branch)

        # Re-instantiate to bypass lazily cached properties.
        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
512
512
513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 with pytest.raises(RepositoryError):
515 with pytest.raises(RepositoryError):
516 self.repo._local_push('master', target_repo.path, 'master')
516 self.repo._local_push('master', target_repo.path, 'master')
517
517
    def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
        """With enable_hooks=True, RC_SKIP_HOOKS must NOT be set in the
        environment passed to the underlying git command."""
        target_repo = self.get_empty_repo(bare=True)

        with mock.patch.object(self.repo, 'run_git_command') as run_mock:
            self.repo._local_push(
                'master', target_repo.path, 'master', enable_hooks=True)
        # Inspect the keyword args of the recorded git invocation.
        env = run_mock.call_args[1]['extra_env']
        assert 'RC_SKIP_HOOKS' not in env
526
526
    def _add_failing_hook(self, repo_path, hook_name, bare=False):
        """Install an executable git hook named *hook_name* that exits with
        status 1 (failing the git operation) unless the RC_SKIP_HOOKS
        environment variable is set, in which case it exits 0."""
        # Bare repos keep hooks at the root; non-bare under .git/.
        path_components = (
            ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
        hook_path = os.path.join(repo_path, *path_components)
        with open(hook_path, 'w') as f:
            script_lines = [
                '#!%s' % sys.executable,
                'import os',
                'import sys',
                'if os.environ.get("RC_SKIP_HOOKS"):',
                ' sys.exit(0)',
                'sys.exit(1)',
            ]
            f.write('\n'.join(script_lines))
        # Hooks only run when executable.
        os.chmod(hook_path, 0o755)
542
542
    def test_local_push_does_not_execute_hook(self):
        """By default _local_push() skips hooks, so a failing pre-receive
        hook in the target cannot block the push."""
        target_repo = self.get_empty_repo()

        pushed_branch = 'pushed_branch'
        self._add_failing_hook(target_repo.path, 'pre-receive')
        self.repo._local_push('master', target_repo.path, pushed_branch)
        # Fix the HEAD of the target repo, or otherwise GitRepository won't
        # report any branches.
        with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
            f.write('ref: refs/heads/%s' % pushed_branch)

        # Re-instantiate to bypass lazily cached properties.
        target_repo = GitRepository(target_repo.path)

        assert (target_repo.branches[pushed_branch] ==
                self.repo.branches['master'])
558
558
559 def test_local_push_executes_hook(self):
559 def test_local_push_executes_hook(self):
560 target_repo = self.get_empty_repo(bare=True)
560 target_repo = self.get_empty_repo(bare=True)
561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 with pytest.raises(RepositoryError):
562 with pytest.raises(RepositoryError):
563 self.repo._local_push(
563 self.repo._local_push(
564 'master', target_repo.path, 'master', enable_hooks=True)
564 'master', target_repo.path, 'master', enable_hooks=True)
565
565
566 def test_maybe_prepare_merge_workspace(self):
566 def test_maybe_prepare_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
568 2, 'pr2', Reference('branch', 'master', 'unused'),
568 2, 'pr2', Reference('branch', 'master', 'unused'),
569 Reference('branch', 'master', 'unused'))
569 Reference('branch', 'master', 'unused'))
570
570
571 assert os.path.isdir(workspace)
571 assert os.path.isdir(workspace)
572 workspace_repo = GitRepository(workspace)
572 workspace_repo = GitRepository(workspace)
573 assert workspace_repo.branches == self.repo.branches
573 assert workspace_repo.branches == self.repo.branches
574
574
575 # Calling it a second time should also succeed
575 # Calling it a second time should also succeed
576 workspace = self.repo._maybe_prepare_merge_workspace(
576 workspace = self.repo._maybe_prepare_merge_workspace(
577 2, 'pr2', Reference('branch', 'master', 'unused'),
577 2, 'pr2', Reference('branch', 'master', 'unused'),
578 Reference('branch', 'master', 'unused'))
578 Reference('branch', 'master', 'unused'))
579 assert os.path.isdir(workspace)
579 assert os.path.isdir(workspace)
580
580
581 def test_maybe_prepare_merge_workspace_different_refs(self):
581 def test_maybe_prepare_merge_workspace_different_refs(self):
582 workspace = self.repo._maybe_prepare_merge_workspace(
582 workspace = self.repo._maybe_prepare_merge_workspace(
583 2, 'pr2', Reference('branch', 'master', 'unused'),
583 2, 'pr2', Reference('branch', 'master', 'unused'),
584 Reference('branch', 'develop', 'unused'))
584 Reference('branch', 'develop', 'unused'))
585
585
586 assert os.path.isdir(workspace)
586 assert os.path.isdir(workspace)
587 workspace_repo = GitRepository(workspace)
587 workspace_repo = GitRepository(workspace)
588 assert workspace_repo.branches == self.repo.branches
588 assert workspace_repo.branches == self.repo.branches
589
589
590 # Calling it a second time should also succeed
590 # Calling it a second time should also succeed
591 workspace = self.repo._maybe_prepare_merge_workspace(
591 workspace = self.repo._maybe_prepare_merge_workspace(
592 2, 'pr2', Reference('branch', 'master', 'unused'),
592 2, 'pr2', Reference('branch', 'master', 'unused'),
593 Reference('branch', 'develop', 'unused'))
593 Reference('branch', 'develop', 'unused'))
594 assert os.path.isdir(workspace)
594 assert os.path.isdir(workspace)
595
595
596 def test_cleanup_merge_workspace(self):
596 def test_cleanup_merge_workspace(self):
597 workspace = self.repo._maybe_prepare_merge_workspace(
597 workspace = self.repo._maybe_prepare_merge_workspace(
598 2, 'pr3', Reference('branch', 'master', 'unused'),
598 2, 'pr3', Reference('branch', 'master', 'unused'),
599 Reference('branch', 'master', 'unused'))
599 Reference('branch', 'master', 'unused'))
600 self.repo.cleanup_merge_workspace(2, 'pr3')
600 self.repo.cleanup_merge_workspace(2, 'pr3')
601
601
602 assert not os.path.exists(workspace)
602 assert not os.path.exists(workspace)
603
603
604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
605 # No assert: because in case of an inexistent workspace this function
605 # No assert: because in case of an inexistent workspace this function
606 # should still succeed.
606 # should still succeed.
607 self.repo.cleanup_merge_workspace(1, 'pr4')
607 self.repo.cleanup_merge_workspace(1, 'pr4')
608
608
609 def test_set_refs(self):
609 def test_set_refs(self):
610 test_ref = 'refs/test-refs/abcde'
610 test_ref = 'refs/test-refs/abcde'
611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612
612
613 self.repo.set_refs(test_ref, test_commit_id)
613 self.repo.set_refs(test_ref, test_commit_id)
614 stdout, _ = self.repo.run_git_command(['show-ref'])
614 stdout, _ = self.repo.run_git_command(['show-ref'])
615 assert test_ref in stdout
615 assert test_ref in stdout
616 assert test_commit_id in stdout
616 assert test_commit_id in stdout
617
617
618 def test_remove_ref(self):
618 def test_remove_ref(self):
619 test_ref = 'refs/test-refs/abcde'
619 test_ref = 'refs/test-refs/abcde'
620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 self.repo.set_refs(test_ref, test_commit_id)
621 self.repo.set_refs(test_ref, test_commit_id)
622 stdout, _ = self.repo.run_git_command(['show-ref'])
622 stdout, _ = self.repo.run_git_command(['show-ref'])
623 assert test_ref in stdout
623 assert test_ref in stdout
624 assert test_commit_id in stdout
624 assert test_commit_id in stdout
625
625
626 self.repo.remove_ref(test_ref)
626 self.repo.remove_ref(test_ref)
627 stdout, _ = self.repo.run_git_command(['show-ref'])
627 stdout, _ = self.repo.run_git_command(['show-ref'])
628 assert test_ref not in stdout
628 assert test_ref not in stdout
629 assert test_commit_id not in stdout
629 assert test_commit_id not in stdout
630
630
631
631
632 class TestGitCommit(object):
632 class TestGitCommit(object):
633
633
634 @pytest.fixture(autouse=True)
634 @pytest.fixture(autouse=True)
635 def prepare(self):
635 def prepare(self):
636 self.repo = GitRepository(TEST_GIT_REPO)
636 self.repo = GitRepository(TEST_GIT_REPO)
637
637
638 def test_default_commit(self):
638 def test_default_commit(self):
639 tip = self.repo.get_commit()
639 tip = self.repo.get_commit()
640 assert tip == self.repo.get_commit(None)
640 assert tip == self.repo.get_commit(None)
641 assert tip == self.repo.get_commit('tip')
641 assert tip == self.repo.get_commit('tip')
642
642
643 def test_root_node(self):
643 def test_root_node(self):
644 tip = self.repo.get_commit()
644 tip = self.repo.get_commit()
645 assert tip.root is tip.get_node('')
645 assert tip.root is tip.get_node('')
646
646
647 def test_lazy_fetch(self):
647 def test_lazy_fetch(self):
648 """
648 """
649 Test if commit's nodes expands and are cached as we walk through
649 Test if commit's nodes expands and are cached as we walk through
650 the commit. This test is somewhat hard to write as order of tests
650 the commit. This test is somewhat hard to write as order of tests
651 is a key here. Written by running command after command in a shell.
651 is a key here. Written by running command after command in a shell.
652 """
652 """
653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
654 assert commit_id in self.repo.commit_ids
654 assert commit_id in self.repo.commit_ids
655 commit = self.repo.get_commit(commit_id)
655 commit = self.repo.get_commit(commit_id)
656 assert len(commit.nodes) == 0
656 assert len(commit.nodes) == 0
657 root = commit.root
657 root = commit.root
658 assert len(commit.nodes) == 1
658 assert len(commit.nodes) == 1
659 assert len(root.nodes) == 8
659 assert len(root.nodes) == 8
660 # accessing root.nodes updates commit.nodes
660 # accessing root.nodes updates commit.nodes
661 assert len(commit.nodes) == 9
661 assert len(commit.nodes) == 9
662
662
663 docs = root.get_node('docs')
663 docs = root.get_node('docs')
664 # we haven't yet accessed anything new as docs dir was already cached
664 # we haven't yet accessed anything new as docs dir was already cached
665 assert len(commit.nodes) == 9
665 assert len(commit.nodes) == 9
666 assert len(docs.nodes) == 8
666 assert len(docs.nodes) == 8
667 # accessing docs.nodes updates commit.nodes
667 # accessing docs.nodes updates commit.nodes
668 assert len(commit.nodes) == 17
668 assert len(commit.nodes) == 17
669
669
670 assert docs is commit.get_node('docs')
670 assert docs is commit.get_node('docs')
671 assert docs is root.nodes[0]
671 assert docs is root.nodes[0]
672 assert docs is root.dirs[0]
672 assert docs is root.dirs[0]
673 assert docs is commit.get_node('docs')
673 assert docs is commit.get_node('docs')
674
674
675 def test_nodes_with_commit(self):
675 def test_nodes_with_commit(self):
676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
677 commit = self.repo.get_commit(commit_id)
677 commit = self.repo.get_commit(commit_id)
678 root = commit.root
678 root = commit.root
679 docs = root.get_node('docs')
679 docs = root.get_node('docs')
680 assert docs is commit.get_node('docs')
680 assert docs is commit.get_node('docs')
681 api = docs.get_node('api')
681 api = docs.get_node('api')
682 assert api is commit.get_node('docs/api')
682 assert api is commit.get_node('docs/api')
683 index = api.get_node('index.rst')
683 index = api.get_node('index.rst')
684 assert index is commit.get_node('docs/api/index.rst')
684 assert index is commit.get_node('docs/api/index.rst')
685 assert index is commit.get_node('docs')\
685 assert index is commit.get_node('docs')\
686 .get_node('api')\
686 .get_node('api')\
687 .get_node('index.rst')
687 .get_node('index.rst')
688
688
689 def test_branch_and_tags(self):
689 def test_branch_and_tags(self):
690 """
690 """
691 rev0 = self.repo.commit_ids[0]
691 rev0 = self.repo.commit_ids[0]
692 commit0 = self.repo.get_commit(rev0)
692 commit0 = self.repo.get_commit(rev0)
693 assert commit0.branch == 'master'
693 assert commit0.branch == 'master'
694 assert commit0.tags == []
694 assert commit0.tags == []
695
695
696 rev10 = self.repo.commit_ids[10]
696 rev10 = self.repo.commit_ids[10]
697 commit10 = self.repo.get_commit(rev10)
697 commit10 = self.repo.get_commit(rev10)
698 assert commit10.branch == 'master'
698 assert commit10.branch == 'master'
699 assert commit10.tags == []
699 assert commit10.tags == []
700
700
701 rev44 = self.repo.commit_ids[44]
701 rev44 = self.repo.commit_ids[44]
702 commit44 = self.repo.get_commit(rev44)
702 commit44 = self.repo.get_commit(rev44)
703 assert commit44.branch == 'web-branch'
703 assert commit44.branch == 'web-branch'
704
704
705 tip = self.repo.get_commit('tip')
705 tip = self.repo.get_commit('tip')
706 assert 'tip' in tip.tags
706 assert 'tip' in tip.tags
707 """
707 """
708 # Those tests would fail - branches are now going
708 # Those tests would fail - branches are now going
709 # to be changed at main API in order to support git backend
709 # to be changed at main API in order to support git backend
710 pass
710 pass
711
711
712 def test_file_size(self):
712 def test_file_size(self):
713 to_check = (
713 to_check = (
714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
715 'vcs/backends/BaseRepository.py', 502),
715 'vcs/backends/BaseRepository.py', 502),
716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
717 'vcs/backends/hg.py', 854),
717 'vcs/backends/hg.py', 854),
718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
719 'setup.py', 1068),
719 'setup.py', 1068),
720
720
721 ('d955cd312c17b02143c04fa1099a352b04368118',
721 ('d955cd312c17b02143c04fa1099a352b04368118',
722 'vcs/backends/base.py', 2921),
722 'vcs/backends/base.py', 2921),
723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
724 'vcs/backends/base.py', 3936),
724 'vcs/backends/base.py', 3936),
725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
726 'vcs/backends/base.py', 6189),
726 'vcs/backends/base.py', 6189),
727 )
727 )
728 for commit_id, path, size in to_check:
728 for commit_id, path, size in to_check:
729 node = self.repo.get_commit(commit_id).get_node(path)
729 node = self.repo.get_commit(commit_id).get_node(path)
730 assert node.is_file()
730 assert node.is_file()
731 assert node.size == size
731 assert node.size == size
732
732
733 def test_file_history_from_commits(self):
733 def test_file_history_from_commits(self):
734 node = self.repo[10].get_node('setup.py')
734 node = self.repo[10].get_node('setup.py')
735 commit_ids = [commit.raw_id for commit in node.history]
735 commit_ids = [commit.raw_id for commit in node.history]
736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
737
737
738 node = self.repo[20].get_node('setup.py')
738 node = self.repo[20].get_node('setup.py')
739 node_ids = [commit.raw_id for commit in node.history]
739 node_ids = [commit.raw_id for commit in node.history]
740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
742
742
743 # special case we check history from commit that has this particular
743 # special case we check history from commit that has this particular
744 # file changed this means we check if it's included as well
744 # file changed this means we check if it's included as well
745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
746 .get_node('setup.py')
746 .get_node('setup.py')
747 node_ids = [commit.raw_id for commit in node.history]
747 node_ids = [commit.raw_id for commit in node.history]
748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750
750
751 def test_file_history(self):
751 def test_file_history(self):
752 # we can only check if those commits are present in the history
752 # we can only check if those commits are present in the history
753 # as we cannot update this test every time file is changed
753 # as we cannot update this test every time file is changed
754 files = {
754 files = {
755 'setup.py': [
755 'setup.py': [
756 '54386793436c938cff89326944d4c2702340037d',
756 '54386793436c938cff89326944d4c2702340037d',
757 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
757 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
758 '998ed409c795fec2012b1c0ca054d99888b22090',
758 '998ed409c795fec2012b1c0ca054d99888b22090',
759 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
759 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
760 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
760 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
761 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
761 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
762 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
762 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
763 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
763 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
764 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
764 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
765 ],
765 ],
766 'vcs/nodes.py': [
766 'vcs/nodes.py': [
767 '33fa3223355104431402a888fa77a4e9956feb3e',
767 '33fa3223355104431402a888fa77a4e9956feb3e',
768 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
768 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
769 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
769 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
770 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
770 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
771 'c877b68d18e792a66b7f4c529ea02c8f80801542',
771 'c877b68d18e792a66b7f4c529ea02c8f80801542',
772 '4313566d2e417cb382948f8d9d7c765330356054',
772 '4313566d2e417cb382948f8d9d7c765330356054',
773 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
773 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
774 '54386793436c938cff89326944d4c2702340037d',
774 '54386793436c938cff89326944d4c2702340037d',
775 '54000345d2e78b03a99d561399e8e548de3f3203',
775 '54000345d2e78b03a99d561399e8e548de3f3203',
776 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
776 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
777 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
777 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
778 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
778 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
779 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
779 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
780 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
780 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
781 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
781 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
782 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
782 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
783 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
783 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
784 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
784 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
785 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
785 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
786 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
786 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
787 'f15c21f97864b4f071cddfbf2750ec2e23859414',
787 'f15c21f97864b4f071cddfbf2750ec2e23859414',
788 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
788 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
789 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
789 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
790 '84dec09632a4458f79f50ddbbd155506c460b4f9',
790 '84dec09632a4458f79f50ddbbd155506c460b4f9',
791 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
791 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
792 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
792 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
793 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
793 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
794 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
794 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
795 '6970b057cffe4aab0a792aa634c89f4bebf01441',
795 '6970b057cffe4aab0a792aa634c89f4bebf01441',
796 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
796 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
797 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
797 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
798 ],
798 ],
799 'vcs/backends/git.py': [
799 'vcs/backends/git.py': [
800 '4cf116ad5a457530381135e2f4c453e68a1b0105',
800 '4cf116ad5a457530381135e2f4c453e68a1b0105',
801 '9a751d84d8e9408e736329767387f41b36935153',
801 '9a751d84d8e9408e736329767387f41b36935153',
802 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
802 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
803 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
803 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
804 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
804 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
805 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
805 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
806 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
806 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
807 '54000345d2e78b03a99d561399e8e548de3f3203',
807 '54000345d2e78b03a99d561399e8e548de3f3203',
808 ],
808 ],
809 }
809 }
810 for path, commit_ids in files.items():
810 for path, commit_ids in files.items():
811 node = self.repo.get_commit(commit_ids[0]).get_node(path)
811 node = self.repo.get_commit(commit_ids[0]).get_node(path)
812 node_ids = [commit.raw_id for commit in node.history]
812 node_ids = [commit.raw_id for commit in node.history]
813 assert set(commit_ids).issubset(set(node_ids)), (
813 assert set(commit_ids).issubset(set(node_ids)), (
814 "We assumed that %s is subset of commit_ids for which file %s "
814 "We assumed that %s is subset of commit_ids for which file %s "
815 "has been changed, and history of that node returned: %s"
815 "has been changed, and history of that node returned: %s"
816 % (commit_ids, path, node_ids))
816 % (commit_ids, path, node_ids))
817
817
818 def test_file_annotate(self):
818 def test_file_annotate(self):
819 files = {
819 files = {
820 'vcs/backends/__init__.py': {
820 'vcs/backends/__init__.py': {
821 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
821 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
822 'lines_no': 1,
822 'lines_no': 1,
823 'commits': [
823 'commits': [
824 'c1214f7e79e02fc37156ff215cd71275450cffc3',
824 'c1214f7e79e02fc37156ff215cd71275450cffc3',
825 ],
825 ],
826 },
826 },
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
828 'lines_no': 21,
828 'lines_no': 21,
829 'commits': [
829 'commits': [
830 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
830 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 ],
851 ],
852 },
852 },
853 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
853 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
854 'lines_no': 32,
854 'lines_no': 32,
855 'commits': [
855 'commits': [
856 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
856 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
858 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
861 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '54000345d2e78b03a99d561399e8e548de3f3203',
863 '54000345d2e78b03a99d561399e8e548de3f3203',
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
866 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
871 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
873 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
873 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
877 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
883 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
883 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 ],
888 ],
889 },
889 },
890 },
890 },
891 }
891 }
892
892
893 for fname, commit_dict in files.items():
893 for fname, commit_dict in files.items():
894 for commit_id, __ in commit_dict.items():
894 for commit_id, __ in commit_dict.items():
895 commit = self.repo.get_commit(commit_id)
895 commit = self.repo.get_commit(commit_id)
896
896
897 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
897 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
898 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
898 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
899 assert l1_1 == l1_2
899 assert l1_1 == l1_2
900 l1 = l1_1
900 l1 = l1_1
901 l2 = files[fname][commit_id]['commits']
901 l2 = files[fname][commit_id]['commits']
902 assert l1 == l2, (
902 assert l1 == l2, (
903 "The lists of commit_ids for %s@commit_id %s"
903 "The lists of commit_ids for %s@commit_id %s"
904 "from annotation list should match each other, "
904 "from annotation list should match each other, "
905 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
905 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906
906
907 def test_files_state(self):
907 def test_files_state(self):
908 """
908 """
909 Tests state of FileNodes.
909 Tests state of FileNodes.
910 """
910 """
911 node = self.repo\
911 node = self.repo\
912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 .get_node('vcs/utils/diffs.py')
913 .get_node('vcs/utils/diffs.py')
914 assert node.state, NodeState.ADDED
914 assert node.state, NodeState.ADDED
915 assert node.added
915 assert node.added
916 assert not node.changed
916 assert not node.changed
917 assert not node.not_changed
917 assert not node.not_changed
918 assert not node.removed
918 assert not node.removed
919
919
920 node = self.repo\
920 node = self.repo\
921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 .get_node('.hgignore')
922 .get_node('.hgignore')
923 assert node.state, NodeState.CHANGED
923 assert node.state, NodeState.CHANGED
924 assert not node.added
924 assert not node.added
925 assert node.changed
925 assert node.changed
926 assert not node.not_changed
926 assert not node.not_changed
927 assert not node.removed
927 assert not node.removed
928
928
929 node = self.repo\
929 node = self.repo\
930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 .get_node('setup.py')
931 .get_node('setup.py')
932 assert node.state, NodeState.NOT_CHANGED
932 assert node.state, NodeState.NOT_CHANGED
933 assert not node.added
933 assert not node.added
934 assert not node.changed
934 assert not node.changed
935 assert node.not_changed
935 assert node.not_changed
936 assert not node.removed
936 assert not node.removed
937
937
938 # If node has REMOVED state then trying to fetch it would raise
938 # If node has REMOVED state then trying to fetch it would raise
939 # CommitError exception
939 # CommitError exception
940 commit = self.repo.get_commit(
940 commit = self.repo.get_commit(
941 'fa6600f6848800641328adbf7811fd2372c02ab2')
941 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 path = 'vcs/backends/BaseRepository.py'
942 path = 'vcs/backends/BaseRepository.py'
943 with pytest.raises(NodeDoesNotExistError):
943 with pytest.raises(NodeDoesNotExistError):
944 commit.get_node(path)
944 commit.get_node(path)
945 # but it would be one of ``removed`` (commit's attribute)
945 # but it would be one of ``removed`` (commit's attribute)
946 assert path in [rf.path for rf in commit.removed]
946 assert path in [rf.path for rf in commit.removed]
947
947
948 commit = self.repo.get_commit(
948 commit = self.repo.get_commit(
949 '54386793436c938cff89326944d4c2702340037d')
949 '54386793436c938cff89326944d4c2702340037d')
950 changed = [
950 changed = [
951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 'vcs/nodes.py']
952 'vcs/nodes.py']
953 assert set(changed) == set([f.path for f in commit.changed])
953 assert set(changed) == set([f.path for f in commit.changed])
954
954
955 def test_unicode_branch_refs(self):
955 def test_unicode_branch_refs(self):
956 unicode_branches = {
956 unicode_branches = {
957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
958 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
958 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
959 }
959 }
960 with mock.patch(
960 with mock.patch(
961 ("rhodecode.lib.vcs.backends.git.repository"
961 ("rhodecode.lib.vcs.backends.git.repository"
962 ".GitRepository._refs"),
962 ".GitRepository._refs"),
963 unicode_branches):
963 unicode_branches):
964 branches = self.repo.branches
964 branches = self.repo.branches
965
965
966 assert 'unicode' in branches
966 assert 'unicode' in branches
967 assert u'uniΓ§ΓΆβˆ‚e' in branches
967 assert u'uniΓ§ΓΆβˆ‚e' in branches
968
968
969 def test_unicode_tag_refs(self):
969 def test_unicode_tag_refs(self):
970 unicode_tags = {
970 unicode_tags = {
971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 }
973 }
974 with mock.patch(
974 with mock.patch(
975 ("rhodecode.lib.vcs.backends.git.repository"
975 ("rhodecode.lib.vcs.backends.git.repository"
976 ".GitRepository._refs"),
976 ".GitRepository._refs"),
977 unicode_tags):
977 unicode_tags):
978 tags = self.repo.tags
978 tags = self.repo.tags
979
979
980 assert 'unicode' in tags
980 assert 'unicode' in tags
981 assert u'uniΓ§ΓΆβˆ‚e' in tags
981 assert u'uniΓ§ΓΆβˆ‚e' in tags
982
982
983 def test_commit_message_is_unicode(self):
983 def test_commit_message_is_unicode(self):
984 for commit in self.repo:
984 for commit in self.repo:
985 assert type(commit.message) == unicode
985 assert type(commit.message) == unicode
986
986
987 def test_commit_author_is_unicode(self):
987 def test_commit_author_is_unicode(self):
988 for commit in self.repo:
988 for commit in self.repo:
989 assert type(commit.author) == unicode
989 assert type(commit.author) == unicode
990
990
991 def test_repo_files_content_is_unicode(self):
991 def test_repo_files_content_is_unicode(self):
992 commit = self.repo.get_commit()
992 commit = self.repo.get_commit()
993 for node in commit.get_node('/'):
993 for node in commit.get_node('/'):
994 if node.is_file():
994 if node.is_file():
995 assert type(node.content) == unicode
995 assert type(node.content) == unicode
996
996
997 def test_wrong_path(self):
997 def test_wrong_path(self):
998 # There is 'setup.py' in the root dir but not there:
998 # There is 'setup.py' in the root dir but not there:
999 path = 'foo/bar/setup.py'
999 path = 'foo/bar/setup.py'
1000 tip = self.repo.get_commit()
1000 tip = self.repo.get_commit()
1001 with pytest.raises(VCSError):
1001 with pytest.raises(VCSError):
1002 tip.get_node(path)
1002 tip.get_node(path)
1003
1003
1004 @pytest.mark.parametrize("author_email, commit_id", [
1004 @pytest.mark.parametrize("author_email, commit_id", [
1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1006 ('lukasz.balcerzak@python-center.pl',
1006 ('lukasz.balcerzak@python-center.pl',
1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1009 ])
1009 ])
1010 def test_author_email(self, author_email, commit_id):
1010 def test_author_email(self, author_email, commit_id):
1011 commit = self.repo.get_commit(commit_id)
1011 commit = self.repo.get_commit(commit_id)
1012 assert author_email == commit.author_email
1012 assert author_email == commit.author_email
1013
1013
1014 @pytest.mark.parametrize("author, commit_id", [
1014 @pytest.mark.parametrize("author, commit_id", [
1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1018 ])
1018 ])
1019 def test_author_username(self, author, commit_id):
1019 def test_author_username(self, author, commit_id):
1020 commit = self.repo.get_commit(commit_id)
1020 commit = self.repo.get_commit(commit_id)
1021 assert author == commit.author_name
1021 assert author == commit.author_name
1022
1022
1023
1023
1024 class TestLargeFileRepo(object):
1024 class TestLargeFileRepo(object):
1025
1025
1026 def test_large_file(self, backend_git):
1026 def test_large_file(self, backend_git):
1027 conf = make_db_config()
1027 conf = make_db_config()
1028 repo = backend_git.create_test_repo('largefiles', conf)
1028 repo = backend_git.create_test_repo('largefiles', conf)
1029
1029
1030 tip = repo.scm_instance().get_commit()
1030 tip = repo.scm_instance().get_commit()
1031
1031
1032 # extract stored LF node into the origin cache
1032 # extract stored LF node into the origin cache
1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1034
1034
1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1036 oid_path = os.path.join(lfs_store, oid)
1036 oid_path = os.path.join(lfs_store, oid)
1037 oid_destination = os.path.join(
1037 oid_destination = os.path.join(
1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1039 shutil.copy(oid_path, oid_destination)
1039 shutil.copy(oid_path, oid_destination)
1040
1040
1041 node = tip.get_node('1MB.zip')
1041 node = tip.get_node('1MB.zip')
1042
1042
1043 lf_node = node.get_largefile_node()
1043 lf_node = node.get_largefile_node()
1044
1044
1045 assert lf_node.is_largefile() is True
1045 assert lf_node.is_largefile() is True
1046 assert lf_node.size == 1024000
1046 assert lf_node.size == 1024000
1047 assert lf_node.name == '1MB.zip'
1047 assert lf_node.name == '1MB.zip'
1048
1048
1049
1049
1050 @pytest.mark.usefixtures("vcs_repository_support")
1050 @pytest.mark.usefixtures("vcs_repository_support")
1051 class TestGitSpecificWithRepo(BackendTestMixin):
1051 class TestGitSpecificWithRepo(BackendTestMixin):
1052
1052
1053 @classmethod
1053 @classmethod
1054 def _get_commits(cls):
1054 def _get_commits(cls):
1055 return [
1055 return [
1056 {
1056 {
1057 'message': 'Initial',
1057 'message': 'Initial',
1058 'author': 'Joe Doe <joe.doe@example.com>',
1058 'author': 'Joe Doe <joe.doe@example.com>',
1059 'date': datetime.datetime(2010, 1, 1, 20),
1059 'date': datetime.datetime(2010, 1, 1, 20),
1060 'added': [
1060 'added': [
1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1062 FileNode(
1062 FileNode(
1063 'foobar/static/admin', content='admin',
1063 'foobar/static/admin', content='admin',
1064 mode=0o120000), # this is a link
1064 mode=0o120000), # this is a link
1065 FileNode('foo', content='foo'),
1065 FileNode('foo', content='foo'),
1066 ],
1066 ],
1067 },
1067 },
1068 {
1068 {
1069 'message': 'Second',
1069 'message': 'Second',
1070 'author': 'Joe Doe <joe.doe@example.com>',
1070 'author': 'Joe Doe <joe.doe@example.com>',
1071 'date': datetime.datetime(2010, 1, 1, 22),
1071 'date': datetime.datetime(2010, 1, 1, 22),
1072 'added': [
1072 'added': [
1073 FileNode('foo2', content='foo2'),
1073 FileNode('foo2', content='foo2'),
1074 ],
1074 ],
1075 },
1075 },
1076 ]
1076 ]
1077
1077
1078 def test_paths_slow_traversing(self):
1078 def test_paths_slow_traversing(self):
1079 commit = self.repo.get_commit()
1079 commit = self.repo.get_commit()
1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1081 .get_node('admin').get_node('base.js').content == 'base'
1081 .get_node('admin').get_node('base.js').content == 'base'
1082
1082
1083 def test_paths_fast_traversing(self):
1083 def test_paths_fast_traversing(self):
1084 commit = self.repo.get_commit()
1084 commit = self.repo.get_commit()
1085 assert (
1085 assert (
1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1087 'base')
1087 'base')
1088
1088
1089 def test_get_diff_runs_git_command_with_hashes(self):
1089 def test_get_diff_runs_git_command_with_hashes(self):
1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1091 self.repo.get_diff(self.repo[0], self.repo[1])
1091 self.repo.get_diff(self.repo[0], self.repo[1])
1092 self.repo.run_git_command.assert_called_once_with(
1092 self.repo.run_git_command.assert_called_once_with(
1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1094 '--abbrev=40', self.repo._get_commit_id(0),
1094 '--abbrev=40', self.repo._lookup_commit(0),
1095 self.repo._get_commit_id(1)])
1095 self.repo._lookup_commit(1)])
1096
1096
1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1100 self.repo.run_git_command.assert_called_once_with(
1100 self.repo.run_git_command.assert_called_once_with(
1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1102 '--abbrev=40', self.repo._get_commit_id(1)])
1102 '--abbrev=40', self.repo._lookup_commit(1)])
1103
1103
1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1107 self.repo.run_git_command.assert_called_once_with(
1107 self.repo.run_git_command.assert_called_once_with(
1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1109 '--abbrev=40', self.repo._get_commit_id(0),
1109 '--abbrev=40', self.repo._lookup_commit(0),
1110 self.repo._get_commit_id(1), '--', 'foo'])
1110 self.repo._lookup_commit(1), '--', 'foo'])
1111
1111
1112
1112
1113 @pytest.mark.usefixtures("vcs_repository_support")
1113 @pytest.mark.usefixtures("vcs_repository_support")
1114 class TestGitRegression(BackendTestMixin):
1114 class TestGitRegression(BackendTestMixin):
1115
1115
1116 @classmethod
1116 @classmethod
1117 def _get_commits(cls):
1117 def _get_commits(cls):
1118 return [
1118 return [
1119 {
1119 {
1120 'message': 'Initial',
1120 'message': 'Initial',
1121 'author': 'Joe Doe <joe.doe@example.com>',
1121 'author': 'Joe Doe <joe.doe@example.com>',
1122 'date': datetime.datetime(2010, 1, 1, 20),
1122 'date': datetime.datetime(2010, 1, 1, 20),
1123 'added': [
1123 'added': [
1124 FileNode('bot/__init__.py', content='base'),
1124 FileNode('bot/__init__.py', content='base'),
1125 FileNode('bot/templates/404.html', content='base'),
1125 FileNode('bot/templates/404.html', content='base'),
1126 FileNode('bot/templates/500.html', content='base'),
1126 FileNode('bot/templates/500.html', content='base'),
1127 ],
1127 ],
1128 },
1128 },
1129 {
1129 {
1130 'message': 'Second',
1130 'message': 'Second',
1131 'author': 'Joe Doe <joe.doe@example.com>',
1131 'author': 'Joe Doe <joe.doe@example.com>',
1132 'date': datetime.datetime(2010, 1, 1, 22),
1132 'date': datetime.datetime(2010, 1, 1, 22),
1133 'added': [
1133 'added': [
1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1136 FileNode(
1136 FileNode(
1137 'bot/build/static/templates/f.html', content='foo2'),
1137 'bot/build/static/templates/f.html', content='foo2'),
1138 FileNode(
1138 FileNode(
1139 'bot/build/static/templates/f1.html', content='foo2'),
1139 'bot/build/static/templates/f1.html', content='foo2'),
1140 FileNode('bot/build/templates/err.html', content='foo2'),
1140 FileNode('bot/build/templates/err.html', content='foo2'),
1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1142 ],
1142 ],
1143 },
1143 },
1144 ]
1144 ]
1145
1145
1146 @pytest.mark.parametrize("path, expected_paths", [
1146 @pytest.mark.parametrize("path, expected_paths", [
1147 ('bot', [
1147 ('bot', [
1148 'bot/build',
1148 'bot/build',
1149 'bot/templates',
1149 'bot/templates',
1150 'bot/__init__.py']),
1150 'bot/__init__.py']),
1151 ('bot/build', [
1151 ('bot/build', [
1152 'bot/build/migrations',
1152 'bot/build/migrations',
1153 'bot/build/static',
1153 'bot/build/static',
1154 'bot/build/templates']),
1154 'bot/build/templates']),
1155 ('bot/build/static', [
1155 ('bot/build/static', [
1156 'bot/build/static/templates']),
1156 'bot/build/static/templates']),
1157 ('bot/build/static/templates', [
1157 ('bot/build/static/templates', [
1158 'bot/build/static/templates/f.html',
1158 'bot/build/static/templates/f.html',
1159 'bot/build/static/templates/f1.html']),
1159 'bot/build/static/templates/f1.html']),
1160 ('bot/build/templates', [
1160 ('bot/build/templates', [
1161 'bot/build/templates/err.html',
1161 'bot/build/templates/err.html',
1162 'bot/build/templates/err2.html']),
1162 'bot/build/templates/err2.html']),
1163 ('bot/templates/', [
1163 ('bot/templates/', [
1164 'bot/templates/404.html',
1164 'bot/templates/404.html',
1165 'bot/templates/500.html']),
1165 'bot/templates/500.html']),
1166 ])
1166 ])
1167 def test_similar_paths(self, path, expected_paths):
1167 def test_similar_paths(self, path, expected_paths):
1168 commit = self.repo.get_commit()
1168 commit = self.repo.get_commit()
1169 paths = [n.path for n in commit.get_nodes(path)]
1169 paths = [n.path for n in commit.get_nodes(path)]
1170 assert paths == expected_paths
1170 assert paths == expected_paths
1171
1171
1172
1172
1173 class TestDiscoverGitVersion(object):
1173 class TestDiscoverGitVersion(object):
1174
1174
1175 def test_returns_git_version(self, baseapp):
1175 def test_returns_git_version(self, baseapp):
1176 version = discover_git_version()
1176 version = discover_git_version()
1177 assert version
1177 assert version
1178
1178
1179 def test_returns_empty_string_without_vcsserver(self):
1179 def test_returns_empty_string_without_vcsserver(self):
1180 mock_connection = mock.Mock()
1180 mock_connection = mock.Mock()
1181 mock_connection.discover_git_version = mock.Mock(
1181 mock_connection.discover_git_version = mock.Mock(
1182 side_effect=Exception)
1182 side_effect=Exception)
1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1184 version = discover_git_version()
1184 version = discover_git_version()
1185 assert version == ''
1185 assert version == ''
1186
1186
1187
1187
1188 class TestGetSubmoduleUrl(object):
1188 class TestGetSubmoduleUrl(object):
1189 def test_submodules_file_found(self):
1189 def test_submodules_file_found(self):
1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1191 node = mock.Mock()
1191 node = mock.Mock()
1192 with mock.patch.object(
1192 with mock.patch.object(
1193 commit, 'get_node', return_value=node) as get_node_mock:
1193 commit, 'get_node', return_value=node) as get_node_mock:
1194 node.content = (
1194 node.content = (
1195 '[submodule "subrepo1"]\n'
1195 '[submodule "subrepo1"]\n'
1196 '\tpath = subrepo1\n'
1196 '\tpath = subrepo1\n'
1197 '\turl = https://code.rhodecode.com/dulwich\n'
1197 '\turl = https://code.rhodecode.com/dulwich\n'
1198 )
1198 )
1199 result = commit._get_submodule_url('subrepo1')
1199 result = commit._get_submodule_url('subrepo1')
1200 get_node_mock.assert_called_once_with('.gitmodules')
1200 get_node_mock.assert_called_once_with('.gitmodules')
1201 assert result == 'https://code.rhodecode.com/dulwich'
1201 assert result == 'https://code.rhodecode.com/dulwich'
1202
1202
1203 def test_complex_submodule_path(self):
1203 def test_complex_submodule_path(self):
1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1205 node = mock.Mock()
1205 node = mock.Mock()
1206 with mock.patch.object(
1206 with mock.patch.object(
1207 commit, 'get_node', return_value=node) as get_node_mock:
1207 commit, 'get_node', return_value=node) as get_node_mock:
1208 node.content = (
1208 node.content = (
1209 '[submodule "complex/subrepo/path"]\n'
1209 '[submodule "complex/subrepo/path"]\n'
1210 '\tpath = complex/subrepo/path\n'
1210 '\tpath = complex/subrepo/path\n'
1211 '\turl = https://code.rhodecode.com/dulwich\n'
1211 '\turl = https://code.rhodecode.com/dulwich\n'
1212 )
1212 )
1213 result = commit._get_submodule_url('complex/subrepo/path')
1213 result = commit._get_submodule_url('complex/subrepo/path')
1214 get_node_mock.assert_called_once_with('.gitmodules')
1214 get_node_mock.assert_called_once_with('.gitmodules')
1215 assert result == 'https://code.rhodecode.com/dulwich'
1215 assert result == 'https://code.rhodecode.com/dulwich'
1216
1216
1217 def test_submodules_file_not_found(self):
1217 def test_submodules_file_not_found(self):
1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1219 with mock.patch.object(
1219 with mock.patch.object(
1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1221 result = commit._get_submodule_url('complex/subrepo/path')
1221 result = commit._get_submodule_url('complex/subrepo/path')
1222 assert result is None
1222 assert result is None
1223
1223
1224 def test_path_not_found(self):
1224 def test_path_not_found(self):
1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1226 node = mock.Mock()
1226 node = mock.Mock()
1227 with mock.patch.object(
1227 with mock.patch.object(
1228 commit, 'get_node', return_value=node) as get_node_mock:
1228 commit, 'get_node', return_value=node) as get_node_mock:
1229 node.content = (
1229 node.content = (
1230 '[submodule "subrepo1"]\n'
1230 '[submodule "subrepo1"]\n'
1231 '\tpath = subrepo1\n'
1231 '\tpath = subrepo1\n'
1232 '\turl = https://code.rhodecode.com/dulwich\n'
1232 '\turl = https://code.rhodecode.com/dulwich\n'
1233 )
1233 )
1234 result = commit._get_submodule_url('subrepo2')
1234 result = commit._get_submodule_url('subrepo2')
1235 get_node_mock.assert_called_once_with('.gitmodules')
1235 get_node_mock.assert_called_once_with('.gitmodules')
1236 assert result is None
1236 assert result is None
1237
1237
1238 def test_returns_cached_values(self):
1238 def test_returns_cached_values(self):
1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1240 node = mock.Mock()
1240 node = mock.Mock()
1241 with mock.patch.object(
1241 with mock.patch.object(
1242 commit, 'get_node', return_value=node) as get_node_mock:
1242 commit, 'get_node', return_value=node) as get_node_mock:
1243 node.content = (
1243 node.content = (
1244 '[submodule "subrepo1"]\n'
1244 '[submodule "subrepo1"]\n'
1245 '\tpath = subrepo1\n'
1245 '\tpath = subrepo1\n'
1246 '\turl = https://code.rhodecode.com/dulwich\n'
1246 '\turl = https://code.rhodecode.com/dulwich\n'
1247 )
1247 )
1248 for _ in range(3):
1248 for _ in range(3):
1249 commit._get_submodule_url('subrepo1')
1249 commit._get_submodule_url('subrepo1')
1250 get_node_mock.assert_called_once_with('.gitmodules')
1250 get_node_mock.assert_called_once_with('.gitmodules')
1251
1251
1252 def test_get_node_returns_a_link(self):
1252 def test_get_node_returns_a_link(self):
1253 repository = mock.Mock()
1253 repository = mock.Mock()
1254 repository.alias = 'git'
1254 repository.alias = 'git'
1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 get_id_patch = mock.patch.object(
1257 get_id_patch = mock.patch.object(
1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1259 get_submodule_patch = mock.patch.object(
1259 get_submodule_patch = mock.patch.object(
1260 commit, '_get_submodule_url', return_value=submodule_url)
1260 commit, '_get_submodule_url', return_value=submodule_url)
1261
1261
1262 with get_id_patch, get_submodule_patch as submodule_mock:
1262 with get_id_patch, get_submodule_patch as submodule_mock:
1263 node = commit.get_node('/abcde')
1263 node = commit.get_node('/abcde')
1264
1264
1265 submodule_mock.assert_called_once_with('/abcde')
1265 submodule_mock.assert_called_once_with('/abcde')
1266 assert type(node) == SubModuleNode
1266 assert type(node) == SubModuleNode
1267 assert node.url == submodule_url
1267 assert node.url == submodule_url
1268
1268
1269 def test_get_nodes_returns_links(self):
1269 def test_get_nodes_returns_links(self):
1270 repository = mock.MagicMock()
1270 repository = mock.MagicMock()
1271 repository.alias = 'git'
1271 repository.alias = 'git'
1272 repository._remote.tree_items.return_value = [
1272 repository._remote.tree_items.return_value = [
1273 ('subrepo', 'stat', 1, 'link')
1273 ('subrepo', 'stat', 1, 'link')
1274 ]
1274 ]
1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1277 get_id_patch = mock.patch.object(
1277 get_id_patch = mock.patch.object(
1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1279 get_submodule_patch = mock.patch.object(
1279 get_submodule_patch = mock.patch.object(
1280 commit, '_get_submodule_url', return_value=submodule_url)
1280 commit, '_get_submodule_url', return_value=submodule_url)
1281
1281
1282 with get_id_patch, get_submodule_patch as submodule_mock:
1282 with get_id_patch, get_submodule_patch as submodule_mock:
1283 nodes = commit.get_nodes('/abcde')
1283 nodes = commit.get_nodes('/abcde')
1284
1284
1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1286 assert len(nodes) == 1
1286 assert len(nodes) == 1
1287 assert type(nodes[0]) == SubModuleNode
1287 assert type(nodes[0]) == SubModuleNode
1288 assert nodes[0].url == submodule_url
1288 assert nodes[0].url == submodule_url
General Comments 0
You need to be logged in to leave comments. Login now