##// END OF EJS Templates
pull-requests: changed the order of close-branch after merge, so we don't leave open heads....
marcink -
r4436:37e7e17e default
parent child Browse files
Show More
@@ -1,986 +1,996 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
301 self, commit_id1, repo2, commit_id2)
302
302
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 return commit_id1
304 return commit_id1
305
305
306 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
308 other_path=repo2.path)
309
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
313 return ancestor_id
314
314
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
317 commits = []
317 commits = []
318 else:
318 else:
319 if merge:
319 if merge:
320 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
323 else:
324 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
327
327
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
329 for idx in indexes]
330
330
331 return commits
331 return commits
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Function will check given url and try to verify if it's a valid
336 Function will check given url and try to verify if it's a valid
337 link. Sometimes it may happened that mercurial will issue basic
337 link. Sometimes it may happened that mercurial will issue basic
338 auth request that can cause whole API to hang when used from python
338 auth request that can cause whole API to hang when used from python
339 or other external calls.
339 or other external calls.
340
340
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
342 when the return code is non 200
342 when the return code is non 200
343 """
343 """
344 # check first if it's not an local url
344 # check first if it's not an local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 """
356 """
357 Function will check for mercurial repository in given path. If there
357 Function will check for mercurial repository in given path. If there
358 is no repository in that path it will raise an exception unless
358 is no repository in that path it will raise an exception unless
359 `create` parameter is set to True - in that case repository would
359 `create` parameter is set to True - in that case repository would
360 be created.
360 be created.
361
361
362 If `src_url` is given, would try to clone repository from the
362 If `src_url` is given, would try to clone repository from the
363 location at given clone_point. Additionally it'll make update to
363 location at given clone_point. Additionally it'll make update to
364 working copy accordingly to `do_workspace_checkout` flag.
364 working copy accordingly to `do_workspace_checkout` flag.
365 """
365 """
366 if create and os.path.exists(self.path):
366 if create and os.path.exists(self.path):
367 raise RepositoryError(
367 raise RepositoryError(
368 "Cannot create repository at %s, location already exist"
368 "Cannot create repository at %s, location already exist"
369 % self.path)
369 % self.path)
370
370
371 if src_url:
371 if src_url:
372 url = str(self._get_url(src_url))
372 url = str(self._get_url(src_url))
373 MercurialRepository.check_url(url, self.config)
373 MercurialRepository.check_url(url, self.config)
374
374
375 self._remote.clone(url, self.path, do_workspace_checkout)
375 self._remote.clone(url, self.path, do_workspace_checkout)
376
376
377 # Don't try to create if we've already cloned repo
377 # Don't try to create if we've already cloned repo
378 create = False
378 create = False
379
379
380 if create:
380 if create:
381 os.makedirs(self.path, mode=0o755)
381 os.makedirs(self.path, mode=0o755)
382 self._remote.localrepository(create)
382 self._remote.localrepository(create)
383
383
384 @LazyProperty
384 @LazyProperty
385 def in_memory_commit(self):
385 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
387
387
388 @LazyProperty
388 @LazyProperty
389 def description(self):
389 def description(self):
390 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393
393
394 @LazyProperty
394 @LazyProperty
395 def contact(self):
395 def contact(self):
396 contact = (
396 contact = (
397 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns last change made on this repository as
404 Returns last change made on this repository as
405 `datetime.datetime` object.
405 `datetime.datetime` object.
406 """
406 """
407 try:
407 try:
408 return self.get_commit().date
408 return self.get_commit().date
409 except RepositoryError:
409 except RepositoryError:
410 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
413 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
414 # fallback to filesystem
414 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
419 else:
419 else:
420 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
421
421
422 def _get_url(self, url):
422 def _get_url(self, url):
423 """
423 """
424 Returns normalized url. If schema is not given, would fall
424 Returns normalized url. If schema is not given, would fall
425 to filesystem
425 to filesystem
426 (``file:///``) schema.
426 (``file:///``) schema.
427 """
427 """
428 url = url.encode('utf8')
428 url = url.encode('utf8')
429 if url != 'default' and '://' not in url:
429 if url != 'default' and '://' not in url:
430 url = "file:" + urllib.pathname2url(url)
430 url = "file:" + urllib.pathname2url(url)
431 return url
431 return url
432
432
433 def get_hook_location(self):
433 def get_hook_location(self):
434 """
434 """
435 returns absolute path to location where hooks are stored
435 returns absolute path to location where hooks are stored
436 """
436 """
437 return os.path.join(self.path, '.hg', '.hgrc')
437 return os.path.join(self.path, '.hg', '.hgrc')
438
438
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 translate_tag=None, maybe_unreachable=False):
440 translate_tag=None, maybe_unreachable=False):
441 """
441 """
442 Returns ``MercurialCommit`` object representing repository's
442 Returns ``MercurialCommit`` object representing repository's
443 commit at the given `commit_id` or `commit_idx`.
443 commit at the given `commit_id` or `commit_idx`.
444 """
444 """
445 if self.is_empty():
445 if self.is_empty():
446 raise EmptyRepositoryError("There are no commits yet")
446 raise EmptyRepositoryError("There are no commits yet")
447
447
448 if commit_id is not None:
448 if commit_id is not None:
449 self._validate_commit_id(commit_id)
449 self._validate_commit_id(commit_id)
450 try:
450 try:
451 # we have cached idx, use it without contacting the remote
451 # we have cached idx, use it without contacting the remote
452 idx = self._commit_ids[commit_id]
452 idx = self._commit_ids[commit_id]
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 except KeyError:
454 except KeyError:
455 pass
455 pass
456
456
457 elif commit_idx is not None:
457 elif commit_idx is not None:
458 self._validate_commit_idx(commit_idx)
458 self._validate_commit_idx(commit_idx)
459 try:
459 try:
460 _commit_id = self.commit_ids[commit_idx]
460 _commit_id = self.commit_ids[commit_idx]
461 if commit_idx < 0:
461 if commit_idx < 0:
462 commit_idx = self.commit_ids.index(_commit_id)
462 commit_idx = self.commit_ids.index(_commit_id)
463
463
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 except IndexError:
465 except IndexError:
466 commit_id = commit_idx
466 commit_id = commit_idx
467 else:
467 else:
468 commit_id = "tip"
468 commit_id = "tip"
469
469
470 if isinstance(commit_id, unicode):
470 if isinstance(commit_id, unicode):
471 commit_id = safe_str(commit_id)
471 commit_id = safe_str(commit_id)
472
472
473 try:
473 try:
474 raw_id, idx = self._remote.lookup(commit_id, both=True)
474 raw_id, idx = self._remote.lookup(commit_id, both=True)
475 except CommitDoesNotExistError:
475 except CommitDoesNotExistError:
476 msg = "Commit {} does not exist for `{}`".format(
476 msg = "Commit {} does not exist for `{}`".format(
477 *map(safe_str, [commit_id, self.name]))
477 *map(safe_str, [commit_id, self.name]))
478 raise CommitDoesNotExistError(msg)
478 raise CommitDoesNotExistError(msg)
479
479
480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481
481
482 def get_commits(
482 def get_commits(
483 self, start_id=None, end_id=None, start_date=None, end_date=None,
483 self, start_id=None, end_id=None, start_date=None, end_date=None,
484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
485 """
485 """
486 Returns generator of ``MercurialCommit`` objects from start to end
486 Returns generator of ``MercurialCommit`` objects from start to end
487 (both are inclusive)
487 (both are inclusive)
488
488
489 :param start_id: None, str(commit_id)
489 :param start_id: None, str(commit_id)
490 :param end_id: None, str(commit_id)
490 :param end_id: None, str(commit_id)
491 :param start_date: if specified, commits with commit date less than
491 :param start_date: if specified, commits with commit date less than
492 ``start_date`` would be filtered out from returned set
492 ``start_date`` would be filtered out from returned set
493 :param end_date: if specified, commits with commit date greater than
493 :param end_date: if specified, commits with commit date greater than
494 ``end_date`` would be filtered out from returned set
494 ``end_date`` would be filtered out from returned set
495 :param branch_name: if specified, commits not reachable from given
495 :param branch_name: if specified, commits not reachable from given
496 branch would be filtered out from returned set
496 branch would be filtered out from returned set
497 :param show_hidden: Show hidden commits such as obsolete or hidden from
497 :param show_hidden: Show hidden commits such as obsolete or hidden from
498 Mercurial evolve
498 Mercurial evolve
499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
500 exist.
500 exist.
501 :raise CommitDoesNotExistError: If commit for given ``start`` or
501 :raise CommitDoesNotExistError: If commit for given ``start`` or
502 ``end`` could not be found.
502 ``end`` could not be found.
503 """
503 """
504 # actually we should check now if it's not an empty repo
504 # actually we should check now if it's not an empty repo
505 if self.is_empty():
505 if self.is_empty():
506 raise EmptyRepositoryError("There are no commits yet")
506 raise EmptyRepositoryError("There are no commits yet")
507 self._validate_branch_name(branch_name)
507 self._validate_branch_name(branch_name)
508
508
509 branch_ancestors = False
509 branch_ancestors = False
510 if start_id is not None:
510 if start_id is not None:
511 self._validate_commit_id(start_id)
511 self._validate_commit_id(start_id)
512 c_start = self.get_commit(commit_id=start_id)
512 c_start = self.get_commit(commit_id=start_id)
513 start_pos = self._commit_ids[c_start.raw_id]
513 start_pos = self._commit_ids[c_start.raw_id]
514 else:
514 else:
515 start_pos = None
515 start_pos = None
516
516
517 if end_id is not None:
517 if end_id is not None:
518 self._validate_commit_id(end_id)
518 self._validate_commit_id(end_id)
519 c_end = self.get_commit(commit_id=end_id)
519 c_end = self.get_commit(commit_id=end_id)
520 end_pos = max(0, self._commit_ids[c_end.raw_id])
520 end_pos = max(0, self._commit_ids[c_end.raw_id])
521 else:
521 else:
522 end_pos = None
522 end_pos = None
523
523
524 if None not in [start_id, end_id] and start_pos > end_pos:
524 if None not in [start_id, end_id] and start_pos > end_pos:
525 raise RepositoryError(
525 raise RepositoryError(
526 "Start commit '%s' cannot be after end commit '%s'" %
526 "Start commit '%s' cannot be after end commit '%s'" %
527 (start_id, end_id))
527 (start_id, end_id))
528
528
529 if end_pos is not None:
529 if end_pos is not None:
530 end_pos += 1
530 end_pos += 1
531
531
532 commit_filter = []
532 commit_filter = []
533
533
534 if branch_name and not branch_ancestors:
534 if branch_name and not branch_ancestors:
535 commit_filter.append('branch("%s")' % (branch_name,))
535 commit_filter.append('branch("%s")' % (branch_name,))
536 elif branch_name and branch_ancestors:
536 elif branch_name and branch_ancestors:
537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
538
538
539 if start_date and not end_date:
539 if start_date and not end_date:
540 commit_filter.append('date(">%s")' % (start_date,))
540 commit_filter.append('date(">%s")' % (start_date,))
541 if end_date and not start_date:
541 if end_date and not start_date:
542 commit_filter.append('date("<%s")' % (end_date,))
542 commit_filter.append('date("<%s")' % (end_date,))
543 if start_date and end_date:
543 if start_date and end_date:
544 commit_filter.append(
544 commit_filter.append(
545 'date(">%s") and date("<%s")' % (start_date, end_date))
545 'date(">%s") and date("<%s")' % (start_date, end_date))
546
546
547 if not show_hidden:
547 if not show_hidden:
548 commit_filter.append('not obsolete()')
548 commit_filter.append('not obsolete()')
549 commit_filter.append('not hidden()')
549 commit_filter.append('not hidden()')
550
550
551 # TODO: johbo: Figure out a simpler way for this solution
551 # TODO: johbo: Figure out a simpler way for this solution
552 collection_generator = CollectionGenerator
552 collection_generator = CollectionGenerator
553 if commit_filter:
553 if commit_filter:
554 commit_filter = ' and '.join(map(safe_str, commit_filter))
554 commit_filter = ' and '.join(map(safe_str, commit_filter))
555 revisions = self._remote.rev_range([commit_filter])
555 revisions = self._remote.rev_range([commit_filter])
556 collection_generator = MercurialIndexBasedCollectionGenerator
556 collection_generator = MercurialIndexBasedCollectionGenerator
557 else:
557 else:
558 revisions = self.commit_ids
558 revisions = self.commit_ids
559
559
560 if start_pos or end_pos:
560 if start_pos or end_pos:
561 revisions = revisions[start_pos:end_pos]
561 revisions = revisions[start_pos:end_pos]
562
562
563 return collection_generator(self, revisions, pre_load=pre_load)
563 return collection_generator(self, revisions, pre_load=pre_load)
564
564
565 def pull(self, url, commit_ids=None):
565 def pull(self, url, commit_ids=None):
566 """
566 """
567 Pull changes from external location.
567 Pull changes from external location.
568
568
569 :param commit_ids: Optional. Can be set to a list of commit ids
569 :param commit_ids: Optional. Can be set to a list of commit ids
570 which shall be pulled from the other repository.
570 which shall be pulled from the other repository.
571 """
571 """
572 url = self._get_url(url)
572 url = self._get_url(url)
573 self._remote.pull(url, commit_ids=commit_ids)
573 self._remote.pull(url, commit_ids=commit_ids)
574 self._remote.invalidate_vcs_cache()
574 self._remote.invalidate_vcs_cache()
575
575
576 def fetch(self, url, commit_ids=None):
576 def fetch(self, url, commit_ids=None):
577 """
577 """
578 Backward compatibility with GIT fetch==pull
578 Backward compatibility with GIT fetch==pull
579 """
579 """
580 return self.pull(url, commit_ids=commit_ids)
580 return self.pull(url, commit_ids=commit_ids)
581
581
582 def push(self, url):
582 def push(self, url):
583 url = self._get_url(url)
583 url = self._get_url(url)
584 self._remote.sync_push(url)
584 self._remote.sync_push(url)
585
585
586 def _local_clone(self, clone_path):
586 def _local_clone(self, clone_path):
587 """
587 """
588 Create a local clone of the current repo.
588 Create a local clone of the current repo.
589 """
589 """
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 hooks=False)
591 hooks=False)
592
592
593 def _update(self, revision, clean=False):
593 def _update(self, revision, clean=False):
594 """
594 """
595 Update the working copy to the specified revision.
595 Update the working copy to the specified revision.
596 """
596 """
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
598 self._remote.update(revision, clean=clean)
598 self._remote.update(revision, clean=clean)
599
599
600 def _identify(self):
600 def _identify(self):
601 """
601 """
602 Return the current state of the working directory.
602 Return the current state of the working directory.
603 """
603 """
604 return self._remote.identify().strip().rstrip('+')
604 return self._remote.identify().strip().rstrip('+')
605
605
606 def _heads(self, branch=None):
606 def _heads(self, branch=None):
607 """
607 """
608 Return the commit ids of the repository heads.
608 Return the commit ids of the repository heads.
609 """
609 """
610 return self._remote.heads(branch=branch).strip().split(' ')
610 return self._remote.heads(branch=branch).strip().split(' ')
611
611
612 def _ancestor(self, revision1, revision2):
612 def _ancestor(self, revision1, revision2):
613 """
613 """
614 Return the common ancestor of the two revisions.
614 Return the common ancestor of the two revisions.
615 """
615 """
616 return self._remote.ancestor(revision1, revision2)
616 return self._remote.ancestor(revision1, revision2)
617
617
618 def _local_push(
618 def _local_push(
619 self, revision, repository_path, push_branches=False,
619 self, revision, repository_path, push_branches=False,
620 enable_hooks=False):
620 enable_hooks=False):
621 """
621 """
622 Push the given revision to the specified repository.
622 Push the given revision to the specified repository.
623
623
624 :param push_branches: allow to create branches in the target repo.
624 :param push_branches: allow to create branches in the target repo.
625 """
625 """
626 self._remote.push(
626 self._remote.push(
627 [revision], repository_path, hooks=enable_hooks,
627 [revision], repository_path, hooks=enable_hooks,
628 push_branches=push_branches)
628 push_branches=push_branches)
629
629
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
631 source_ref, use_rebase=False, dry_run=False):
631 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
632 """
632 """
633 Merge the given source_revision into the checked out revision.
633 Merge the given source_revision into the checked out revision.
634
634
635 Returns the commit id of the merge and a boolean indicating if the
635 Returns the commit id of the merge and a boolean indicating if the
636 commit needs to be pushed.
636 commit needs to be pushed.
637 """
637 """
638 self._update(target_ref.commit_id, clean=True)
638 source_ref_commit_id = source_ref.commit_id
639 target_ref_commit_id = target_ref.commit_id
639
640
640 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
641 # update our workdir to target ref, for proper merge
642 self._update(target_ref_commit_id, clean=True)
643
644 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
641 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
645 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
642
646
643 if ancestor == source_ref.commit_id:
647 if close_commit_id:
644 # Nothing to do, the changes were already integrated
648 # NOTE(marcink): if we get the close commit, this is our new source
645 return target_ref.commit_id, False
649 # which will include the close commit itself.
650 source_ref_commit_id = close_commit_id
646
651
647 elif ancestor == target_ref.commit_id and is_the_same_branch:
652 if ancestor == source_ref_commit_id:
653 # Nothing to do, the changes were already integrated
654 return target_ref_commit_id, False
655
656 elif ancestor == target_ref_commit_id and is_the_same_branch:
648 # In this case we should force a commit message
657 # In this case we should force a commit message
649 return source_ref.commit_id, True
658 return source_ref_commit_id, True
650
659
651 unresolved = None
660 unresolved = None
652 if use_rebase:
661 if use_rebase:
653 try:
662 try:
654 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
663 bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
655 target_ref.commit_id)
656 self.bookmark(bookmark_name, revision=source_ref.commit_id)
664 self.bookmark(bookmark_name, revision=source_ref.commit_id)
657 self._remote.rebase(
665 self._remote.rebase(
658 source=source_ref.commit_id, dest=target_ref.commit_id)
666 source=source_ref_commit_id, dest=target_ref_commit_id)
659 self._remote.invalidate_vcs_cache()
667 self._remote.invalidate_vcs_cache()
660 self._update(bookmark_name, clean=True)
668 self._update(bookmark_name, clean=True)
661 return self._identify(), True
669 return self._identify(), True
662 except RepositoryError as e:
670 except RepositoryError as e:
663 # The rebase-abort may raise another exception which 'hides'
671 # The rebase-abort may raise another exception which 'hides'
664 # the original one, therefore we log it here.
672 # the original one, therefore we log it here.
665 log.exception('Error while rebasing shadow repo during merge.')
673 log.exception('Error while rebasing shadow repo during merge.')
666 if 'unresolved conflicts' in safe_str(e):
674 if 'unresolved conflicts' in safe_str(e):
667 unresolved = self._remote.get_unresolved_files()
675 unresolved = self._remote.get_unresolved_files()
668 log.debug('unresolved files: %s', unresolved)
676 log.debug('unresolved files: %s', unresolved)
669
677
670 # Cleanup any rebase leftovers
678 # Cleanup any rebase leftovers
671 self._remote.invalidate_vcs_cache()
679 self._remote.invalidate_vcs_cache()
672 self._remote.rebase(abort=True)
680 self._remote.rebase(abort=True)
673 self._remote.invalidate_vcs_cache()
681 self._remote.invalidate_vcs_cache()
674 self._remote.update(clean=True)
682 self._remote.update(clean=True)
675 if unresolved:
683 if unresolved:
676 raise UnresolvedFilesInRepo(unresolved)
684 raise UnresolvedFilesInRepo(unresolved)
677 else:
685 else:
678 raise
686 raise
679 else:
687 else:
680 try:
688 try:
681 self._remote.merge(source_ref.commit_id)
689 self._remote.merge(source_ref_commit_id)
682 self._remote.invalidate_vcs_cache()
690 self._remote.invalidate_vcs_cache()
683 self._remote.commit(
691 self._remote.commit(
684 message=safe_str(merge_message),
692 message=safe_str(merge_message),
685 username=safe_str('%s <%s>' % (user_name, user_email)))
693 username=safe_str('%s <%s>' % (user_name, user_email)))
686 self._remote.invalidate_vcs_cache()
694 self._remote.invalidate_vcs_cache()
687 return self._identify(), True
695 return self._identify(), True
688 except RepositoryError as e:
696 except RepositoryError as e:
689 # The merge-abort may raise another exception which 'hides'
697 # The merge-abort may raise another exception which 'hides'
690 # the original one, therefore we log it here.
698 # the original one, therefore we log it here.
691 log.exception('Error while merging shadow repo during merge.')
699 log.exception('Error while merging shadow repo during merge.')
692 if 'unresolved merge conflicts' in safe_str(e):
700 if 'unresolved merge conflicts' in safe_str(e):
693 unresolved = self._remote.get_unresolved_files()
701 unresolved = self._remote.get_unresolved_files()
694 log.debug('unresolved files: %s', unresolved)
702 log.debug('unresolved files: %s', unresolved)
695
703
696 # Cleanup any merge leftovers
704 # Cleanup any merge leftovers
697 self._remote.update(clean=True)
705 self._remote.update(clean=True)
698 if unresolved:
706 if unresolved:
699 raise UnresolvedFilesInRepo(unresolved)
707 raise UnresolvedFilesInRepo(unresolved)
700 else:
708 else:
701 raise
709 raise
702
710
703 def _local_close(self, target_ref, user_name, user_email,
711 def _local_close(self, target_ref, user_name, user_email,
704 source_ref, close_message=''):
712 source_ref, close_message=''):
705 """
713 """
706 Close the branch of the given source_revision
714 Close the branch of the given source_revision
707
715
708 Returns the commit id of the close and a boolean indicating if the
716 Returns the commit id of the close and a boolean indicating if the
709 commit needs to be pushed.
717 commit needs to be pushed.
710 """
718 """
711 self._update(source_ref.commit_id)
719 self._update(source_ref.commit_id)
712 message = close_message or "Closing branch: `{}`".format(source_ref.name)
720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
713 try:
721 try:
714 self._remote.commit(
722 self._remote.commit(
715 message=safe_str(message),
723 message=safe_str(message),
716 username=safe_str('%s <%s>' % (user_name, user_email)),
724 username=safe_str('%s <%s>' % (user_name, user_email)),
717 close_branch=True)
725 close_branch=True)
718 self._remote.invalidate_vcs_cache()
726 self._remote.invalidate_vcs_cache()
719 return self._identify(), True
727 return self._identify(), True
720 except RepositoryError:
728 except RepositoryError:
721 # Cleanup any commit leftovers
729 # Cleanup any commit leftovers
722 self._remote.update(clean=True)
730 self._remote.update(clean=True)
723 raise
731 raise
724
732
725 def _is_the_same_branch(self, target_ref, source_ref):
733 def _is_the_same_branch(self, target_ref, source_ref):
726 return (
734 return (
727 self._get_branch_name(target_ref) ==
735 self._get_branch_name(target_ref) ==
728 self._get_branch_name(source_ref))
736 self._get_branch_name(source_ref))
729
737
730 def _get_branch_name(self, ref):
738 def _get_branch_name(self, ref):
731 if ref.type == 'branch':
739 if ref.type == 'branch':
732 return ref.name
740 return ref.name
733 return self._remote.ctx_branch(ref.commit_id)
741 return self._remote.ctx_branch(ref.commit_id)
734
742
735 def _maybe_prepare_merge_workspace(
743 def _maybe_prepare_merge_workspace(
736 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
737 shadow_repository_path = self._get_shadow_repository_path(
745 shadow_repository_path = self._get_shadow_repository_path(
738 self.path, repo_id, workspace_id)
746 self.path, repo_id, workspace_id)
739 if not os.path.exists(shadow_repository_path):
747 if not os.path.exists(shadow_repository_path):
740 self._local_clone(shadow_repository_path)
748 self._local_clone(shadow_repository_path)
741 log.debug(
749 log.debug(
742 'Prepared shadow repository in %s', shadow_repository_path)
750 'Prepared shadow repository in %s', shadow_repository_path)
743
751
744 return shadow_repository_path
752 return shadow_repository_path
745
753
746 def _merge_repo(self, repo_id, workspace_id, target_ref,
754 def _merge_repo(self, repo_id, workspace_id, target_ref,
747 source_repo, source_ref, merge_message,
755 source_repo, source_ref, merge_message,
748 merger_name, merger_email, dry_run=False,
756 merger_name, merger_email, dry_run=False,
749 use_rebase=False, close_branch=False):
757 use_rebase=False, close_branch=False):
750
758
751 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
752 'rebase' if use_rebase else 'merge', dry_run)
760 'rebase' if use_rebase else 'merge', dry_run)
753 if target_ref.commit_id not in self._heads():
761 if target_ref.commit_id not in self._heads():
754 return MergeResponse(
762 return MergeResponse(
755 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
756 metadata={'target_ref': target_ref})
764 metadata={'target_ref': target_ref})
757
765
758 try:
766 try:
759 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
760 heads = '\n,'.join(self._heads(target_ref.name))
768 heads = '\n,'.join(self._heads(target_ref.name))
761 metadata = {
769 metadata = {
762 'target_ref': target_ref,
770 'target_ref': target_ref,
763 'source_ref': source_ref,
771 'source_ref': source_ref,
764 'heads': heads
772 'heads': heads
765 }
773 }
766 return MergeResponse(
774 return MergeResponse(
767 False, False, None,
775 False, False, None,
768 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
776 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
769 metadata=metadata)
777 metadata=metadata)
770 except CommitDoesNotExistError:
778 except CommitDoesNotExistError:
771 log.exception('Failure when looking up branch heads on hg target')
779 log.exception('Failure when looking up branch heads on hg target')
772 return MergeResponse(
780 return MergeResponse(
773 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
781 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
774 metadata={'target_ref': target_ref})
782 metadata={'target_ref': target_ref})
775
783
776 shadow_repository_path = self._maybe_prepare_merge_workspace(
784 shadow_repository_path = self._maybe_prepare_merge_workspace(
777 repo_id, workspace_id, target_ref, source_ref)
785 repo_id, workspace_id, target_ref, source_ref)
778 shadow_repo = self.get_shadow_instance(shadow_repository_path)
786 shadow_repo = self.get_shadow_instance(shadow_repository_path)
779
787
780 log.debug('Pulling in target reference %s', target_ref)
788 log.debug('Pulling in target reference %s', target_ref)
781 self._validate_pull_reference(target_ref)
789 self._validate_pull_reference(target_ref)
782 shadow_repo._local_pull(self.path, target_ref)
790 shadow_repo._local_pull(self.path, target_ref)
783
791
784 try:
792 try:
785 log.debug('Pulling in source reference %s', source_ref)
793 log.debug('Pulling in source reference %s', source_ref)
786 source_repo._validate_pull_reference(source_ref)
794 source_repo._validate_pull_reference(source_ref)
787 shadow_repo._local_pull(source_repo.path, source_ref)
795 shadow_repo._local_pull(source_repo.path, source_ref)
788 except CommitDoesNotExistError:
796 except CommitDoesNotExistError:
789 log.exception('Failure when doing local pull on hg shadow repo')
797 log.exception('Failure when doing local pull on hg shadow repo')
790 return MergeResponse(
798 return MergeResponse(
791 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
799 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
792 metadata={'source_ref': source_ref})
800 metadata={'source_ref': source_ref})
793
801
794 merge_ref = None
802 merge_ref = None
795 merge_commit_id = None
803 merge_commit_id = None
796 close_commit_id = None
804 close_commit_id = None
797 merge_failure_reason = MergeFailureReason.NONE
805 merge_failure_reason = MergeFailureReason.NONE
798 metadata = {}
806 metadata = {}
799
807
800 # enforce that close branch should be used only in case we source from
808 # enforce that close branch should be used only in case we source from
801 # an actual Branch
809 # an actual Branch
802 close_branch = close_branch and source_ref.type == 'branch'
810 close_branch = close_branch and source_ref.type == 'branch'
803
811
804 # don't allow to close branch if source and target are the same
812 # don't allow to close branch if source and target are the same
805 close_branch = close_branch and source_ref.name != target_ref.name
813 close_branch = close_branch and source_ref.name != target_ref.name
806
814
807 needs_push_on_close = False
815 needs_push_on_close = False
808 if close_branch and not use_rebase and not dry_run:
816 if close_branch and not use_rebase and not dry_run:
809 try:
817 try:
810 close_commit_id, needs_push_on_close = shadow_repo._local_close(
818 close_commit_id, needs_push_on_close = shadow_repo._local_close(
811 target_ref, merger_name, merger_email, source_ref)
819 target_ref, merger_name, merger_email, source_ref)
812 merge_possible = True
820 merge_possible = True
813 except RepositoryError:
821 except RepositoryError:
814 log.exception('Failure when doing close branch on '
822 log.exception('Failure when doing close branch on '
815 'shadow repo: %s', shadow_repo)
823 'shadow repo: %s', shadow_repo)
816 merge_possible = False
824 merge_possible = False
817 merge_failure_reason = MergeFailureReason.MERGE_FAILED
825 merge_failure_reason = MergeFailureReason.MERGE_FAILED
818 else:
826 else:
819 merge_possible = True
827 merge_possible = True
820
828
821 needs_push = False
829 needs_push = False
822 if merge_possible:
830 if merge_possible:
831
823 try:
832 try:
824 merge_commit_id, needs_push = shadow_repo._local_merge(
833 merge_commit_id, needs_push = shadow_repo._local_merge(
825 target_ref, merge_message, merger_name, merger_email,
834 target_ref, merge_message, merger_name, merger_email,
826 source_ref, use_rebase=use_rebase, dry_run=dry_run)
835 source_ref, use_rebase=use_rebase,
836 close_commit_id=close_commit_id, dry_run=dry_run)
827 merge_possible = True
837 merge_possible = True
828
838
829 # read the state of the close action, if it
839 # read the state of the close action, if it
830 # maybe required a push
840 # maybe required a push
831 needs_push = needs_push or needs_push_on_close
841 needs_push = needs_push or needs_push_on_close
832
842
833 # Set a bookmark pointing to the merge commit. This bookmark
843 # Set a bookmark pointing to the merge commit. This bookmark
834 # may be used to easily identify the last successful merge
844 # may be used to easily identify the last successful merge
835 # commit in the shadow repository.
845 # commit in the shadow repository.
836 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
846 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
837 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
847 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
838 except SubrepoMergeError:
848 except SubrepoMergeError:
839 log.exception(
849 log.exception(
840 'Subrepo merge error during local merge on hg shadow repo.')
850 'Subrepo merge error during local merge on hg shadow repo.')
841 merge_possible = False
851 merge_possible = False
842 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
852 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
843 needs_push = False
853 needs_push = False
844 except RepositoryError as e:
854 except RepositoryError as e:
845 log.exception('Failure when doing local merge on hg shadow repo')
855 log.exception('Failure when doing local merge on hg shadow repo')
846 if isinstance(e, UnresolvedFilesInRepo):
856 if isinstance(e, UnresolvedFilesInRepo):
847 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
857 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
848
858
849 merge_possible = False
859 merge_possible = False
850 merge_failure_reason = MergeFailureReason.MERGE_FAILED
860 merge_failure_reason = MergeFailureReason.MERGE_FAILED
851 needs_push = False
861 needs_push = False
852
862
853 if merge_possible and not dry_run:
863 if merge_possible and not dry_run:
854 if needs_push:
864 if needs_push:
855 # In case the target is a bookmark, update it, so after pushing
865 # In case the target is a bookmark, update it, so after pushing
856 # the bookmarks is also updated in the target.
866 # the bookmarks is also updated in the target.
857 if target_ref.type == 'book':
867 if target_ref.type == 'book':
858 shadow_repo.bookmark(
868 shadow_repo.bookmark(
859 target_ref.name, revision=merge_commit_id)
869 target_ref.name, revision=merge_commit_id)
860 try:
870 try:
861 shadow_repo_with_hooks = self.get_shadow_instance(
871 shadow_repo_with_hooks = self.get_shadow_instance(
862 shadow_repository_path,
872 shadow_repository_path,
863 enable_hooks=True)
873 enable_hooks=True)
864 # This is the actual merge action, we push from shadow
874 # This is the actual merge action, we push from shadow
865 # into origin.
875 # into origin.
866 # Note: the push_branches option will push any new branch
876 # Note: the push_branches option will push any new branch
867 # defined in the source repository to the target. This may
877 # defined in the source repository to the target. This may
868 # be dangerous as branches are permanent in Mercurial.
878 # be dangerous as branches are permanent in Mercurial.
869 # This feature was requested in issue #441.
879 # This feature was requested in issue #441.
870 shadow_repo_with_hooks._local_push(
880 shadow_repo_with_hooks._local_push(
871 merge_commit_id, self.path, push_branches=True,
881 merge_commit_id, self.path, push_branches=True,
872 enable_hooks=True)
882 enable_hooks=True)
873
883
874 # maybe we also need to push the close_commit_id
884 # maybe we also need to push the close_commit_id
875 if close_commit_id:
885 if close_commit_id:
876 shadow_repo_with_hooks._local_push(
886 shadow_repo_with_hooks._local_push(
877 close_commit_id, self.path, push_branches=True,
887 close_commit_id, self.path, push_branches=True,
878 enable_hooks=True)
888 enable_hooks=True)
879 merge_succeeded = True
889 merge_succeeded = True
880 except RepositoryError:
890 except RepositoryError:
881 log.exception(
891 log.exception(
882 'Failure when doing local push from the shadow '
892 'Failure when doing local push from the shadow '
883 'repository to the target repository at %s.', self.path)
893 'repository to the target repository at %s.', self.path)
884 merge_succeeded = False
894 merge_succeeded = False
885 merge_failure_reason = MergeFailureReason.PUSH_FAILED
895 merge_failure_reason = MergeFailureReason.PUSH_FAILED
886 metadata['target'] = 'hg shadow repo'
896 metadata['target'] = 'hg shadow repo'
887 metadata['merge_commit'] = merge_commit_id
897 metadata['merge_commit'] = merge_commit_id
888 else:
898 else:
889 merge_succeeded = True
899 merge_succeeded = True
890 else:
900 else:
891 merge_succeeded = False
901 merge_succeeded = False
892
902
893 return MergeResponse(
903 return MergeResponse(
894 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
904 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
895 metadata=metadata)
905 metadata=metadata)
896
906
897 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
907 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
898 config = self.config.copy()
908 config = self.config.copy()
899 if not enable_hooks:
909 if not enable_hooks:
900 config.clear_section('hooks')
910 config.clear_section('hooks')
901 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
911 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
902
912
903 def _validate_pull_reference(self, reference):
913 def _validate_pull_reference(self, reference):
904 if not (reference.name in self.bookmarks or
914 if not (reference.name in self.bookmarks or
905 reference.name in self.branches or
915 reference.name in self.branches or
906 self.get_commit(reference.commit_id)):
916 self.get_commit(reference.commit_id)):
907 raise CommitDoesNotExistError(
917 raise CommitDoesNotExistError(
908 'Unknown branch, bookmark or commit id')
918 'Unknown branch, bookmark or commit id')
909
919
910 def _local_pull(self, repository_path, reference):
920 def _local_pull(self, repository_path, reference):
911 """
921 """
912 Fetch a branch, bookmark or commit from a local repository.
922 Fetch a branch, bookmark or commit from a local repository.
913 """
923 """
914 repository_path = os.path.abspath(repository_path)
924 repository_path = os.path.abspath(repository_path)
915 if repository_path == self.path:
925 if repository_path == self.path:
916 raise ValueError('Cannot pull from the same repository')
926 raise ValueError('Cannot pull from the same repository')
917
927
918 reference_type_to_option_name = {
928 reference_type_to_option_name = {
919 'book': 'bookmark',
929 'book': 'bookmark',
920 'branch': 'branch',
930 'branch': 'branch',
921 }
931 }
922 option_name = reference_type_to_option_name.get(
932 option_name = reference_type_to_option_name.get(
923 reference.type, 'revision')
933 reference.type, 'revision')
924
934
925 if option_name == 'revision':
935 if option_name == 'revision':
926 ref = reference.commit_id
936 ref = reference.commit_id
927 else:
937 else:
928 ref = reference.name
938 ref = reference.name
929
939
930 options = {option_name: [ref]}
940 options = {option_name: [ref]}
931 self._remote.pull_cmd(repository_path, hooks=False, **options)
941 self._remote.pull_cmd(repository_path, hooks=False, **options)
932 self._remote.invalidate_vcs_cache()
942 self._remote.invalidate_vcs_cache()
933
943
934 def bookmark(self, bookmark, revision=None):
944 def bookmark(self, bookmark, revision=None):
935 if isinstance(bookmark, unicode):
945 if isinstance(bookmark, unicode):
936 bookmark = safe_str(bookmark)
946 bookmark = safe_str(bookmark)
937 self._remote.bookmark(bookmark, revision=revision)
947 self._remote.bookmark(bookmark, revision=revision)
938 self._remote.invalidate_vcs_cache()
948 self._remote.invalidate_vcs_cache()
939
949
940 def get_path_permissions(self, username):
950 def get_path_permissions(self, username):
941 hgacl_file = os.path.join(self.path, '.hg/hgacl')
951 hgacl_file = os.path.join(self.path, '.hg/hgacl')
942
952
943 def read_patterns(suffix):
953 def read_patterns(suffix):
944 svalue = None
954 svalue = None
945 for section, option in [
955 for section, option in [
946 ('narrowacl', username + suffix),
956 ('narrowacl', username + suffix),
947 ('narrowacl', 'default' + suffix),
957 ('narrowacl', 'default' + suffix),
948 ('narrowhgacl', username + suffix),
958 ('narrowhgacl', username + suffix),
949 ('narrowhgacl', 'default' + suffix)
959 ('narrowhgacl', 'default' + suffix)
950 ]:
960 ]:
951 try:
961 try:
952 svalue = hgacl.get(section, option)
962 svalue = hgacl.get(section, option)
953 break # stop at the first value we find
963 break # stop at the first value we find
954 except configparser.NoOptionError:
964 except configparser.NoOptionError:
955 pass
965 pass
956 if not svalue:
966 if not svalue:
957 return None
967 return None
958 result = ['/']
968 result = ['/']
959 for pattern in svalue.split():
969 for pattern in svalue.split():
960 result.append(pattern)
970 result.append(pattern)
961 if '*' not in pattern and '?' not in pattern:
971 if '*' not in pattern and '?' not in pattern:
962 result.append(pattern + '/*')
972 result.append(pattern + '/*')
963 return result
973 return result
964
974
965 if os.path.exists(hgacl_file):
975 if os.path.exists(hgacl_file):
966 try:
976 try:
967 hgacl = configparser.RawConfigParser()
977 hgacl = configparser.RawConfigParser()
968 hgacl.read(hgacl_file)
978 hgacl.read(hgacl_file)
969
979
970 includes = read_patterns('.includes')
980 includes = read_patterns('.includes')
971 excludes = read_patterns('.excludes')
981 excludes = read_patterns('.excludes')
972 return BasePathPermissionChecker.create_from_patterns(
982 return BasePathPermissionChecker.create_from_patterns(
973 includes, excludes)
983 includes, excludes)
974 except BaseException as e:
984 except BaseException as e:
975 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
985 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
976 hgacl_file, self.name, e)
986 hgacl_file, self.name, e)
977 raise exceptions.RepositoryRequirementError(msg)
987 raise exceptions.RepositoryRequirementError(msg)
978 else:
988 else:
979 return None
989 return None
980
990
981
991
982 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
992 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
983
993
984 def _commit_factory(self, commit_id):
994 def _commit_factory(self, commit_id):
985 return self.repo.get_commit(
995 return self.repo.get_commit(
986 commit_idx=commit_id, pre_load=self.pre_load)
996 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,2072 +1,2072 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30
30
31 import datetime
31 import datetime
32 import urllib
32 import urllib
33 import collections
33 import collections
34
34
35 from pyramid import compat
35 from pyramid import compat
36 from pyramid.threadlocal import get_current_request
36 from pyramid.threadlocal import get_current_request
37
37
38 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.translation import lazy_ugettext
39 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 from rhodecode.lib import audit_logger
41 from rhodecode.lib import audit_logger
42 from rhodecode.lib.compat import OrderedDict
42 from rhodecode.lib.compat import OrderedDict
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.markup_renderer import (
44 from rhodecode.lib.markup_renderer import (
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 from rhodecode.lib.utils2 import (
46 from rhodecode.lib.utils2 import (
47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 get_current_rhodecode_user)
48 get_current_rhodecode_user)
49 from rhodecode.lib.vcs.backends.base import (
49 from rhodecode.lib.vcs.backends.base import (
50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 TargetRefMissing, SourceRefMissing)
51 TargetRefMissing, SourceRefMissing)
52 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 from rhodecode.lib.vcs.exceptions import (
53 from rhodecode.lib.vcs.exceptions import (
54 CommitDoesNotExistError, EmptyRepositoryError)
54 CommitDoesNotExistError, EmptyRepositoryError)
55 from rhodecode.model import BaseModel
55 from rhodecode.model import BaseModel
56 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.comment import CommentsModel
58 from rhodecode.model.db import (
58 from rhodecode.model.db import (
59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 from rhodecode.model.meta import Session
61 from rhodecode.model.meta import Session
62 from rhodecode.model.notification import NotificationModel, \
62 from rhodecode.model.notification import NotificationModel, \
63 EmailNotificationModel
63 EmailNotificationModel
64 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.scm import ScmModel
65 from rhodecode.model.settings import VcsSettingsModel
65 from rhodecode.model.settings import VcsSettingsModel
66
66
67
67
68 log = logging.getLogger(__name__)
68 log = logging.getLogger(__name__)
69
69
70
70
# Holds the outcome of updating the commits of a pull request.
class UpdateResponse(object):

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        """
        :param executed: whether the update was actually performed
        :param reason: status/failure reason describing the outcome
        :param new: source reference after the update
        :param old: source reference before the update
        :param common_ancestor_id: common ancestor commit id of source/target
        :param commit_changes: per-commit change summary (exposed as ``changes``)
        :param source_changed: True when the source reference moved
        :param target_changed: True when the target reference moved
        """
        # Note: ``commit_changes`` is deliberately stored under ``changes``.
        attrs = dict(
            executed=executed, reason=reason, new=new, old=old,
            common_ancestor_id=common_ancestor_id, changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
        self.__dict__.update(attrs)
86
86
87
87
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: repository the changes come from
    :param source_ref: commit id on the source side of the diff
    :param target_repo: repository the changes would be merged into
    :param target_ref: commit id on the target side of the diff
    :param get_authors: when True, also annotate the changed lines and count
        the original authors of the modified code (more expensive)
    :param get_commit_authors: when True, collect the RhodeCode users that
        authored the incoming commits
    :returns: dict with keys ``commits``, ``files``, ``stats``, ``ancestor``,
        ``original_authors`` and ``commit_authors``
    :raises ValueError: when the two refs share no common ancestor
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    # without a common ancestor there is no meaningful "incoming" diff
    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    # collect per-file stats and the old line numbers touched by the diff
    all_files = []
    all_files_changes = []
    changed_lines = {}
    stats = [0, 0]  # [added, deleted] totals across all files
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        changed_lines[f['filename']] = []
        # a file with fewer than 2 chunks has no real line changes to record
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                # only deleted/modified lines existed in the old version,
                # so only those have an original author to attribute
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    _commit_cache = {}  # memoizes annotate commit lookups by commit id

    commits = []
    if get_commit_authors:
        commits = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author"])

        for commit in commits:
            user = User.get_from_cs_author(commit.author)
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        # annotate against the ancestor: it holds the pre-change content
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():
            try:
                node = target_commit.get_node(fname)
            except Exception:
                # file may not exist in the ancestor (e.g. newly added); skip
                continue

            if not isinstance(node, FileNode):
                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
206
206
207
207
class PullRequestModel(BaseModel):
    """Model-layer logic for :class:`PullRequest` objects."""

    cls = PullRequest

    # default number of context lines used when rendering PR diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # user-facing (translated) messages keyed by UpdateFailureReason,
    # used when reporting the outcome of a pull-request update
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # ref types accepted when creating a pull request
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # ref types that may be updated after creation (tags are immutable)
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
233
233
234 def __get_pull_request(self, pull_request):
234 def __get_pull_request(self, pull_request):
235 return self._get_instance((
235 return self._get_instance((
236 PullRequest, PullRequestVersion), pull_request)
236 PullRequest, PullRequestVersion), pull_request)
237
237
238 def _check_perms(self, perms, pull_request, user, api=False):
238 def _check_perms(self, perms, pull_request, user, api=False):
239 if not api:
239 if not api:
240 return h.HasRepoPermissionAny(*perms)(
240 return h.HasRepoPermissionAny(*perms)(
241 user=user, repo_name=pull_request.target_repo.repo_name)
241 user=user, repo_name=pull_request.target_repo.repo_name)
242 else:
242 else:
243 return h.HasRepoPermissionAnyApi(*perms)(
243 return h.HasRepoPermissionAnyApi(*perms)(
244 user=user, repo_name=pull_request.target_repo.repo_name)
244 user=user, repo_name=pull_request.target_repo.repo_name)
245
245
246 def check_user_read(self, pull_request, user, api=False):
246 def check_user_read(self, pull_request, user, api=False):
247 _perms = ('repository.admin', 'repository.write', 'repository.read',)
247 _perms = ('repository.admin', 'repository.write', 'repository.read',)
248 return self._check_perms(_perms, pull_request, user, api)
248 return self._check_perms(_perms, pull_request, user, api)
249
249
250 def check_user_merge(self, pull_request, user, api=False):
250 def check_user_merge(self, pull_request, user, api=False):
251 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
251 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
252 return self._check_perms(_perms, pull_request, user, api)
252 return self._check_perms(_perms, pull_request, user, api)
253
253
254 def check_user_update(self, pull_request, user, api=False):
254 def check_user_update(self, pull_request, user, api=False):
255 owner = user.user_id == pull_request.user_id
255 owner = user.user_id == pull_request.user_id
256 return self.check_user_merge(pull_request, user, api) or owner
256 return self.check_user_merge(pull_request, user, api) or owner
257
257
258 def check_user_delete(self, pull_request, user):
258 def check_user_delete(self, pull_request, user):
259 owner = user.user_id == pull_request.user_id
259 owner = user.user_id == pull_request.user_id
260 _perms = ('repository.admin',)
260 _perms = ('repository.admin',)
261 return self._check_perms(_perms, pull_request, user) or owner
261 return self._check_perms(_perms, pull_request, user) or owner
262
262
263 def check_user_change_status(self, pull_request, user, api=False):
263 def check_user_change_status(self, pull_request, user, api=False):
264 reviewer = user.user_id in [x.user_id for x in
264 reviewer = user.user_id in [x.user_id for x in
265 pull_request.reviewers]
265 pull_request.reviewers]
266 return self.check_user_update(pull_request, user, api) or reviewer
266 return self.check_user_update(pull_request, user, api) or reviewer
267
267
268 def check_user_comment(self, pull_request, user):
268 def check_user_comment(self, pull_request, user):
269 owner = user.user_id == pull_request.user_id
269 owner = user.user_id == pull_request.user_id
270 return self.check_user_read(pull_request, user) or owner
270 return self.check_user_read(pull_request, user) or owner
271
271
272 def get(self, pull_request):
272 def get(self, pull_request):
273 return self.__get_pull_request(pull_request)
273 return self.__get_pull_request(pull_request)
274
274
275 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
275 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
276 statuses=None, opened_by=None, order_by=None,
276 statuses=None, opened_by=None, order_by=None,
277 order_dir='desc', only_created=False):
277 order_dir='desc', only_created=False):
278 repo = None
278 repo = None
279 if repo_name:
279 if repo_name:
280 repo = self._get_repo(repo_name)
280 repo = self._get_repo(repo_name)
281
281
282 q = PullRequest.query()
282 q = PullRequest.query()
283
283
284 if search_q:
284 if search_q:
285 like_expression = u'%{}%'.format(safe_unicode(search_q))
285 like_expression = u'%{}%'.format(safe_unicode(search_q))
286 q = q.join(User)
286 q = q.join(User)
287 q = q.filter(or_(
287 q = q.filter(or_(
288 cast(PullRequest.pull_request_id, String).ilike(like_expression),
288 cast(PullRequest.pull_request_id, String).ilike(like_expression),
289 User.username.ilike(like_expression),
289 User.username.ilike(like_expression),
290 PullRequest.title.ilike(like_expression),
290 PullRequest.title.ilike(like_expression),
291 PullRequest.description.ilike(like_expression),
291 PullRequest.description.ilike(like_expression),
292 ))
292 ))
293
293
294 # source or target
294 # source or target
295 if repo and source:
295 if repo and source:
296 q = q.filter(PullRequest.source_repo == repo)
296 q = q.filter(PullRequest.source_repo == repo)
297 elif repo:
297 elif repo:
298 q = q.filter(PullRequest.target_repo == repo)
298 q = q.filter(PullRequest.target_repo == repo)
299
299
300 # closed,opened
300 # closed,opened
301 if statuses:
301 if statuses:
302 q = q.filter(PullRequest.status.in_(statuses))
302 q = q.filter(PullRequest.status.in_(statuses))
303
303
304 # opened by filter
304 # opened by filter
305 if opened_by:
305 if opened_by:
306 q = q.filter(PullRequest.user_id.in_(opened_by))
306 q = q.filter(PullRequest.user_id.in_(opened_by))
307
307
308 # only get those that are in "created" state
308 # only get those that are in "created" state
309 if only_created:
309 if only_created:
310 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
310 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
311
311
312 if order_by:
312 if order_by:
313 order_map = {
313 order_map = {
314 'name_raw': PullRequest.pull_request_id,
314 'name_raw': PullRequest.pull_request_id,
315 'id': PullRequest.pull_request_id,
315 'id': PullRequest.pull_request_id,
316 'title': PullRequest.title,
316 'title': PullRequest.title,
317 'updated_on_raw': PullRequest.updated_on,
317 'updated_on_raw': PullRequest.updated_on,
318 'target_repo': PullRequest.target_repo_id
318 'target_repo': PullRequest.target_repo_id
319 }
319 }
320 if order_dir == 'asc':
320 if order_dir == 'asc':
321 q = q.order_by(order_map[order_by].asc())
321 q = q.order_by(order_map[order_by].asc())
322 else:
322 else:
323 q = q.order_by(order_map[order_by].desc())
323 q = q.order_by(order_map[order_by].desc())
324
324
325 return q
325 return q
326
326
327 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
327 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
328 opened_by=None):
328 opened_by=None):
329 """
329 """
330 Count the number of pull requests for a specific repository.
330 Count the number of pull requests for a specific repository.
331
331
332 :param repo_name: target or source repo
332 :param repo_name: target or source repo
333 :param search_q: filter by text
333 :param search_q: filter by text
334 :param source: boolean flag to specify if repo_name refers to source
334 :param source: boolean flag to specify if repo_name refers to source
335 :param statuses: list of pull request statuses
335 :param statuses: list of pull request statuses
336 :param opened_by: author user of the pull request
336 :param opened_by: author user of the pull request
337 :returns: int number of pull requests
337 :returns: int number of pull requests
338 """
338 """
339 q = self._prepare_get_all_query(
339 q = self._prepare_get_all_query(
340 repo_name, search_q=search_q, source=source, statuses=statuses,
340 repo_name, search_q=search_q, source=source, statuses=statuses,
341 opened_by=opened_by)
341 opened_by=opened_by)
342
342
343 return q.count()
343 return q.count()
344
344
345 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
345 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
346 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
346 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
347 """
347 """
348 Get all pull requests for a specific repository.
348 Get all pull requests for a specific repository.
349
349
350 :param repo_name: target or source repo
350 :param repo_name: target or source repo
351 :param search_q: filter by text
351 :param search_q: filter by text
352 :param source: boolean flag to specify if repo_name refers to source
352 :param source: boolean flag to specify if repo_name refers to source
353 :param statuses: list of pull request statuses
353 :param statuses: list of pull request statuses
354 :param opened_by: author user of the pull request
354 :param opened_by: author user of the pull request
355 :param offset: pagination offset
355 :param offset: pagination offset
356 :param length: length of returned list
356 :param length: length of returned list
357 :param order_by: order of the returned list
357 :param order_by: order of the returned list
358 :param order_dir: 'asc' or 'desc' ordering direction
358 :param order_dir: 'asc' or 'desc' ordering direction
359 :returns: list of pull requests
359 :returns: list of pull requests
360 """
360 """
361 q = self._prepare_get_all_query(
361 q = self._prepare_get_all_query(
362 repo_name, search_q=search_q, source=source, statuses=statuses,
362 repo_name, search_q=search_q, source=source, statuses=statuses,
363 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
363 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
364
364
365 if length:
365 if length:
366 pull_requests = q.limit(length).offset(offset).all()
366 pull_requests = q.limit(length).offset(offset).all()
367 else:
367 else:
368 pull_requests = q.all()
368 pull_requests = q.all()
369
369
370 return pull_requests
370 return pull_requests
371
371
372 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
372 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
373 opened_by=None):
373 opened_by=None):
374 """
374 """
375 Count the number of pull requests for a specific repository that are
375 Count the number of pull requests for a specific repository that are
376 awaiting review.
376 awaiting review.
377
377
378 :param repo_name: target or source repo
378 :param repo_name: target or source repo
379 :param search_q: filter by text
379 :param search_q: filter by text
380 :param source: boolean flag to specify if repo_name refers to source
380 :param source: boolean flag to specify if repo_name refers to source
381 :param statuses: list of pull request statuses
381 :param statuses: list of pull request statuses
382 :param opened_by: author user of the pull request
382 :param opened_by: author user of the pull request
383 :returns: int number of pull requests
383 :returns: int number of pull requests
384 """
384 """
385 pull_requests = self.get_awaiting_review(
385 pull_requests = self.get_awaiting_review(
386 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
386 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
387
387
388 return len(pull_requests)
388 return len(pull_requests)
389
389
390 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
390 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
391 opened_by=None, offset=0, length=None,
391 opened_by=None, offset=0, length=None,
392 order_by=None, order_dir='desc'):
392 order_by=None, order_dir='desc'):
393 """
393 """
394 Get all pull requests for a specific repository that are awaiting
394 Get all pull requests for a specific repository that are awaiting
395 review.
395 review.
396
396
397 :param repo_name: target or source repo
397 :param repo_name: target or source repo
398 :param search_q: filter by text
398 :param search_q: filter by text
399 :param source: boolean flag to specify if repo_name refers to source
399 :param source: boolean flag to specify if repo_name refers to source
400 :param statuses: list of pull request statuses
400 :param statuses: list of pull request statuses
401 :param opened_by: author user of the pull request
401 :param opened_by: author user of the pull request
402 :param offset: pagination offset
402 :param offset: pagination offset
403 :param length: length of returned list
403 :param length: length of returned list
404 :param order_by: order of the returned list
404 :param order_by: order of the returned list
405 :param order_dir: 'asc' or 'desc' ordering direction
405 :param order_dir: 'asc' or 'desc' ordering direction
406 :returns: list of pull requests
406 :returns: list of pull requests
407 """
407 """
408 pull_requests = self.get_all(
408 pull_requests = self.get_all(
409 repo_name, search_q=search_q, source=source, statuses=statuses,
409 repo_name, search_q=search_q, source=source, statuses=statuses,
410 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
410 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
411
411
412 _filtered_pull_requests = []
412 _filtered_pull_requests = []
413 for pr in pull_requests:
413 for pr in pull_requests:
414 status = pr.calculated_review_status()
414 status = pr.calculated_review_status()
415 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
415 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
416 ChangesetStatus.STATUS_UNDER_REVIEW]:
416 ChangesetStatus.STATUS_UNDER_REVIEW]:
417 _filtered_pull_requests.append(pr)
417 _filtered_pull_requests.append(pr)
418 if length:
418 if length:
419 return _filtered_pull_requests[offset:offset+length]
419 return _filtered_pull_requests[offset:offset+length]
420 else:
420 else:
421 return _filtered_pull_requests
421 return _filtered_pull_requests
422
422
423 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
423 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
424 opened_by=None, user_id=None):
424 opened_by=None, user_id=None):
425 """
425 """
426 Count the number of pull requests for a specific repository that are
426 Count the number of pull requests for a specific repository that are
427 awaiting review from a specific user.
427 awaiting review from a specific user.
428
428
429 :param repo_name: target or source repo
429 :param repo_name: target or source repo
430 :param search_q: filter by text
430 :param search_q: filter by text
431 :param source: boolean flag to specify if repo_name refers to source
431 :param source: boolean flag to specify if repo_name refers to source
432 :param statuses: list of pull request statuses
432 :param statuses: list of pull request statuses
433 :param opened_by: author user of the pull request
433 :param opened_by: author user of the pull request
434 :param user_id: reviewer user of the pull request
434 :param user_id: reviewer user of the pull request
435 :returns: int number of pull requests
435 :returns: int number of pull requests
436 """
436 """
437 pull_requests = self.get_awaiting_my_review(
437 pull_requests = self.get_awaiting_my_review(
438 repo_name, search_q=search_q, source=source, statuses=statuses,
438 repo_name, search_q=search_q, source=source, statuses=statuses,
439 opened_by=opened_by, user_id=user_id)
439 opened_by=opened_by, user_id=user_id)
440
440
441 return len(pull_requests)
441 return len(pull_requests)
442
442
443 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
443 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
444 opened_by=None, user_id=None, offset=0,
444 opened_by=None, user_id=None, offset=0,
445 length=None, order_by=None, order_dir='desc'):
445 length=None, order_by=None, order_dir='desc'):
446 """
446 """
447 Get all pull requests for a specific repository that are awaiting
447 Get all pull requests for a specific repository that are awaiting
448 review from a specific user.
448 review from a specific user.
449
449
450 :param repo_name: target or source repo
450 :param repo_name: target or source repo
451 :param search_q: filter by text
451 :param search_q: filter by text
452 :param source: boolean flag to specify if repo_name refers to source
452 :param source: boolean flag to specify if repo_name refers to source
453 :param statuses: list of pull request statuses
453 :param statuses: list of pull request statuses
454 :param opened_by: author user of the pull request
454 :param opened_by: author user of the pull request
455 :param user_id: reviewer user of the pull request
455 :param user_id: reviewer user of the pull request
456 :param offset: pagination offset
456 :param offset: pagination offset
457 :param length: length of returned list
457 :param length: length of returned list
458 :param order_by: order of the returned list
458 :param order_by: order of the returned list
459 :param order_dir: 'asc' or 'desc' ordering direction
459 :param order_dir: 'asc' or 'desc' ordering direction
460 :returns: list of pull requests
460 :returns: list of pull requests
461 """
461 """
462 pull_requests = self.get_all(
462 pull_requests = self.get_all(
463 repo_name, search_q=search_q, source=source, statuses=statuses,
463 repo_name, search_q=search_q, source=source, statuses=statuses,
464 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
464 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
465
465
466 _my = PullRequestModel().get_not_reviewed(user_id)
466 _my = PullRequestModel().get_not_reviewed(user_id)
467 my_participation = []
467 my_participation = []
468 for pr in pull_requests:
468 for pr in pull_requests:
469 if pr in _my:
469 if pr in _my:
470 my_participation.append(pr)
470 my_participation.append(pr)
471 _filtered_pull_requests = my_participation
471 _filtered_pull_requests = my_participation
472 if length:
472 if length:
473 return _filtered_pull_requests[offset:offset+length]
473 return _filtered_pull_requests[offset:offset+length]
474 else:
474 else:
475 return _filtered_pull_requests
475 return _filtered_pull_requests
476
476
477 def get_not_reviewed(self, user_id):
477 def get_not_reviewed(self, user_id):
478 return [
478 return [
479 x.pull_request for x in PullRequestReviewers.query().filter(
479 x.pull_request for x in PullRequestReviewers.query().filter(
480 PullRequestReviewers.user_id == user_id).all()
480 PullRequestReviewers.user_id == user_id).all()
481 ]
481 ]
482
482
    def _prepare_participating_query(self, user_id=None, statuses=None, query='',
                                     order_by=None, order_dir='desc'):
        """
        Build the query of pull requests a user participates in, either
        as the author or as one of the reviewers.

        :param user_id: restrict to PRs authored by, or reviewed by, this user
        :param statuses: optional list of PR statuses to keep
        :param query: free-text filter matched (case-insensitive LIKE)
            against PR id, author username, title and description
        :param order_by: one of 'name_raw', 'title', 'updated_on_raw',
            'target_repo'; any other key raises KeyError
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: un-executed query object; callers count or paginate it
        """
        q = PullRequest.query()
        if user_id:
            # sub-select of PR ids where the user is listed as a reviewer
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            # participation = author OR reviewer
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            # rebuild the base query with the participation filter applied
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            # join to User so the author username can be matched too
            q = q.join(User)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))
        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
522
522
523 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
523 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
524 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
524 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
525 return q.count()
525 return q.count()
526
526
527 def get_im_participating_in(
527 def get_im_participating_in(
528 self, user_id=None, statuses=None, query='', offset=0,
528 self, user_id=None, statuses=None, query='', offset=0,
529 length=None, order_by=None, order_dir='desc'):
529 length=None, order_by=None, order_dir='desc'):
530 """
530 """
531 Get all Pull requests that i'm participating in, or i have opened
531 Get all Pull requests that i'm participating in, or i have opened
532 """
532 """
533
533
534 q = self._prepare_participating_query(
534 q = self._prepare_participating_query(
535 user_id, statuses=statuses, query=query, order_by=order_by,
535 user_id, statuses=statuses, query=query, order_by=order_by,
536 order_dir=order_dir)
536 order_dir=order_dir)
537
537
538 if length:
538 if length:
539 pull_requests = q.limit(length).offset(offset).all()
539 pull_requests = q.limit(length).offset(offset).all()
540 else:
540 else:
541 pull_requests = q.all()
541 pull_requests = q.all()
542
542
543 return pull_requests
543 return pull_requests
544
544
545 def get_versions(self, pull_request):
545 def get_versions(self, pull_request):
546 """
546 """
547 returns version of pull request sorted by ID descending
547 returns version of pull request sorted by ID descending
548 """
548 """
549 return PullRequestVersion.query()\
549 return PullRequestVersion.query()\
550 .filter(PullRequestVersion.pull_request == pull_request)\
550 .filter(PullRequestVersion.pull_request == pull_request)\
551 .order_by(PullRequestVersion.pull_request_version_id.asc())\
551 .order_by(PullRequestVersion.pull_request_version_id.asc())\
552 .all()
552 .all()
553
553
554 def get_pr_version(self, pull_request_id, version=None):
554 def get_pr_version(self, pull_request_id, version=None):
555 at_version = None
555 at_version = None
556
556
557 if version and version == 'latest':
557 if version and version == 'latest':
558 pull_request_ver = PullRequest.get(pull_request_id)
558 pull_request_ver = PullRequest.get(pull_request_id)
559 pull_request_obj = pull_request_ver
559 pull_request_obj = pull_request_ver
560 _org_pull_request_obj = pull_request_obj
560 _org_pull_request_obj = pull_request_obj
561 at_version = 'latest'
561 at_version = 'latest'
562 elif version:
562 elif version:
563 pull_request_ver = PullRequestVersion.get_or_404(version)
563 pull_request_ver = PullRequestVersion.get_or_404(version)
564 pull_request_obj = pull_request_ver
564 pull_request_obj = pull_request_ver
565 _org_pull_request_obj = pull_request_ver.pull_request
565 _org_pull_request_obj = pull_request_ver.pull_request
566 at_version = pull_request_ver.pull_request_version_id
566 at_version = pull_request_ver.pull_request_version_id
567 else:
567 else:
568 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
568 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
569 pull_request_id)
569 pull_request_id)
570
570
571 pull_request_display_obj = PullRequest.get_pr_display_object(
571 pull_request_display_obj = PullRequest.get_pr_display_object(
572 pull_request_obj, _org_pull_request_obj)
572 pull_request_obj, _org_pull_request_obj)
573
573
574 return _org_pull_request_obj, pull_request_obj, \
574 return _org_pull_request_obj, pull_request_obj, \
575 pull_request_display_obj, at_version
575 pull_request_display_obj, at_version
576
576
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a pull request, attach its reviewers, set the initial
        commit statuses, and run a first merge simulation.

        :param created_by: user (or user id) creating the pull request
        :param source_repo: source repository (name, id or instance)
        :param source_ref: source ref string
        :param target_repo: target repository (name, id or instance)
        :param target_ref: target ref string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, rules)
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: pre-computed common ancestor commit id
        :param description_renderer: renderer name for the description
        :param reviewer_data: extra reviewer-rule payload stored on the PR
        :param translator: translation function; defaults to the request's
        :param auth_user: acting auth user; defaults to created_by's
        :returns: the created PullRequest, re-fetched after commit
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        Session().add(pull_request)
        # flush so pull_request gets its id before reviewers are attached
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
690
690
691 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
691 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
692 pull_request = self.__get_pull_request(pull_request)
692 pull_request = self.__get_pull_request(pull_request)
693 target_scm = pull_request.target_repo.scm_instance()
693 target_scm = pull_request.target_repo.scm_instance()
694 if action == 'create':
694 if action == 'create':
695 trigger_hook = hooks_utils.trigger_create_pull_request_hook
695 trigger_hook = hooks_utils.trigger_create_pull_request_hook
696 elif action == 'merge':
696 elif action == 'merge':
697 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
697 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
698 elif action == 'close':
698 elif action == 'close':
699 trigger_hook = hooks_utils.trigger_close_pull_request_hook
699 trigger_hook = hooks_utils.trigger_close_pull_request_hook
700 elif action == 'review_status_change':
700 elif action == 'review_status_change':
701 trigger_hook = hooks_utils.trigger_review_pull_request_hook
701 trigger_hook = hooks_utils.trigger_review_pull_request_hook
702 elif action == 'update':
702 elif action == 'update':
703 trigger_hook = hooks_utils.trigger_update_pull_request_hook
703 trigger_hook = hooks_utils.trigger_update_pull_request_hook
704 elif action == 'comment':
704 elif action == 'comment':
705 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
705 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
706 else:
706 else:
707 return
707 return
708
708
709 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
709 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
710 pull_request, action, trigger_hook)
710 pull_request, action, trigger_hook)
711 trigger_hook(
711 trigger_hook(
712 username=user.username,
712 username=user.username,
713 repo_name=pull_request.target_repo.repo_name,
713 repo_name=pull_request.target_repo.repo_name,
714 repo_type=target_scm.alias,
714 repo_type=target_scm.alias,
715 pull_request=pull_request,
715 pull_request=pull_request,
716 data=data)
716 data=data)
717
717
718 def _get_commit_ids(self, pull_request):
718 def _get_commit_ids(self, pull_request):
719 """
719 """
720 Return the commit ids of the merged pull request.
720 Return the commit ids of the merged pull request.
721
721
722 This method is not dealing correctly yet with the lack of autoupdates
722 This method is not dealing correctly yet with the lack of autoupdates
723 nor with the implicit target updates.
723 nor with the implicit target updates.
724 For example: if a commit in the source repo is already in the target it
724 For example: if a commit in the source repo is already in the target it
725 will be reported anyways.
725 will be reported anyways.
726 """
726 """
727 merge_rev = pull_request.merge_rev
727 merge_rev = pull_request.merge_rev
728 if merge_rev is None:
728 if merge_rev is None:
729 raise ValueError('This pull request was not merged yet')
729 raise ValueError('This pull request was not merged yet')
730
730
731 commit_ids = list(pull_request.revisions)
731 commit_ids = list(pull_request.revisions)
732 if merge_rev not in commit_ids:
732 if merge_rev not in commit_ids:
733 commit_ids.append(merge_rev)
733 commit_ids.append(merge_rev)
734
734
735 return commit_ids
735 return commit_ids
736
736
737 def merge_repo(self, pull_request, user, extras):
737 def merge_repo(self, pull_request, user, extras):
738 log.debug("Merging pull request %s", pull_request.pull_request_id)
738 log.debug("Merging pull request %s", pull_request.pull_request_id)
739 extras['user_agent'] = 'internal-merge'
739 extras['user_agent'] = 'internal-merge'
740 merge_state = self._merge_pull_request(pull_request, user, extras)
740 merge_state = self._merge_pull_request(pull_request, user, extras)
741 if merge_state.executed:
741 if merge_state.executed:
742 log.debug("Merge was successful, updating the pull request comments.")
742 log.debug("Merge was successful, updating the pull request comments.")
743 self._comment_and_close_pr(pull_request, user, merge_state)
743 self._comment_and_close_pr(pull_request, user, merge_state)
744
744
745 self._log_audit_action(
745 self._log_audit_action(
746 'repo.pull_request.merge',
746 'repo.pull_request.merge',
747 {'merge_state': merge_state.__dict__},
747 {'merge_state': merge_state.__dict__},
748 user, pull_request)
748 user, pull_request)
749
749
750 else:
750 else:
751 log.warn("Merge failed, not updating the pull request.")
751 log.warn("Merge failed, not updating the pull request.")
752 return merge_state
752 return merge_state
753
753
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the low-level vcs merge for *pull_request*.

        Builds the merge commit message, refreshes the target reference,
        starts a hooks callback daemon, and delegates the actual merge to
        the target backend's ``merge()``.

        :param pull_request: pull request to merge
        :param user: user whose name/email author the merge commit
        :param extras: hooks environment; serialized into RC_SCM_DATA
        :param merge_msg: optional template overriding MERGE_MESSAGE_TMPL
        :returns: backend merge-response object
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the target ref points at the current tip before merging
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        # hooks executed during the merge talk back through this daemon
        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
794
794
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: store the merge revision, add the
        closing comment, invalidate target-repo caches and fire the
        'merge' pull request hook.

        :param pull_request: the merged pull request
        :param user: user who performed the merge
        :param merge_state: backend merge response carrying ``merge_ref``
        :param close_msg: optional closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # the closing comment also flips the PR status (closing_pr=True)
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
816
816
817 def has_valid_update_type(self, pull_request):
817 def has_valid_update_type(self, pull_request):
818 source_ref_type = pull_request.source_ref_parts.type
818 source_ref_type = pull_request.source_ref_parts.type
819 return source_ref_type in self.REF_TYPES
819 return source_ref_type in self.REF_TYPES
820
820
821 def get_flow_commits(self, pull_request):
821 def get_flow_commits(self, pull_request):
822
822
823 # source repo
823 # source repo
824 source_ref_name = pull_request.source_ref_parts.name
824 source_ref_name = pull_request.source_ref_parts.name
825 source_ref_type = pull_request.source_ref_parts.type
825 source_ref_type = pull_request.source_ref_parts.type
826 source_ref_id = pull_request.source_ref_parts.commit_id
826 source_ref_id = pull_request.source_ref_parts.commit_id
827 source_repo = pull_request.source_repo.scm_instance()
827 source_repo = pull_request.source_repo.scm_instance()
828
828
829 try:
829 try:
830 if source_ref_type in self.REF_TYPES:
830 if source_ref_type in self.REF_TYPES:
831 source_commit = source_repo.get_commit(source_ref_name)
831 source_commit = source_repo.get_commit(source_ref_name)
832 else:
832 else:
833 source_commit = source_repo.get_commit(source_ref_id)
833 source_commit = source_repo.get_commit(source_ref_id)
834 except CommitDoesNotExistError:
834 except CommitDoesNotExistError:
835 raise SourceRefMissing()
835 raise SourceRefMissing()
836
836
837 # target repo
837 # target repo
838 target_ref_name = pull_request.target_ref_parts.name
838 target_ref_name = pull_request.target_ref_parts.name
839 target_ref_type = pull_request.target_ref_parts.type
839 target_ref_type = pull_request.target_ref_parts.type
840 target_ref_id = pull_request.target_ref_parts.commit_id
840 target_ref_id = pull_request.target_ref_parts.commit_id
841 target_repo = pull_request.target_repo.scm_instance()
841 target_repo = pull_request.target_repo.scm_instance()
842
842
843 try:
843 try:
844 if target_ref_type in self.REF_TYPES:
844 if target_ref_type in self.REF_TYPES:
845 target_commit = target_repo.get_commit(target_ref_name)
845 target_commit = target_repo.get_commit(target_ref_name)
846 else:
846 else:
847 target_commit = target_repo.get_commit(target_ref_id)
847 target_commit = target_repo.get_commit(target_ref_id)
848 except CommitDoesNotExistError:
848 except CommitDoesNotExistError:
849 raise TargetRefMissing()
849 raise TargetRefMissing()
850
850
851 return source_commit, target_commit
851 return source_commit, target_commit
852
852
853 def update_commits(self, pull_request, updating_user):
853 def update_commits(self, pull_request, updating_user):
854 """
854 """
855 Get the updated list of commits for the pull request
855 Get the updated list of commits for the pull request
856 and return the new pull request version and the list
856 and return the new pull request version and the list
857 of commits processed by this update action
857 of commits processed by this update action
858
858
859 updating_user is the user_object who triggered the update
859 updating_user is the user_object who triggered the update
860 """
860 """
861 pull_request = self.__get_pull_request(pull_request)
861 pull_request = self.__get_pull_request(pull_request)
862 source_ref_type = pull_request.source_ref_parts.type
862 source_ref_type = pull_request.source_ref_parts.type
863 source_ref_name = pull_request.source_ref_parts.name
863 source_ref_name = pull_request.source_ref_parts.name
864 source_ref_id = pull_request.source_ref_parts.commit_id
864 source_ref_id = pull_request.source_ref_parts.commit_id
865
865
866 target_ref_type = pull_request.target_ref_parts.type
866 target_ref_type = pull_request.target_ref_parts.type
867 target_ref_name = pull_request.target_ref_parts.name
867 target_ref_name = pull_request.target_ref_parts.name
868 target_ref_id = pull_request.target_ref_parts.commit_id
868 target_ref_id = pull_request.target_ref_parts.commit_id
869
869
870 if not self.has_valid_update_type(pull_request):
870 if not self.has_valid_update_type(pull_request):
871 log.debug("Skipping update of pull request %s due to ref type: %s",
871 log.debug("Skipping update of pull request %s due to ref type: %s",
872 pull_request, source_ref_type)
872 pull_request, source_ref_type)
873 return UpdateResponse(
873 return UpdateResponse(
874 executed=False,
874 executed=False,
875 reason=UpdateFailureReason.WRONG_REF_TYPE,
875 reason=UpdateFailureReason.WRONG_REF_TYPE,
876 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
876 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
877 source_changed=False, target_changed=False)
877 source_changed=False, target_changed=False)
878
878
879 try:
879 try:
880 source_commit, target_commit = self.get_flow_commits(pull_request)
880 source_commit, target_commit = self.get_flow_commits(pull_request)
881 except SourceRefMissing:
881 except SourceRefMissing:
882 return UpdateResponse(
882 return UpdateResponse(
883 executed=False,
883 executed=False,
884 reason=UpdateFailureReason.MISSING_SOURCE_REF,
884 reason=UpdateFailureReason.MISSING_SOURCE_REF,
885 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
885 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
886 source_changed=False, target_changed=False)
886 source_changed=False, target_changed=False)
887 except TargetRefMissing:
887 except TargetRefMissing:
888 return UpdateResponse(
888 return UpdateResponse(
889 executed=False,
889 executed=False,
890 reason=UpdateFailureReason.MISSING_TARGET_REF,
890 reason=UpdateFailureReason.MISSING_TARGET_REF,
891 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
891 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
892 source_changed=False, target_changed=False)
892 source_changed=False, target_changed=False)
893
893
894 source_changed = source_ref_id != source_commit.raw_id
894 source_changed = source_ref_id != source_commit.raw_id
895 target_changed = target_ref_id != target_commit.raw_id
895 target_changed = target_ref_id != target_commit.raw_id
896
896
897 if not (source_changed or target_changed):
897 if not (source_changed or target_changed):
898 log.debug("Nothing changed in pull request %s", pull_request)
898 log.debug("Nothing changed in pull request %s", pull_request)
899 return UpdateResponse(
899 return UpdateResponse(
900 executed=False,
900 executed=False,
901 reason=UpdateFailureReason.NO_CHANGE,
901 reason=UpdateFailureReason.NO_CHANGE,
902 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
902 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
903 source_changed=target_changed, target_changed=source_changed)
903 source_changed=target_changed, target_changed=source_changed)
904
904
905 change_in_found = 'target repo' if target_changed else 'source repo'
905 change_in_found = 'target repo' if target_changed else 'source repo'
906 log.debug('Updating pull request because of change in %s detected',
906 log.debug('Updating pull request because of change in %s detected',
907 change_in_found)
907 change_in_found)
908
908
909 # Finally there is a need for an update, in case of source change
909 # Finally there is a need for an update, in case of source change
910 # we create a new version, else just an update
910 # we create a new version, else just an update
911 if source_changed:
911 if source_changed:
912 pull_request_version = self._create_version_from_snapshot(pull_request)
912 pull_request_version = self._create_version_from_snapshot(pull_request)
913 self._link_comments_to_version(pull_request_version)
913 self._link_comments_to_version(pull_request_version)
914 else:
914 else:
915 try:
915 try:
916 ver = pull_request.versions[-1]
916 ver = pull_request.versions[-1]
917 except IndexError:
917 except IndexError:
918 ver = None
918 ver = None
919
919
920 pull_request.pull_request_version_id = \
920 pull_request.pull_request_version_id = \
921 ver.pull_request_version_id if ver else None
921 ver.pull_request_version_id if ver else None
922 pull_request_version = pull_request
922 pull_request_version = pull_request
923
923
924 source_repo = pull_request.source_repo.scm_instance()
924 source_repo = pull_request.source_repo.scm_instance()
925 target_repo = pull_request.target_repo.scm_instance()
925 target_repo = pull_request.target_repo.scm_instance()
926
926
927 # re-compute commit ids
927 # re-compute commit ids
928 old_commit_ids = pull_request.revisions
928 old_commit_ids = pull_request.revisions
929 pre_load = ["author", "date", "message", "branch"]
929 pre_load = ["author", "date", "message", "branch"]
930 commit_ranges = target_repo.compare(
930 commit_ranges = target_repo.compare(
931 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
931 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
932 pre_load=pre_load)
932 pre_load=pre_load)
933
933
934 target_ref = target_commit.raw_id
934 target_ref = target_commit.raw_id
935 source_ref = source_commit.raw_id
935 source_ref = source_commit.raw_id
936 ancestor_commit_id = target_repo.get_common_ancestor(
936 ancestor_commit_id = target_repo.get_common_ancestor(
937 target_ref, source_ref, source_repo)
937 target_ref, source_ref, source_repo)
938
938
939 if not ancestor_commit_id:
939 if not ancestor_commit_id:
940 raise ValueError(
940 raise ValueError(
941 'cannot calculate diff info without a common ancestor. '
941 'cannot calculate diff info without a common ancestor. '
942 'Make sure both repositories are related, and have a common forking commit.')
942 'Make sure both repositories are related, and have a common forking commit.')
943
943
944 pull_request.common_ancestor_id = ancestor_commit_id
944 pull_request.common_ancestor_id = ancestor_commit_id
945
945
946 pull_request.source_ref = '%s:%s:%s' % (
946 pull_request.source_ref = '%s:%s:%s' % (
947 source_ref_type, source_ref_name, source_commit.raw_id)
947 source_ref_type, source_ref_name, source_commit.raw_id)
948 pull_request.target_ref = '%s:%s:%s' % (
948 pull_request.target_ref = '%s:%s:%s' % (
949 target_ref_type, target_ref_name, ancestor_commit_id)
949 target_ref_type, target_ref_name, ancestor_commit_id)
950
950
951 pull_request.revisions = [
951 pull_request.revisions = [
952 commit.raw_id for commit in reversed(commit_ranges)]
952 commit.raw_id for commit in reversed(commit_ranges)]
953 pull_request.updated_on = datetime.datetime.now()
953 pull_request.updated_on = datetime.datetime.now()
954 Session().add(pull_request)
954 Session().add(pull_request)
955 new_commit_ids = pull_request.revisions
955 new_commit_ids = pull_request.revisions
956
956
957 old_diff_data, new_diff_data = self._generate_update_diffs(
957 old_diff_data, new_diff_data = self._generate_update_diffs(
958 pull_request, pull_request_version)
958 pull_request, pull_request_version)
959
959
960 # calculate commit and file changes
960 # calculate commit and file changes
961 commit_changes = self._calculate_commit_id_changes(
961 commit_changes = self._calculate_commit_id_changes(
962 old_commit_ids, new_commit_ids)
962 old_commit_ids, new_commit_ids)
963 file_changes = self._calculate_file_changes(
963 file_changes = self._calculate_file_changes(
964 old_diff_data, new_diff_data)
964 old_diff_data, new_diff_data)
965
965
966 # set comments as outdated if DIFFS changed
966 # set comments as outdated if DIFFS changed
967 CommentsModel().outdate_comments(
967 CommentsModel().outdate_comments(
968 pull_request, old_diff_data=old_diff_data,
968 pull_request, old_diff_data=old_diff_data,
969 new_diff_data=new_diff_data)
969 new_diff_data=new_diff_data)
970
970
971 valid_commit_changes = (commit_changes.added or commit_changes.removed)
971 valid_commit_changes = (commit_changes.added or commit_changes.removed)
972 file_node_changes = (
972 file_node_changes = (
973 file_changes.added or file_changes.modified or file_changes.removed)
973 file_changes.added or file_changes.modified or file_changes.removed)
974 pr_has_changes = valid_commit_changes or file_node_changes
974 pr_has_changes = valid_commit_changes or file_node_changes
975
975
976 # Add an automatic comment to the pull request, in case
976 # Add an automatic comment to the pull request, in case
977 # anything has changed
977 # anything has changed
978 if pr_has_changes:
978 if pr_has_changes:
979 update_comment = CommentsModel().create(
979 update_comment = CommentsModel().create(
980 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
980 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
981 repo=pull_request.target_repo,
981 repo=pull_request.target_repo,
982 user=pull_request.author,
982 user=pull_request.author,
983 pull_request=pull_request,
983 pull_request=pull_request,
984 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
984 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
985
985
986 # Update status to "Under Review" for added commits
986 # Update status to "Under Review" for added commits
987 for commit_id in commit_changes.added:
987 for commit_id in commit_changes.added:
988 ChangesetStatusModel().set_status(
988 ChangesetStatusModel().set_status(
989 repo=pull_request.source_repo,
989 repo=pull_request.source_repo,
990 status=ChangesetStatus.STATUS_UNDER_REVIEW,
990 status=ChangesetStatus.STATUS_UNDER_REVIEW,
991 comment=update_comment,
991 comment=update_comment,
992 user=pull_request.author,
992 user=pull_request.author,
993 pull_request=pull_request,
993 pull_request=pull_request,
994 revision=commit_id)
994 revision=commit_id)
995
995
996 # send update email to users
996 # send update email to users
997 try:
997 try:
998 self.notify_users(pull_request=pull_request, updating_user=updating_user,
998 self.notify_users(pull_request=pull_request, updating_user=updating_user,
999 ancestor_commit_id=ancestor_commit_id,
999 ancestor_commit_id=ancestor_commit_id,
1000 commit_changes=commit_changes,
1000 commit_changes=commit_changes,
1001 file_changes=file_changes)
1001 file_changes=file_changes)
1002 except Exception:
1002 except Exception:
1003 log.exception('Failed to send email notification to users')
1003 log.exception('Failed to send email notification to users')
1004
1004
1005 log.debug(
1005 log.debug(
1006 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1006 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1007 'removed_ids: %s', pull_request.pull_request_id,
1007 'removed_ids: %s', pull_request.pull_request_id,
1008 commit_changes.added, commit_changes.common, commit_changes.removed)
1008 commit_changes.added, commit_changes.common, commit_changes.removed)
1009 log.debug(
1009 log.debug(
1010 'Updated pull request with the following file changes: %s',
1010 'Updated pull request with the following file changes: %s',
1011 file_changes)
1011 file_changes)
1012
1012
1013 log.info(
1013 log.info(
1014 "Updated pull request %s from commit %s to commit %s, "
1014 "Updated pull request %s from commit %s to commit %s, "
1015 "stored new version %s of this pull request.",
1015 "stored new version %s of this pull request.",
1016 pull_request.pull_request_id, source_ref_id,
1016 pull_request.pull_request_id, source_ref_id,
1017 pull_request.source_ref_parts.commit_id,
1017 pull_request.source_ref_parts.commit_id,
1018 pull_request_version.pull_request_version_id)
1018 pull_request_version.pull_request_version_id)
1019 Session().commit()
1019 Session().commit()
1020 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1020 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1021
1021
1022 return UpdateResponse(
1022 return UpdateResponse(
1023 executed=True, reason=UpdateFailureReason.NONE,
1023 executed=True, reason=UpdateFailureReason.NONE,
1024 old=pull_request, new=pull_request_version,
1024 old=pull_request, new=pull_request_version,
1025 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1025 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1026 source_changed=source_changed, target_changed=target_changed)
1026 source_changed=source_changed, target_changed=target_changed)
1027
1027
1028 def _create_version_from_snapshot(self, pull_request):
1028 def _create_version_from_snapshot(self, pull_request):
1029 version = PullRequestVersion()
1029 version = PullRequestVersion()
1030 version.title = pull_request.title
1030 version.title = pull_request.title
1031 version.description = pull_request.description
1031 version.description = pull_request.description
1032 version.status = pull_request.status
1032 version.status = pull_request.status
1033 version.pull_request_state = pull_request.pull_request_state
1033 version.pull_request_state = pull_request.pull_request_state
1034 version.created_on = datetime.datetime.now()
1034 version.created_on = datetime.datetime.now()
1035 version.updated_on = pull_request.updated_on
1035 version.updated_on = pull_request.updated_on
1036 version.user_id = pull_request.user_id
1036 version.user_id = pull_request.user_id
1037 version.source_repo = pull_request.source_repo
1037 version.source_repo = pull_request.source_repo
1038 version.source_ref = pull_request.source_ref
1038 version.source_ref = pull_request.source_ref
1039 version.target_repo = pull_request.target_repo
1039 version.target_repo = pull_request.target_repo
1040 version.target_ref = pull_request.target_ref
1040 version.target_ref = pull_request.target_ref
1041
1041
1042 version._last_merge_source_rev = pull_request._last_merge_source_rev
1042 version._last_merge_source_rev = pull_request._last_merge_source_rev
1043 version._last_merge_target_rev = pull_request._last_merge_target_rev
1043 version._last_merge_target_rev = pull_request._last_merge_target_rev
1044 version.last_merge_status = pull_request.last_merge_status
1044 version.last_merge_status = pull_request.last_merge_status
1045 version.last_merge_metadata = pull_request.last_merge_metadata
1045 version.last_merge_metadata = pull_request.last_merge_metadata
1046 version.shadow_merge_ref = pull_request.shadow_merge_ref
1046 version.shadow_merge_ref = pull_request.shadow_merge_ref
1047 version.merge_rev = pull_request.merge_rev
1047 version.merge_rev = pull_request.merge_rev
1048 version.reviewer_data = pull_request.reviewer_data
1048 version.reviewer_data = pull_request.reviewer_data
1049
1049
1050 version.revisions = pull_request.revisions
1050 version.revisions = pull_request.revisions
1051 version.common_ancestor_id = pull_request.common_ancestor_id
1051 version.common_ancestor_id = pull_request.common_ancestor_id
1052 version.pull_request = pull_request
1052 version.pull_request = pull_request
1053 Session().add(version)
1053 Session().add(version)
1054 Session().flush()
1054 Session().flush()
1055
1055
1056 return version
1056 return version
1057
1057
1058 def _generate_update_diffs(self, pull_request, pull_request_version):
1058 def _generate_update_diffs(self, pull_request, pull_request_version):
1059
1059
1060 diff_context = (
1060 diff_context = (
1061 self.DIFF_CONTEXT +
1061 self.DIFF_CONTEXT +
1062 CommentsModel.needed_extra_diff_context())
1062 CommentsModel.needed_extra_diff_context())
1063 hide_whitespace_changes = False
1063 hide_whitespace_changes = False
1064 source_repo = pull_request_version.source_repo
1064 source_repo = pull_request_version.source_repo
1065 source_ref_id = pull_request_version.source_ref_parts.commit_id
1065 source_ref_id = pull_request_version.source_ref_parts.commit_id
1066 target_ref_id = pull_request_version.target_ref_parts.commit_id
1066 target_ref_id = pull_request_version.target_ref_parts.commit_id
1067 old_diff = self._get_diff_from_pr_or_version(
1067 old_diff = self._get_diff_from_pr_or_version(
1068 source_repo, source_ref_id, target_ref_id,
1068 source_repo, source_ref_id, target_ref_id,
1069 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1069 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1070
1070
1071 source_repo = pull_request.source_repo
1071 source_repo = pull_request.source_repo
1072 source_ref_id = pull_request.source_ref_parts.commit_id
1072 source_ref_id = pull_request.source_ref_parts.commit_id
1073 target_ref_id = pull_request.target_ref_parts.commit_id
1073 target_ref_id = pull_request.target_ref_parts.commit_id
1074
1074
1075 new_diff = self._get_diff_from_pr_or_version(
1075 new_diff = self._get_diff_from_pr_or_version(
1076 source_repo, source_ref_id, target_ref_id,
1076 source_repo, source_ref_id, target_ref_id,
1077 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1077 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1078
1078
1079 old_diff_data = diffs.DiffProcessor(old_diff)
1079 old_diff_data = diffs.DiffProcessor(old_diff)
1080 old_diff_data.prepare()
1080 old_diff_data.prepare()
1081 new_diff_data = diffs.DiffProcessor(new_diff)
1081 new_diff_data = diffs.DiffProcessor(new_diff)
1082 new_diff_data.prepare()
1082 new_diff_data.prepare()
1083
1083
1084 return old_diff_data, new_diff_data
1084 return old_diff_data, new_diff_data
1085
1085
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # select comments of this PR that are not yet tied to any version,
        # oldest first; "== None" is the intentional SQLAlchemy spelling of
        # an IS NULL comparison — do not change it to "is None"
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1111
1111
1112 def _calculate_commit_id_changes(self, old_ids, new_ids):
1112 def _calculate_commit_id_changes(self, old_ids, new_ids):
1113 added = [x for x in new_ids if x not in old_ids]
1113 added = [x for x in new_ids if x not in old_ids]
1114 common = [x for x in new_ids if x in old_ids]
1114 common = [x for x in new_ids if x in old_ids]
1115 removed = [x for x in old_ids if x not in new_ids]
1115 removed = [x for x in old_ids if x not in new_ids]
1116 total = new_ids
1116 total = new_ids
1117 return ChangeTuple(added, common, removed, total)
1117 return ChangeTuple(added, common, removed, total)
1118
1118
1119 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1119 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1120
1120
1121 old_files = OrderedDict()
1121 old_files = OrderedDict()
1122 for diff_data in old_diff_data.parsed_diff:
1122 for diff_data in old_diff_data.parsed_diff:
1123 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1123 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1124
1124
1125 added_files = []
1125 added_files = []
1126 modified_files = []
1126 modified_files = []
1127 removed_files = []
1127 removed_files = []
1128 for diff_data in new_diff_data.parsed_diff:
1128 for diff_data in new_diff_data.parsed_diff:
1129 new_filename = diff_data['filename']
1129 new_filename = diff_data['filename']
1130 new_hash = md5_safe(diff_data['raw_diff'])
1130 new_hash = md5_safe(diff_data['raw_diff'])
1131
1131
1132 old_hash = old_files.get(new_filename)
1132 old_hash = old_files.get(new_filename)
1133 if not old_hash:
1133 if not old_hash:
1134 # file is not present in old diff, we have to figure out from parsed diff
1134 # file is not present in old diff, we have to figure out from parsed diff
1135 # operation ADD/REMOVE
1135 # operation ADD/REMOVE
1136 operations_dict = diff_data['stats']['ops']
1136 operations_dict = diff_data['stats']['ops']
1137 if diffs.DEL_FILENODE in operations_dict:
1137 if diffs.DEL_FILENODE in operations_dict:
1138 removed_files.append(new_filename)
1138 removed_files.append(new_filename)
1139 else:
1139 else:
1140 added_files.append(new_filename)
1140 added_files.append(new_filename)
1141 else:
1141 else:
1142 if new_hash != old_hash:
1142 if new_hash != old_hash:
1143 modified_files.append(new_filename)
1143 modified_files.append(new_filename)
1144 # now remove a file from old, since we have seen it already
1144 # now remove a file from old, since we have seen it already
1145 del old_files[new_filename]
1145 del old_files[new_filename]
1146
1146
1147 # removed files is when there are present in old, but not in NEW,
1147 # removed files is when there are present in old, but not in NEW,
1148 # since we remove old files that are present in new diff, left-overs
1148 # since we remove old files that are present in new diff, left-overs
1149 # if any should be the removed files
1149 # if any should be the removed files
1150 removed_files.extend(old_files.keys())
1150 removed_files.extend(old_files.keys())
1151
1151
1152 return FileChangeTuple(added_files, modified_files, removed_files)
1152 return FileChangeTuple(added_files, modified_files, removed_files)
1153
1153
1154 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1154 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1155 """
1155 """
1156 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1156 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1157 so it's always looking the same disregarding on which default
1157 so it's always looking the same disregarding on which default
1158 renderer system is using.
1158 renderer system is using.
1159
1159
1160 :param ancestor_commit_id: ancestor raw_id
1160 :param ancestor_commit_id: ancestor raw_id
1161 :param changes: changes named tuple
1161 :param changes: changes named tuple
1162 :param file_changes: file changes named tuple
1162 :param file_changes: file changes named tuple
1163
1163
1164 """
1164 """
1165 new_status = ChangesetStatus.get_status_lbl(
1165 new_status = ChangesetStatus.get_status_lbl(
1166 ChangesetStatus.STATUS_UNDER_REVIEW)
1166 ChangesetStatus.STATUS_UNDER_REVIEW)
1167
1167
1168 changed_files = (
1168 changed_files = (
1169 file_changes.added + file_changes.modified + file_changes.removed)
1169 file_changes.added + file_changes.modified + file_changes.removed)
1170
1170
1171 params = {
1171 params = {
1172 'under_review_label': new_status,
1172 'under_review_label': new_status,
1173 'added_commits': changes.added,
1173 'added_commits': changes.added,
1174 'removed_commits': changes.removed,
1174 'removed_commits': changes.removed,
1175 'changed_files': changed_files,
1175 'changed_files': changed_files,
1176 'added_files': file_changes.added,
1176 'added_files': file_changes.added,
1177 'modified_files': file_changes.modified,
1177 'modified_files': file_changes.modified,
1178 'removed_files': file_changes.removed,
1178 'removed_files': file_changes.removed,
1179 'ancestor_commit_id': ancestor_commit_id
1179 'ancestor_commit_id': ancestor_commit_id
1180 }
1180 }
1181 renderer = RstTemplateRenderer()
1181 renderer = RstTemplateRenderer()
1182 return renderer.render('pull_request_update.mako', **params)
1182 return renderer.render('pull_request_update.mako', **params)
1183
1183
1184 def edit(self, pull_request, title, description, description_renderer, user):
1184 def edit(self, pull_request, title, description, description_renderer, user):
1185 pull_request = self.__get_pull_request(pull_request)
1185 pull_request = self.__get_pull_request(pull_request)
1186 old_data = pull_request.get_api_data(with_merge_state=False)
1186 old_data = pull_request.get_api_data(with_merge_state=False)
1187 if pull_request.is_closed():
1187 if pull_request.is_closed():
1188 raise ValueError('This pull request is closed')
1188 raise ValueError('This pull request is closed')
1189 if title:
1189 if title:
1190 pull_request.title = title
1190 pull_request.title = title
1191 pull_request.description = description
1191 pull_request.description = description
1192 pull_request.updated_on = datetime.datetime.now()
1192 pull_request.updated_on = datetime.datetime.now()
1193 pull_request.description_renderer = description_renderer
1193 pull_request.description_renderer = description_renderer
1194 Session().add(pull_request)
1194 Session().add(pull_request)
1195 self._log_audit_action(
1195 self._log_audit_action(
1196 'repo.pull_request.edit', {'old_data': old_data},
1196 'repo.pull_request.edit', {'old_data': old_data},
1197 user, pull_request)
1197 user, pull_request)
1198
1198
1199 def update_reviewers(self, pull_request, reviewer_data, user):
1199 def update_reviewers(self, pull_request, reviewer_data, user):
1200 """
1200 """
1201 Update the reviewers in the pull request
1201 Update the reviewers in the pull request
1202
1202
1203 :param pull_request: the pr to update
1203 :param pull_request: the pr to update
1204 :param reviewer_data: list of tuples
1204 :param reviewer_data: list of tuples
1205 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1205 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1206 """
1206 """
1207 pull_request = self.__get_pull_request(pull_request)
1207 pull_request = self.__get_pull_request(pull_request)
1208 if pull_request.is_closed():
1208 if pull_request.is_closed():
1209 raise ValueError('This pull request is closed')
1209 raise ValueError('This pull request is closed')
1210
1210
1211 reviewers = {}
1211 reviewers = {}
1212 for user_id, reasons, mandatory, rules in reviewer_data:
1212 for user_id, reasons, mandatory, rules in reviewer_data:
1213 if isinstance(user_id, (int, compat.string_types)):
1213 if isinstance(user_id, (int, compat.string_types)):
1214 user_id = self._get_user(user_id).user_id
1214 user_id = self._get_user(user_id).user_id
1215 reviewers[user_id] = {
1215 reviewers[user_id] = {
1216 'reasons': reasons, 'mandatory': mandatory}
1216 'reasons': reasons, 'mandatory': mandatory}
1217
1217
1218 reviewers_ids = set(reviewers.keys())
1218 reviewers_ids = set(reviewers.keys())
1219 current_reviewers = PullRequestReviewers.query()\
1219 current_reviewers = PullRequestReviewers.query()\
1220 .filter(PullRequestReviewers.pull_request ==
1220 .filter(PullRequestReviewers.pull_request ==
1221 pull_request).all()
1221 pull_request).all()
1222 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1222 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1223
1223
1224 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1224 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1225 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1225 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1226
1226
1227 log.debug("Adding %s reviewers", ids_to_add)
1227 log.debug("Adding %s reviewers", ids_to_add)
1228 log.debug("Removing %s reviewers", ids_to_remove)
1228 log.debug("Removing %s reviewers", ids_to_remove)
1229 changed = False
1229 changed = False
1230 added_audit_reviewers = []
1230 added_audit_reviewers = []
1231 removed_audit_reviewers = []
1231 removed_audit_reviewers = []
1232
1232
1233 for uid in ids_to_add:
1233 for uid in ids_to_add:
1234 changed = True
1234 changed = True
1235 _usr = self._get_user(uid)
1235 _usr = self._get_user(uid)
1236 reviewer = PullRequestReviewers()
1236 reviewer = PullRequestReviewers()
1237 reviewer.user = _usr
1237 reviewer.user = _usr
1238 reviewer.pull_request = pull_request
1238 reviewer.pull_request = pull_request
1239 reviewer.reasons = reviewers[uid]['reasons']
1239 reviewer.reasons = reviewers[uid]['reasons']
1240 # NOTE(marcink): mandatory shouldn't be changed now
1240 # NOTE(marcink): mandatory shouldn't be changed now
1241 # reviewer.mandatory = reviewers[uid]['reasons']
1241 # reviewer.mandatory = reviewers[uid]['reasons']
1242 Session().add(reviewer)
1242 Session().add(reviewer)
1243 added_audit_reviewers.append(reviewer.get_dict())
1243 added_audit_reviewers.append(reviewer.get_dict())
1244
1244
1245 for uid in ids_to_remove:
1245 for uid in ids_to_remove:
1246 changed = True
1246 changed = True
1247 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1247 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1248 # that prevents and fixes cases that we added the same reviewer twice.
1248 # that prevents and fixes cases that we added the same reviewer twice.
1249 # this CAN happen due to the lack of DB checks
1249 # this CAN happen due to the lack of DB checks
1250 reviewers = PullRequestReviewers.query()\
1250 reviewers = PullRequestReviewers.query()\
1251 .filter(PullRequestReviewers.user_id == uid,
1251 .filter(PullRequestReviewers.user_id == uid,
1252 PullRequestReviewers.pull_request == pull_request)\
1252 PullRequestReviewers.pull_request == pull_request)\
1253 .all()
1253 .all()
1254
1254
1255 for obj in reviewers:
1255 for obj in reviewers:
1256 added_audit_reviewers.append(obj.get_dict())
1256 added_audit_reviewers.append(obj.get_dict())
1257 Session().delete(obj)
1257 Session().delete(obj)
1258
1258
1259 if changed:
1259 if changed:
1260 Session().expire_all()
1260 Session().expire_all()
1261 pull_request.updated_on = datetime.datetime.now()
1261 pull_request.updated_on = datetime.datetime.now()
1262 Session().add(pull_request)
1262 Session().add(pull_request)
1263
1263
1264 # finally store audit logs
1264 # finally store audit logs
1265 for user_data in added_audit_reviewers:
1265 for user_data in added_audit_reviewers:
1266 self._log_audit_action(
1266 self._log_audit_action(
1267 'repo.pull_request.reviewer.add', {'data': user_data},
1267 'repo.pull_request.reviewer.add', {'data': user_data},
1268 user, pull_request)
1268 user, pull_request)
1269 for user_data in removed_audit_reviewers:
1269 for user_data in removed_audit_reviewers:
1270 self._log_audit_action(
1270 self._log_audit_action(
1271 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1271 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1272 user, pull_request)
1272 user, pull_request)
1273
1273
1274 self.notify_reviewers(pull_request, ids_to_add)
1274 self.notify_reviewers(pull_request, ids_to_add)
1275 return ids_to_add, ids_to_remove
1275 return ids_to_add, ids_to_remove
1276
1276
1277 def get_url(self, pull_request, request=None, permalink=False):
1277 def get_url(self, pull_request, request=None, permalink=False):
1278 if not request:
1278 if not request:
1279 request = get_current_request()
1279 request = get_current_request()
1280
1280
1281 if permalink:
1281 if permalink:
1282 return request.route_url(
1282 return request.route_url(
1283 'pull_requests_global',
1283 'pull_requests_global',
1284 pull_request_id=pull_request.pull_request_id,)
1284 pull_request_id=pull_request.pull_request_id,)
1285 else:
1285 else:
1286 return request.route_url('pullrequest_show',
1286 return request.route_url('pullrequest_show',
1287 repo_name=safe_str(pull_request.target_repo.repo_name),
1287 repo_name=safe_str(pull_request.target_repo.repo_name),
1288 pull_request_id=pull_request.pull_request_id,)
1288 pull_request_id=pull_request.pull_request_id,)
1289
1289
1290 def get_shadow_clone_url(self, pull_request, request=None):
1290 def get_shadow_clone_url(self, pull_request, request=None):
1291 """
1291 """
1292 Returns qualified url pointing to the shadow repository. If this pull
1292 Returns qualified url pointing to the shadow repository. If this pull
1293 request is closed there is no shadow repository and ``None`` will be
1293 request is closed there is no shadow repository and ``None`` will be
1294 returned.
1294 returned.
1295 """
1295 """
1296 if pull_request.is_closed():
1296 if pull_request.is_closed():
1297 return None
1297 return None
1298 else:
1298 else:
1299 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1299 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1300 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1300 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1301
1301
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create an in-app notification (plus email) about *pull_request*
        for every user id in *reviewers_ids*. No-op for an empty list.
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        log.debug('Notify following reviewers about pull-request %s', reviewers_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        # link to the pull request page itself
        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics: (commit_id, message) pairs resolved from
        # the source repository
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1362
1362
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Notify pull request reviewers that the pull request was updated
        (new/removed commits, changed files), excluding the user who made
        the update.

        :param ancestor_commit_id: ancestor commit id after the update
        :param commit_changes: object exposing ``added``/``removed`` commit ids
        :param file_changes: object exposing ``added``/``modified``/``removed``
            file lists
        """
        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.reviewers])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
        }

        # render subject + plaintext body for the notification record
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1431
1431
    def delete(self, pull_request, user=None):
        """
        Delete a pull request: clean up its shadow merge workspace, write
        an audit log entry, then remove the database record.

        :param pull_request: pull request instance or id
        :param user: username recorded in the audit log; defaults to the
            currently signed-in user, when any
        """
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        # snapshot state before deletion, for the audit entry
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1443
1443
    def close_pull_request(self, pull_request, user):
        """
        Mark a pull request as closed: drop the shadow merge workspace,
        set the CLOSED status, fire the 'close' hook and audit the action.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        # audit with the final (closed) state
        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1455
1455
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request, leaving a status-changing comment.

        The recorded changeset status is APPROVED only when the calculated
        review status is approved (voting consent); otherwise REJECTED.

        :param message: custom close message; a default status-transition
            message is used when not given
        :return: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # flush so the new comment/status are visible to hooks below
        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1513
1513
1514 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1514 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1515 _ = translator or get_current_request().translate
1515 _ = translator or get_current_request().translate
1516
1516
1517 if not self._is_merge_enabled(pull_request):
1517 if not self._is_merge_enabled(pull_request):
1518 return None, False, _('Server-side pull request merging is disabled.')
1518 return None, False, _('Server-side pull request merging is disabled.')
1519
1519
1520 if pull_request.is_closed():
1520 if pull_request.is_closed():
1521 return None, False, _('This pull request is closed.')
1521 return None, False, _('This pull request is closed.')
1522
1522
1523 merge_possible, msg = self._check_repo_requirements(
1523 merge_possible, msg = self._check_repo_requirements(
1524 target=pull_request.target_repo, source=pull_request.source_repo,
1524 target=pull_request.target_repo, source=pull_request.source_repo,
1525 translator=_)
1525 translator=_)
1526 if not merge_possible:
1526 if not merge_possible:
1527 return None, merge_possible, msg
1527 return None, merge_possible, msg
1528
1528
1529 try:
1529 try:
1530 merge_response = self._try_merge(
1530 merge_response = self._try_merge(
1531 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1531 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1532 log.debug("Merge response: %s", merge_response)
1532 log.debug("Merge response: %s", merge_response)
1533 return merge_response, merge_response.possible, merge_response.merge_status_message
1533 return merge_response, merge_response.possible, merge_response.merge_status_message
1534 except NotImplementedError:
1534 except NotImplementedError:
1535 return None, False, _('Pull request merging is not supported.')
1535 return None, False, _('Pull request merging is not supported.')
1536
1536
1537 def _check_repo_requirements(self, target, source, translator):
1537 def _check_repo_requirements(self, target, source, translator):
1538 """
1538 """
1539 Check if `target` and `source` have compatible requirements.
1539 Check if `target` and `source` have compatible requirements.
1540
1540
1541 Currently this is just checking for largefiles.
1541 Currently this is just checking for largefiles.
1542 """
1542 """
1543 _ = translator
1543 _ = translator
1544 target_has_largefiles = self._has_largefiles(target)
1544 target_has_largefiles = self._has_largefiles(target)
1545 source_has_largefiles = self._has_largefiles(source)
1545 source_has_largefiles = self._has_largefiles(source)
1546 merge_possible = True
1546 merge_possible = True
1547 message = u''
1547 message = u''
1548
1548
1549 if target_has_largefiles != source_has_largefiles:
1549 if target_has_largefiles != source_has_largefiles:
1550 merge_possible = False
1550 merge_possible = False
1551 if source_has_largefiles:
1551 if source_has_largefiles:
1552 message = _(
1552 message = _(
1553 'Target repository large files support is disabled.')
1553 'Target repository large files support is disabled.')
1554 else:
1554 else:
1555 message = _(
1555 message = _(
1556 'Source repository large files support is disabled.')
1556 'Source repository large files support is disabled.')
1557
1557
1558 return merge_possible, message
1558 return merge_possible, message
1559
1559
1560 def _has_largefiles(self, repo):
1560 def _has_largefiles(self, repo):
1561 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1561 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1562 'extensions', 'largefiles')
1562 'extensions', 'largefiles')
1563 return largefiles_ui and largefiles_ui[0].active
1563 return largefiles_ui and largefiles_ui[0].active
1564
1564
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Uses the merge state cached on the pull request when it is still
        current; otherwise performs a dry-run merge in the shadow repo.
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # target ref vanished (e.g. branch deleted) - merge impossible
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is current: rebuild the response from values
            # stored on the pull request
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    # NOTE(review): separator '\n,' yields ",head" line
                    # prefixes - looks transposed from ',\n'; confirm intended
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1618
1618
1619 def _refresh_reference(self, reference, vcs_repository):
1619 def _refresh_reference(self, reference, vcs_repository):
1620 if reference.type in self.UPDATABLE_REF_TYPES:
1620 if reference.type in self.UPDATABLE_REF_TYPES:
1621 name_or_id = reference.name
1621 name_or_id = reference.name
1622 else:
1622 else:
1623 name_or_id = reference.commit_id
1623 name_or_id = reference.commit_id
1624
1624
1625 refreshed_commit = vcs_repository.get_commit(name_or_id)
1625 refreshed_commit = vcs_repository.get_commit(name_or_id)
1626 refreshed_reference = Reference(
1626 refreshed_reference = Reference(
1627 reference.type, reference.name, refreshed_commit.raw_id)
1627 reference.type, reference.name, refreshed_commit.raw_id)
1628 return refreshed_reference
1628 return refreshed_reference
1629
1629
1630 def _needs_merge_state_refresh(self, pull_request, target_reference):
1630 def _needs_merge_state_refresh(self, pull_request, target_reference):
1631 return not(
1631 return not(
1632 pull_request.revisions and
1632 pull_request.revisions and
1633 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1633 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1634 target_reference.commit_id == pull_request._last_merge_target_rev)
1634 target_reference.commit_id == pull_request._last_merge_target_rev)
1635
1635
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow workspace and persist the
        resulting merge state on the pull request.

        :return: the MergeResponse produced by the backend's merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # cache source/target revs so _needs_merge_state_refresh can
            # skip the dry-run next time
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

        pull_request.shadow_merge_ref = merge_state.merge_ref
        Session().add(pull_request)
        Session().commit()

        return merge_state
1661
1661
1662 def _workspace_id(self, pull_request):
1662 def _workspace_id(self, pull_request):
1663 workspace_id = 'pr-%s' % pull_request.pull_request_id
1663 workspace_id = 'pr-%s' % pull_request.pull_request_id
1664 return workspace_id
1664 return workspace_id
1665
1665
1666 def generate_repo_data(self, repo, commit_id=None, branch=None,
1666 def generate_repo_data(self, repo, commit_id=None, branch=None,
1667 bookmark=None, translator=None):
1667 bookmark=None, translator=None):
1668 from rhodecode.model.repo import RepoModel
1668 from rhodecode.model.repo import RepoModel
1669
1669
1670 all_refs, selected_ref = \
1670 all_refs, selected_ref = \
1671 self._get_repo_pullrequest_sources(
1671 self._get_repo_pullrequest_sources(
1672 repo.scm_instance(), commit_id=commit_id,
1672 repo.scm_instance(), commit_id=commit_id,
1673 branch=branch, bookmark=bookmark, translator=translator)
1673 branch=branch, bookmark=bookmark, translator=translator)
1674
1674
1675 refs_select2 = []
1675 refs_select2 = []
1676 for element in all_refs:
1676 for element in all_refs:
1677 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1677 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1678 refs_select2.append({'text': element[1], 'children': children})
1678 refs_select2.append({'text': element[1], 'children': children})
1679
1679
1680 return {
1680 return {
1681 'user': {
1681 'user': {
1682 'user_id': repo.user.user_id,
1682 'user_id': repo.user.user_id,
1683 'username': repo.user.username,
1683 'username': repo.user.username,
1684 'firstname': repo.user.first_name,
1684 'firstname': repo.user.first_name,
1685 'lastname': repo.user.last_name,
1685 'lastname': repo.user.last_name,
1686 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1686 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1687 },
1687 },
1688 'name': repo.repo_name,
1688 'name': repo.repo_name,
1689 'link': RepoModel().get_url(repo),
1689 'link': RepoModel().get_url(repo),
1690 'description': h.chop_at_smart(repo.description_safe, '\n'),
1690 'description': h.chop_at_smart(repo.description_safe, '\n'),
1691 'refs': {
1691 'refs': {
1692 'all_refs': all_refs,
1692 'all_refs': all_refs,
1693 'selected_ref': selected_ref,
1693 'selected_ref': selected_ref,
1694 'select2_refs': refs_select2
1694 'select2_refs': refs_select2
1695 }
1695 }
1696 }
1696 }
1697
1697
1698 def generate_pullrequest_title(self, source, source_ref, target):
1698 def generate_pullrequest_title(self, source, source_ref, target):
1699 return u'{source}#{at_ref} to {target}'.format(
1699 return u'{source}#{at_ref} to {target}'.format(
1700 source=source,
1700 source=source,
1701 at_ref=source_ref,
1701 at_ref=source_ref,
1702 target=target,
1702 target=target,
1703 )
1703 )
1704
1704
1705 def _cleanup_merge_workspace(self, pull_request):
1705 def _cleanup_merge_workspace(self, pull_request):
1706 # Merging related cleanup
1706 # Merging related cleanup
1707 repo_id = pull_request.target_repo.repo_id
1707 repo_id = pull_request.target_repo.repo_id
1708 target_scm = pull_request.target_repo.scm_instance()
1708 target_scm = pull_request.target_repo.scm_instance()
1709 workspace_id = self._workspace_id(pull_request)
1709 workspace_id = self._workspace_id(pull_request)
1710
1710
1711 try:
1711 try:
1712 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1712 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1713 except NotImplementedError:
1713 except NotImplementedError:
1714 pass
1714 pass
1715
1715
1716 def _get_repo_pullrequest_sources(
1716 def _get_repo_pullrequest_sources(
1717 self, repo, commit_id=None, branch=None, bookmark=None,
1717 self, repo, commit_id=None, branch=None, bookmark=None,
1718 translator=None):
1718 translator=None):
1719 """
1719 """
1720 Return a structure with repo's interesting commits, suitable for
1720 Return a structure with repo's interesting commits, suitable for
1721 the selectors in pullrequest controller
1721 the selectors in pullrequest controller
1722
1722
1723 :param commit_id: a commit that must be in the list somehow
1723 :param commit_id: a commit that must be in the list somehow
1724 and selected by default
1724 and selected by default
1725 :param branch: a branch that must be in the list and selected
1725 :param branch: a branch that must be in the list and selected
1726 by default - even if closed
1726 by default - even if closed
1727 :param bookmark: a bookmark that must be in the list and selected
1727 :param bookmark: a bookmark that must be in the list and selected
1728 """
1728 """
1729 _ = translator or get_current_request().translate
1729 _ = translator or get_current_request().translate
1730
1730
1731 commit_id = safe_str(commit_id) if commit_id else None
1731 commit_id = safe_str(commit_id) if commit_id else None
1732 branch = safe_unicode(branch) if branch else None
1732 branch = safe_unicode(branch) if branch else None
1733 bookmark = safe_unicode(bookmark) if bookmark else None
1733 bookmark = safe_unicode(bookmark) if bookmark else None
1734
1734
1735 selected = None
1735 selected = None
1736
1736
1737 # order matters: first source that has commit_id in it will be selected
1737 # order matters: first source that has commit_id in it will be selected
1738 sources = []
1738 sources = []
1739 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1739 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1740 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1740 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1741
1741
1742 if commit_id:
1742 if commit_id:
1743 ref_commit = (h.short_id(commit_id), commit_id)
1743 ref_commit = (h.short_id(commit_id), commit_id)
1744 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1744 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1745
1745
1746 sources.append(
1746 sources.append(
1747 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1747 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1748 )
1748 )
1749
1749
1750 groups = []
1750 groups = []
1751
1751
1752 for group_key, ref_list, group_name, match in sources:
1752 for group_key, ref_list, group_name, match in sources:
1753 group_refs = []
1753 group_refs = []
1754 for ref_name, ref_id in ref_list:
1754 for ref_name, ref_id in ref_list:
1755 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1755 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1756 group_refs.append((ref_key, ref_name))
1756 group_refs.append((ref_key, ref_name))
1757
1757
1758 if not selected:
1758 if not selected:
1759 if set([commit_id, match]) & set([ref_id, ref_name]):
1759 if set([commit_id, match]) & set([ref_id, ref_name]):
1760 selected = ref_key
1760 selected = ref_key
1761
1761
1762 if group_refs:
1762 if group_refs:
1763 groups.append((group_refs, group_name))
1763 groups.append((group_refs, group_name))
1764
1764
1765 if not selected:
1765 if not selected:
1766 ref = commit_id or branch or bookmark
1766 ref = commit_id or branch or bookmark
1767 if ref:
1767 if ref:
1768 raise CommitDoesNotExistError(
1768 raise CommitDoesNotExistError(
1769 u'No commit refs could be found matching: {}'.format(ref))
1769 u'No commit refs could be found matching: {}'.format(ref))
1770 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1770 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1771 selected = u'branch:{}:{}'.format(
1771 selected = u'branch:{}:{}'.format(
1772 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1772 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1773 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1773 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1774 )
1774 )
1775 elif repo.commit_ids:
1775 elif repo.commit_ids:
1776 # make the user select in this case
1776 # make the user select in this case
1777 selected = None
1777 selected = None
1778 else:
1778 else:
1779 raise EmptyRepositoryError()
1779 raise EmptyRepositoryError()
1780 return groups, selected
1780 return groups, selected
1781
1781
1782 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1782 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1783 hide_whitespace_changes, diff_context):
1783 hide_whitespace_changes, diff_context):
1784
1784
1785 return self._get_diff_from_pr_or_version(
1785 return self._get_diff_from_pr_or_version(
1786 source_repo, source_ref_id, target_ref_id,
1786 source_repo, source_ref_id, target_ref_id,
1787 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1787 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1788
1788
1789 def _get_diff_from_pr_or_version(
1789 def _get_diff_from_pr_or_version(
1790 self, source_repo, source_ref_id, target_ref_id,
1790 self, source_repo, source_ref_id, target_ref_id,
1791 hide_whitespace_changes, diff_context):
1791 hide_whitespace_changes, diff_context):
1792
1792
1793 target_commit = source_repo.get_commit(
1793 target_commit = source_repo.get_commit(
1794 commit_id=safe_str(target_ref_id))
1794 commit_id=safe_str(target_ref_id))
1795 source_commit = source_repo.get_commit(
1795 source_commit = source_repo.get_commit(
1796 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1796 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1797 if isinstance(source_repo, Repository):
1797 if isinstance(source_repo, Repository):
1798 vcs_repo = source_repo.scm_instance()
1798 vcs_repo = source_repo.scm_instance()
1799 else:
1799 else:
1800 vcs_repo = source_repo
1800 vcs_repo = source_repo
1801
1801
1802 # TODO: johbo: In the context of an update, we cannot reach
1802 # TODO: johbo: In the context of an update, we cannot reach
1803 # the old commit anymore with our normal mechanisms. It needs
1803 # the old commit anymore with our normal mechanisms. It needs
1804 # some sort of special support in the vcs layer to avoid this
1804 # some sort of special support in the vcs layer to avoid this
1805 # workaround.
1805 # workaround.
1806 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1806 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1807 vcs_repo.alias == 'git'):
1807 vcs_repo.alias == 'git'):
1808 source_commit.raw_id = safe_str(source_ref_id)
1808 source_commit.raw_id = safe_str(source_ref_id)
1809
1809
1810 log.debug('calculating diff between '
1810 log.debug('calculating diff between '
1811 'source_ref:%s and target_ref:%s for repo `%s`',
1811 'source_ref:%s and target_ref:%s for repo `%s`',
1812 target_ref_id, source_ref_id,
1812 target_ref_id, source_ref_id,
1813 safe_unicode(vcs_repo.path))
1813 safe_unicode(vcs_repo.path))
1814
1814
1815 vcs_diff = vcs_repo.get_diff(
1815 vcs_diff = vcs_repo.get_diff(
1816 commit1=target_commit, commit2=source_commit,
1816 commit1=target_commit, commit2=source_commit,
1817 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1817 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1818 return vcs_diff
1818 return vcs_diff
1819
1819
1820 def _is_merge_enabled(self, pull_request):
1820 def _is_merge_enabled(self, pull_request):
1821 return self._get_general_setting(
1821 return self._get_general_setting(
1822 pull_request, 'rhodecode_pr_merge_enabled')
1822 pull_request, 'rhodecode_pr_merge_enabled')
1823
1823
1824 def _use_rebase_for_merging(self, pull_request):
1824 def _use_rebase_for_merging(self, pull_request):
1825 repo_type = pull_request.target_repo.repo_type
1825 repo_type = pull_request.target_repo.repo_type
1826 if repo_type == 'hg':
1826 if repo_type == 'hg':
1827 return self._get_general_setting(
1827 return self._get_general_setting(
1828 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1828 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1829 elif repo_type == 'git':
1829 elif repo_type == 'git':
1830 return self._get_general_setting(
1830 return self._get_general_setting(
1831 pull_request, 'rhodecode_git_use_rebase_for_merging')
1831 pull_request, 'rhodecode_git_use_rebase_for_merging')
1832
1832
1833 return False
1833 return False
1834
1834
1835 def _user_name_for_merging(self, pull_request, user):
1835 def _user_name_for_merging(self, pull_request, user):
1836 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1836 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1837 if env_user_name_attr and hasattr(user, env_user_name_attr):
1837 if env_user_name_attr and hasattr(user, env_user_name_attr):
1838 user_name_attr = env_user_name_attr
1838 user_name_attr = env_user_name_attr
1839 else:
1839 else:
1840 user_name_attr = 'short_contact'
1840 user_name_attr = 'short_contact'
1841
1841
1842 user_name = getattr(user, user_name_attr)
1842 user_name = getattr(user, user_name_attr)
1843 return user_name
1843 return user_name
1844
1844
1845 def _close_branch_before_merging(self, pull_request):
1845 def _close_branch_before_merging(self, pull_request):
1846 repo_type = pull_request.target_repo.repo_type
1846 repo_type = pull_request.target_repo.repo_type
1847 if repo_type == 'hg':
1847 if repo_type == 'hg':
1848 return self._get_general_setting(
1848 return self._get_general_setting(
1849 pull_request, 'rhodecode_hg_close_branch_before_merging')
1849 pull_request, 'rhodecode_hg_close_branch_before_merging')
1850 elif repo_type == 'git':
1850 elif repo_type == 'git':
1851 return self._get_general_setting(
1851 return self._get_general_setting(
1852 pull_request, 'rhodecode_git_close_branch_before_merging')
1852 pull_request, 'rhodecode_git_close_branch_before_merging')
1853
1853
1854 return False
1854 return False
1855
1855
1856 def _get_general_setting(self, pull_request, settings_key, default=False):
1856 def _get_general_setting(self, pull_request, settings_key, default=False):
1857 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1857 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1858 settings = settings_model.get_general_settings()
1858 settings = settings_model.get_general_settings()
1859 return settings.get(settings_key, default)
1859 return settings.get(settings_key, default)
1860
1860
1861 def _log_audit_action(self, action, action_data, user, pull_request):
1861 def _log_audit_action(self, action, action_data, user, pull_request):
1862 audit_logger.store(
1862 audit_logger.store(
1863 action=action,
1863 action=action,
1864 action_data=action_data,
1864 action_data=action_data,
1865 user=user,
1865 user=user,
1866 repo=pull_request.target_repo)
1866 repo=pull_request.target_repo)
1867
1867
1868 def get_reviewer_functions(self):
1868 def get_reviewer_functions(self):
1869 """
1869 """
1870 Fetches functions for validation and fetching default reviewers.
1870 Fetches functions for validation and fetching default reviewers.
1871 If available we use the EE package, else we fallback to CE
1871 If available we use the EE package, else we fallback to CE
1872 package functions
1872 package functions
1873 """
1873 """
1874 try:
1874 try:
1875 from rc_reviewers.utils import get_default_reviewers_data
1875 from rc_reviewers.utils import get_default_reviewers_data
1876 from rc_reviewers.utils import validate_default_reviewers
1876 from rc_reviewers.utils import validate_default_reviewers
1877 except ImportError:
1877 except ImportError:
1878 from rhodecode.apps.repository.utils import get_default_reviewers_data
1878 from rhodecode.apps.repository.utils import get_default_reviewers_data
1879 from rhodecode.apps.repository.utils import validate_default_reviewers
1879 from rhodecode.apps.repository.utils import validate_default_reviewers
1880
1880
1881 return get_default_reviewers_data, validate_default_reviewers
1881 return get_default_reviewers_data, validate_default_reviewers
1882
1882
1883
1883
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys under which individual check failures are filed in error_details
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # calculated review status of the PR (set by validate())
        self.review_status = None
        # result of the merge simulation; None until merge_status() was run
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        # None means "no check failed yet"; push_error() flips it to True
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # error_key -> dict(details=..., error_type=..., message=...)
        self.error_details = OrderedDict()
        # source/target commit drift info, filled from get_flow_commits()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """Record one failed check and mark the whole MergeCheck as failed."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for `pull_request` as `auth_user` and return a
        populated MergeCheck instance.

        Checks, in order: WIP title marker, user merge permission, target
        branch permission rules, review approval status, unresolved TODOs,
        and finally the actual merge simulation via the shadow repository.
        With ``fail_early=True`` the method returns right after the first
        failed check instead of accumulating all failures.
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            # singular/plural message variants for translators
            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether source/target moved since the PR refs were stored
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            # refs can vanish (e.g. deleted branch); drift info stays empty
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return a dict describing how the merge would be performed:
        the merge strategy (rebase vs. explicit merge commit) and,
        when configured, what happens to the source branch.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
2066
2066
2067
2067
# Commit-count summary of a pull request update: commits added, kept in
# common, removed, and the new total.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

# File-level summary of a pull request update: files added/modified/removed.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
General Comments 0
You need to be logged in to leave comments. Login now