##// END OF EJS Templates
statsd/audit-logs: clean up push/pull user-agent code.
super-admin -
r4858:6f1f534f default
parent child Browse files
Show More
@@ -1,162 +1,163 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 23 import json
24 24 import logging
25 25
26 26 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
27 27 from rhodecode.lib.vcs.conf import settings as vcs_settings
28 28 from rhodecode.model.scm import ScmModel
29 29
30 30 log = logging.getLogger(__name__)
31 31
32 32
class VcsServer(object):
    """
    Base class for SSH-tunnelled VCS servers (git/hg/svn backends).

    Child classes set ``backend``, ``repo_user_agent``, ``_path`` and
    ``tunnel``; ``run()`` then drives permission checks and the tunnel
    subprocess with the hooks callback daemon around it.
    """
    repo_user_agent = None  # set in child classes
    _path = None  # set executable path for hg/git/svn binary
    backend = None  # set in child classes
    tunnel = None  # subprocess handling tunnel
    write_perms = ['repository.admin', 'repository.write']
    read_perms = ['repository.read', 'repository.admin', 'repository.write']

    def __init__(self, user, user_permissions, config, env):
        self.user = user
        self.user_permissions = user_permissions
        self.config = config
        self.env = env
        self.stdin = sys.stdin

        self.repo_name = None
        self.repo_mode = None
        self.store = ''
        self.ini_path = ''

    def _invalidate_cache(self, repo_name):
        """
        Sets cache for this repository for invalidation on next access

        :param repo_name: full repo name, also a cache key
        """
        ScmModel().mark_for_invalidation(repo_name)

    def has_write_perm(self):
        # only write or admin level grants write access
        permission = self.user_permissions.get(self.repo_name)
        if permission in ['repository.write', 'repository.admin']:
            return True

        return False

    def _check_permissions(self, action):
        """
        Return 0 if `action` ('pull' or anything else, treated as push)
        is allowed for the current user on self.repo_name, -2 otherwise.
        """
        permission = self.user_permissions.get(self.repo_name)
        log.debug('permission for %s on %s are: %s',
                  self.user, self.repo_name, permission)

        if not permission:
            log.error('user `%s` permissions to repo:%s are empty. Forbidding access.',
                      self.user, self.repo_name)
            return -2

        if action == 'pull':
            if permission in self.read_perms:
                log.info(
                    'READ Permissions for User "%s" detected to repo "%s"!',
                    self.user, self.repo_name)
                return 0
        else:
            if permission in self.write_perms:
                log.info(
                    'WRITE, or Higher Permissions for User "%s" detected to repo "%s"!',
                    self.user, self.repo_name)
                return 0

        log.error('Cannot properly fetch or verify user `%s` permissions. '
                  'Permissions: %s, vcs action: %s',
                  self.user, permission, action)
        return -2

    def update_environment(self, action, extras=None):
        """
        Export the hook payload as RC_SCM_DATA into the environment of
        the spawned vcs subprocess, so hooks can pick it up.
        """
        scm_data = {
            'ip': os.environ['SSH_CLIENT'].split()[0],
            'username': self.user.username,
            'user_id': self.user.user_id,
            'action': action,
            'repository': self.repo_name,
            'scm': self.backend,
            'config': self.ini_path,
            'repo_store': self.store,
            'make_lock': None,
            'locked_by': [None, None],
            'server_url': None,
            # per-backend agent, e.g. 'git/ssh-user-agent'
            'user_agent': '{}/ssh-user-agent'.format(self.repo_user_agent),
            'hooks': ['push', 'pull'],
            'hooks_module': 'rhodecode.lib.hooks_daemon',
            'is_shadow_repo': False,
            'detect_force_push': False,
            'check_branch_perms': False,

            'SSH': True,
            'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name),
        }
        if extras:
            scm_data.update(extras)
        os.putenv("RC_SCM_DATA", json.dumps(scm_data))

    def get_root_store(self):
        root_store = self.store
        if not root_store.endswith('/'):
            # always append trailing slash
            root_store = root_store + '/'
        return root_store

    def _handle_tunnel(self, extras):
        # pre-auth
        action = 'pull'
        exit_code = self._check_permissions(action)
        if exit_code:
            return exit_code, False

        req = self.env['request']
        server_url = req.host_url + req.script_name
        extras['server_url'] = server_url

        log.debug('Using %s binaries from path %s', self.backend, self._path)
        exit_code = self.tunnel.run(extras)

        return exit_code, action == "push"

    def run(self, tunnel_extras=None):
        tunnel_extras = tunnel_extras or {}
        extras = {}
        extras.update(tunnel_extras)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=False)

        with callback_daemon:
            try:
                return self._handle_tunnel(extras)
            finally:
                log.debug('Running cleanup with cache invalidation')
                if self.repo_name:
                    self._invalidate_cache(self.repo_name)
@@ -1,74 +1,75 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 23 import logging
24 24
25 25 from .base import VcsServer
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
class GitTunnelWrapper(object):
    """Drives the ``git`` binary for one SSH tunnel session."""
    process = None

    def __init__(self, server):
        self.server = server
        self.stdin = sys.stdin
        self.stdout = sys.stdout

    def create_hooks_env(self):
        # git needs no extra hooks environment file
        pass

    def command(self):
        store_root = self.server.get_root_store()
        cmd = "cd {root}; {git_path} {mode} '{root}{repo_name}'".format(
            root=store_root,
            git_path=self.server.git_path,
            mode=self.server.repo_mode,
            repo_name=self.server.repo_name,
        )
        log.debug("Final CMD: %s", cmd)
        return cmd

    def run(self, extras):
        # git invokes "receive-pack" on the push target, anything else is a pull
        if self.server.repo_mode == "receive-pack":
            action = "push"
        else:
            action = "pull"

        exit_code = self.server._check_permissions(action)
        if exit_code:
            return exit_code

        self.server.update_environment(action=action, extras=extras)
        self.create_hooks_env()
        return os.system(self.command())
58 58
59 59
class GitServer(VcsServer):
    """Git flavour of the SSH VCS server."""
    backend = 'git'
    repo_user_agent = 'git'

    def __init__(self, store, ini_path, repo_name, repo_mode,
                 user, user_permissions, config, env):
        super(GitServer, self).__init__(user, user_permissions, config, env)

        self.store = store
        self.ini_path = ini_path
        self.repo_name = repo_name
        self.repo_mode = repo_mode
        # resolve the git executable from the ini config
        self._path = self.git_path = config.get('app:main', 'ssh.executable.git')
        self.tunnel = GitTunnelWrapper(server=self)
@@ -1,147 +1,148 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 23 import logging
24 24 import tempfile
25 25 import textwrap
26 26 import collections
27 27 from .base import VcsServer
28 28 from rhodecode.model.db import RhodeCodeUi
29 29 from rhodecode.model.settings import VcsSettingsModel
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
class MercurialTunnelWrapper(object):
    """Drives ``hg serve --stdio`` for one SSH tunnel session."""
    process = None

    def __init__(self, server):
        self.server = server
        self.stdin = sys.stdin
        self.stdout = sys.stdout
        # temporary hgrc injected via HGRCPATH for this session only
        self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp(prefix='hgrc_rhodecode_')

    def create_hooks_env(self):
        """Write the session hgrc with RhodeCode hooks; return its path."""
        repo_name = self.server.repo_name
        hg_flags = self.server.config_to_hgrc(repo_name)

        content = textwrap.dedent(
            '''
            # RhodeCode SSH hooks version=2.0.0
            {custom}
            '''
        ).format(custom='\n'.join(hg_flags))

        store_root = self.server.get_root_store()
        hgrc_custom = os.path.join(store_root, repo_name, '.hg', 'hgrc_rhodecode')
        hgrc_main = os.path.join(store_root, repo_name, '.hg', 'hgrc')

        # cleanup custom hgrc file
        if os.path.isfile(hgrc_custom):
            with open(hgrc_custom, 'wb') as f:
                f.write('')
            log.debug('Cleanup custom hgrc file under %s', hgrc_custom)

        # write temp
        with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
            hooks_env_file.write(content)

        return self.hooks_env_path

    def remove_configs(self):
        os.remove(self.hooks_env_path)

    def command(self, hgrc_path):
        store_root = self.server.get_root_store()

        cmd = (
            "cd {root}; HGRCPATH={hgrc} {hg_path} -R {root}{repo_name} "
            "serve --stdio".format(
                root=store_root, hg_path=self.server.hg_path,
                repo_name=self.server.repo_name, hgrc=hgrc_path))
        log.debug("Final CMD: %s", cmd)
        return cmd

    def run(self, extras):
        # at this point we cannot tell, we do further ACL checks
        # inside the hooks
        action = '?'
        # permissions are check via `pre_push_ssh_auth` hook
        self.server.update_environment(action=action, extras=extras)
        session_hgrc = self.create_hooks_env()

        try:
            return os.system(self.command(session_hgrc))
        finally:
            self.remove_configs()
96 96
97 97
class MercurialServer(VcsServer):
    """Mercurial flavour of the SSH VCS server."""
    backend = 'hg'
    repo_user_agent = 'mercurial'
    # ui sections we forward into the generated hgrc
    cli_flags = ['phases', 'largefiles', 'extensions', 'experimental', 'hooks']

    def __init__(self, store, ini_path, repo_name, user, user_permissions, config, env):
        super(MercurialServer, self).__init__(user, user_permissions, config, env)

        self.store = store
        self.ini_path = ini_path
        self.repo_name = repo_name
        # resolve the hg executable from the ini config
        self._path = self.hg_path = config.get('app:main', 'ssh.executable.hg')
        self.tunnel = MercurialTunnelWrapper(server=self)

    def config_to_hgrc(self, repo_name):
        """Build the list of hgrc lines (hooks + allowed ui settings)."""
        ui_sections = collections.defaultdict(list)
        ui_settings = VcsSettingsModel(repo=repo_name).get_ui_settings(
            section=None, key=None)

        # write default hooks
        default_hooks = [
            ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
            ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
            ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),

            ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
            ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
        ]

        for hook_key, hook_value in default_hooks:
            ui_sections['hooks'].append((hook_key, hook_value))

        for entry in ui_settings:
            if not entry.active:
                continue
            sec, key = entry.section, entry.key

            if sec in self.cli_flags:
                # we want only custom hooks, so we skip builtins
                if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
                    continue
                ui_sections[sec].append([key, entry.value])

        flags = []
        for _sec, key_val in ui_sections.items():
            flags.append(' ')
            flags.append('[{}]'.format(_sec))
            for key, val in key_val:
                flags.append('{}= {}'.format(key, val))
        return flags
@@ -1,257 +1,258 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import sys
24 24 import logging
25 25 import signal
26 26 import tempfile
27 27 from subprocess import Popen, PIPE
28 28 import urlparse
29 29
30 30 from .base import VcsServer
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
class SubversionTunnelWrapper(object):
    """
    Drives ``svnserve -t`` (tunnel mode) for one SSH session, patching
    the first svn protocol request to extract the repository name.
    """
    process = None

    def __init__(self, server):
        self.server = server
        self.timeout = 30  # seconds before the first-response read is aborted
        self.stdin = sys.stdin
        self.stdout = sys.stdout
        self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp()
        self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp()

        self.read_only = True  # flag that we set to make the hooks readonly

    def create_svn_config(self):
        conf = (
            '[general]\n'
            'hooks-env = {}\n').format(self.hooks_env_path)
        with os.fdopen(self.svn_conf_fd, 'w') as config_file:
            config_file.write(conf)

    def create_hooks_env(self):
        env_content = (
            '[default]\n'
            'LANG = en_US.UTF-8\n')
        if self.read_only:
            env_content += 'SSH_READ_ONLY = 1\n'
        with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
            hooks_env_file.write(env_content)

    def remove_configs(self):
        os.remove(self.svn_conf_path)
        os.remove(self.hooks_env_path)

    def command(self):
        store_root = self.server.get_root_store()
        username = self.server.user.username

        cmd = [
            self.server.svn_path, '-t',
            '--config-file', self.svn_conf_path,
            '--tunnel-user', username,
            '-r', store_root]
        log.debug("Final CMD: %s", ' '.join(cmd))
        return cmd

    def start(self):
        cmd = self.command()
        self.process = Popen(' '.join(cmd), stdin=PIPE, shell=True)

    def sync(self):
        # forward client stdin byte-by-byte until svnserve exits or EOF
        while self.process.poll() is None:
            data = self.stdin.read(1)
            if not data:
                break
            self.process.stdin.write(data)
        self.remove_configs()

    @property
    def return_code(self):
        return self.process.returncode

    def get_first_client_response(self):
        # abort the blocking read if the client stalls past the timeout
        signal.signal(signal.SIGALRM, self.interrupt)
        signal.alarm(self.timeout)
        first_response = self._read_first_client_response()
        signal.alarm(0)
        return (self._parse_first_client_response(first_response)
                if first_response else None)

    def patch_first_client_response(self, response, **kwargs):
        """Re-emit the (possibly modified) first request to svnserve."""
        self.create_hooks_env()
        data = response.copy()
        data.update(kwargs)
        data['url'] = self._svn_string(data['url'])
        data['ra_client'] = self._svn_string(data['ra_client'])
        data['client'] = data['client'] or ''
        buffer_ = (
            "( {version} ( {capabilities} ) {url}{ra_client}"
            "( {client}) ) ".format(**data))
        self.process.stdin.write(buffer_)

    def fail(self, message):
        print("( failure ( ( 210005 {message} 0: 0 ) ) )".format(
            message=self._svn_string(message)))
        self.remove_configs()
        self.process.kill()
        return 1

    def interrupt(self, signum, frame):
        self.fail("Exited by timeout")

    def _svn_string(self, str_):
        # svn wire format: "<length>:<string> "
        if not str_:
            return ''
        return '{length}:{string} '.format(length=len(str_), string=str_)

    def _read_first_client_response(self):
        collected = ""
        brackets_stack = []
        while True:
            next_byte = self.stdin.read(1)
            collected += next_byte
            if next_byte == "(":
                brackets_stack.append(next_byte)
            elif next_byte == ")":
                brackets_stack.pop()
            elif next_byte == " " and not brackets_stack:
                # a space outside all parens terminates the first request
                break

        return collected

    def _parse_first_client_response(self, buffer_):
        """
        According to the Subversion RA protocol, the first request
        should look like:

        ( version:number ( cap:word ... ) url:string ? ra-client:string
        ( ? client:string ) )

        Please check https://svn.apache.org/repos/asf/subversion/trunk/subversion/libsvn_ra_svn/protocol
        """
        version_re = r'(?P<version>\d+)'
        capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)'
        url_re = r'\d+\:(?P<url>[\W\w]+)'
        ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)'
        client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*'
        regex = re.compile(
            r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}'
            r'\(\s{client}\)\s\)\s*$'.format(
                version=version_re, capabilities=capabilities_re,
                url=url_re, ra_client=ra_client_re, client=client_re))
        matcher = regex.match(buffer_)

        return matcher.groupdict() if matcher else None

    def _match_repo_name(self, url):
        """
        Given an server url, try to match it against ALL known repository names.
        This handles a tricky SVN case for SSH and subdir commits.
        E.g if our repo name is my-svn-repo, a svn commit on file in a subdir would
        result in the url with this subdir added.
        """
        # case 1 direct match, we don't do any "heavy" lookups
        if url in self.server.user_permissions:
            return url

        log.debug('Extracting repository name from subdir path %s', url)
        # case 2 we check all permissions, and match closes possible case...
        # NOTE(dan): In this case we only know that url has a subdir parts, it's safe
        # to assume that it will have the repo name as prefix, we ensure the prefix
        # for similar repositories isn't matched by adding a /
        # e.g subgroup/repo-name/ and subgroup/repo-name-1/ would work correct.
        for repo_name in self.server.user_permissions:
            repo_name_prefix = repo_name + '/'
            if url.startswith(repo_name_prefix):
                log.debug('Found prefix %s match, returning proper repository name',
                          repo_name_prefix)
                return repo_name

        return

    def run(self, extras):
        action = 'pull'
        self.create_svn_config()
        self.start()

        first_response = self.get_first_client_response()
        if not first_response:
            return self.fail("Repository name cannot be extracted")

        url_parts = urlparse.urlparse(first_response['url'])

        self.server.repo_name = self._match_repo_name(url_parts.path.strip('/'))

        exit_code = self.server._check_permissions(action)
        if exit_code:
            return exit_code

        # set the readonly flag to False if we have proper permissions
        if self.server.has_write_perm():
            self.read_only = False
        self.server.update_environment(action=action, extras=extras)

        self.patch_first_client_response(first_response)
        self.sync()
        return self.return_code
221 221
222 222
class SubversionServer(VcsServer):
    """Subversion flavour of the SSH VCS server."""
    backend = 'svn'
    repo_user_agent = 'svn'

    def __init__(self, store, ini_path, repo_name,
                 user, user_permissions, config, env):
        super(SubversionServer, self).__init__(user, user_permissions, config, env)
        self.store = store
        self.ini_path = ini_path
        # NOTE(dan): repo_name at this point is empty,
        # this is set later in .run() based from parsed input stream
        self.repo_name = repo_name
        # resolve the svn executable from the ini config
        self._path = self.svn_path = config.get('app:main', 'ssh.executable.svn')

        self.tunnel = SubversionTunnelWrapper(server=self)

    def _handle_tunnel(self, extras):
        # pre-auth
        action = 'pull'
        # Special case for SVN, we extract repo name at later stage
        # exit_code = self._check_permissions(action)
        # if exit_code:
        #     return exit_code, False

        req = self.env['request']
        extras['server_url'] = req.host_url + req.script_name

        log.debug('Using %s binaries from path %s', self.backend, self._path)
        tunnel_exit_code = self.tunnel.run(extras)

        return tunnel_exit_code, action == "push"
256 257
257 258
@@ -1,156 +1,156 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import os
23 23
24 24 import mock
25 25 import pytest
26 26
27 27 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
28 28 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
29 29
30 30
class GitServerCreator(object):
    """Factory producing GitServer instances with canned test config."""
    root = '/tmp/repo/path/'
    git_path = '/usr/local/bin/git'
    config_data = {
        'app:main': {
            'ssh.executable.git': git_path,
            'vcs.hooks.protocol': 'http',
        }
    }
    repo_name = 'test_git'
    repo_mode = 'receive-pack'
    user = plain_dummy_user()

    def __init__(self):
        def config_get(part, key):
            # emulate ConfigParser-style two-level lookup
            return self.config_data.get(part, {}).get(key)

        self.config_mock = mock.Mock()
        self.config_mock.get = mock.Mock(side_effect=config_get)

    def create(self, **kwargs):
        defaults = {
            'store': self.root,
            'ini_path': '',
            'user': self.user,
            'repo_name': self.repo_name,
            'repo_mode': self.repo_mode,
            'user_permissions': {
                self.repo_name: 'repository.admin'
            },
            'config': self.config_mock,
            'env': plain_dummy_env()
        }
        defaults.update(kwargs)
        return GitServer(**defaults)
66 66
67 67
@pytest.fixture()
def git_server(app):
    """Yield a fresh GitServerCreator for each test."""
    return GitServerCreator()
71 71
72 72
class TestGitServer(object):
    """Tests for the GitServer SSH backend."""

    def test_command(self, git_server):
        server = git_server.create()
        expected_command = (
            'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
                root=git_server.root, git_path=git_server.git_path,
                repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
        )
        assert expected_command == server.tunnel.command()

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test_git': 'repository.read'}, 'pull', 0),
        ({'test_git': 'repository.read'}, 'push', -2),
        ({'test_git': 'repository.write'}, 'push', 0),
        ({'test_git': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, git_server, permissions, action, code):
        server = git_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, value', [
        ({}, False),
        ({'test_git': 'repository.read'}, False),
        ({'test_git': 'repository.write'}, True),
        ({'test_git': 'repository.admin'}, True),

    ])
    def test_has_write_permissions(self, git_server, permissions, value):
        server = git_server.create(user_permissions=permissions)
        result = server.has_write_perm()
        assert result is value

    def test_run_returns_executes_command(self, git_server):
        server = git_server.create()
        from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper

        os.environ['SSH_CLIENT'] = '127.0.0.1'
        with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
            _patch.return_value = 0
            with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
                exit_code = server.run()

        assert exit_code == (0, False)

    @pytest.mark.parametrize(
        'repo_mode, action', [
            ['receive-pack', 'push'],
            ['upload-pack', 'pull']
        ])
    def test_update_environment(self, git_server, repo_mode, action):
        server = git_server.create(repo_mode=repo_mode)
        store = server.store

        with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
            with mock.patch('os.putenv') as putenv_mock:
                server.update_environment(action)

        expected_data = {
            'username': git_server.user.username,
            'user_id': git_server.user.user_id,
            'scm': 'git',
            'repository': git_server.repo_name,
            'make_lock': None,
            'action': action,
            'ip': '10.10.10.10',
            'locked_by': [None, None],
            'config': '',
            'repo_store': store,
            'server_url': None,
            'hooks': ['push', 'pull'],
            'is_shadow_repo': False,
            'hooks_module': 'rhodecode.lib.hooks_daemon',
            'check_branch_perms': False,
            'detect_force_push': False,
            # backend-prefixed agent matching VcsServer.update_environment
            'user_agent': u'git/ssh-user-agent',
            'SSH': True,
            'SSH_PERMISSIONS': 'repository.admin',
        }
        args, kwargs = putenv_mock.call_args
        assert json.loads(args[1]) == expected_data
@@ -1,535 +1,538 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Set of hooks run by RhodeCode Enterprise
24 24 """
25 25
26 26 import os
27 27 import logging
28 28
29 29 import rhodecode
30 30 from rhodecode import events
31 31 from rhodecode.lib import helpers as h
32 32 from rhodecode.lib import audit_logger
33 from rhodecode.lib.utils2 import safe_str
33 from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
34 34 from rhodecode.lib.exceptions import (
35 35 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
36 36 from rhodecode.model.db import Repository, User
37 37 from rhodecode.lib.statsd_client import StatsdClient
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 class HookResponse(object):
43 43 def __init__(self, status, output):
44 44 self.status = status
45 45 self.output = output
46 46
47 47 def __add__(self, other):
48 48 other_status = getattr(other, 'status', 0)
49 49 new_status = max(self.status, other_status)
50 50 other_output = getattr(other, 'output', '')
51 51 new_output = self.output + other_output
52 52
53 53 return HookResponse(new_status, new_output)
54 54
55 55 def __bool__(self):
56 56 return self.status == 0
57 57
58 58
59 59 def is_shadow_repo(extras):
60 60 """
61 61 Returns ``True`` if this is an action executed against a shadow repository.
62 62 """
63 63 return extras['is_shadow_repo']
64 64
65 65
66 66 def _get_scm_size(alias, root_path):
67 67
68 68 if not alias.startswith('.'):
69 69 alias += '.'
70 70
71 71 size_scm, size_root = 0, 0
72 72 for path, unused_dirs, files in os.walk(safe_str(root_path)):
73 73 if path.find(alias) != -1:
74 74 for f in files:
75 75 try:
76 76 size_scm += os.path.getsize(os.path.join(path, f))
77 77 except OSError:
78 78 pass
79 79 else:
80 80 for f in files:
81 81 try:
82 82 size_root += os.path.getsize(os.path.join(path, f))
83 83 except OSError:
84 84 pass
85 85
86 86 size_scm_f = h.format_byte_size_binary(size_scm)
87 87 size_root_f = h.format_byte_size_binary(size_root)
88 88 size_total_f = h.format_byte_size_binary(size_root + size_scm)
89 89
90 90 return size_scm_f, size_root_f, size_total_f
91 91
92 92
93 93 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
94 94 def repo_size(extras):
95 95 """Present size of repository after push."""
96 96 repo = Repository.get_by_repo_name(extras.repository)
97 97 vcs_part = safe_str(u'.%s' % repo.repo_type)
98 98 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
99 99 repo.repo_full_path)
100 100 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
101 101 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
102 102 return HookResponse(0, msg)
103 103
104 104
105 105 def pre_push(extras):
106 106 """
107 107 Hook executed before pushing code.
108 108
109 109 It bans pushing when the repository is locked.
110 110 """
111 111
112 112 user = User.get_by_username(extras.username)
113 113 output = ''
114 114 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
115 115 locked_by = User.get(extras.locked_by[0]).username
116 116 reason = extras.locked_by[2]
117 117 # this exception is interpreted in git/hg middlewares and based
118 118 # on that proper return code is server to client
119 119 _http_ret = HTTPLockedRC(
120 120 _locked_by_explanation(extras.repository, locked_by, reason))
121 121 if str(_http_ret.code).startswith('2'):
122 122 # 2xx Codes don't raise exceptions
123 123 output = _http_ret.title
124 124 else:
125 125 raise _http_ret
126 126
127 127 hook_response = ''
128 128 if not is_shadow_repo(extras):
129 129 if extras.commit_ids and extras.check_branch_perms:
130 130
131 131 auth_user = user.AuthUser()
132 132 repo = Repository.get_by_repo_name(extras.repository)
133 133 affected_branches = []
134 134 if repo.repo_type == 'hg':
135 135 for entry in extras.commit_ids:
136 136 if entry['type'] == 'branch':
137 137 is_forced = bool(entry['multiple_heads'])
138 138 affected_branches.append([entry['name'], is_forced])
139 139 elif repo.repo_type == 'git':
140 140 for entry in extras.commit_ids:
141 141 if entry['type'] == 'heads':
142 142 is_forced = bool(entry['pruned_sha'])
143 143 affected_branches.append([entry['name'], is_forced])
144 144
145 145 for branch_name, is_forced in affected_branches:
146 146
147 147 rule, branch_perm = auth_user.get_rule_and_branch_permission(
148 148 extras.repository, branch_name)
149 149 if not branch_perm:
150 150 # no branch permission found for this branch, just keep checking
151 151 continue
152 152
153 153 if branch_perm == 'branch.push_force':
154 154 continue
155 155 elif branch_perm == 'branch.push' and is_forced is False:
156 156 continue
157 157 elif branch_perm == 'branch.push' and is_forced is True:
158 158 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
159 159 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
160 160 else:
161 161 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
162 162 branch_name, rule)
163 163
164 164 if halt_message:
165 165 _http_ret = HTTPBranchProtected(halt_message)
166 166 raise _http_ret
167 167
168 168 # Propagate to external components. This is done after checking the
169 169 # lock, for consistent behavior.
170 170 hook_response = pre_push_extension(
171 171 repo_store_path=Repository.base_path(), **extras)
172 172 events.trigger(events.RepoPrePushEvent(
173 173 repo_name=extras.repository, extras=extras))
174 174
175 175 return HookResponse(0, output) + hook_response
176 176
177 177
178 178 def pre_pull(extras):
179 179 """
180 180 Hook executed before pulling the code.
181 181
182 182 It bans pulling when the repository is locked.
183 183 """
184 184
185 185 output = ''
186 186 if extras.locked_by[0]:
187 187 locked_by = User.get(extras.locked_by[0]).username
188 188 reason = extras.locked_by[2]
189 189 # this exception is interpreted in git/hg middlewares and based
190 190 # on that proper return code is server to client
191 191 _http_ret = HTTPLockedRC(
192 192 _locked_by_explanation(extras.repository, locked_by, reason))
193 193 if str(_http_ret.code).startswith('2'):
194 194 # 2xx Codes don't raise exceptions
195 195 output = _http_ret.title
196 196 else:
197 197 raise _http_ret
198 198
199 199 # Propagate to external components. This is done after checking the
200 200 # lock, for consistent behavior.
201 201 hook_response = ''
202 202 if not is_shadow_repo(extras):
203 203 extras.hook_type = extras.hook_type or 'pre_pull'
204 204 hook_response = pre_pull_extension(
205 205 repo_store_path=Repository.base_path(), **extras)
206 206 events.trigger(events.RepoPrePullEvent(
207 207 repo_name=extras.repository, extras=extras))
208 208
209 209 return HookResponse(0, output) + hook_response
210 210
211 211
212 212 def post_pull(extras):
213 213 """Hook executed after client pulls the code."""
214 214
215 215 audit_user = audit_logger.UserWrap(
216 216 username=extras.username,
217 217 ip_addr=extras.ip)
218 218 repo = audit_logger.RepoWrap(repo_name=extras.repository)
219 219 audit_logger.store(
220 220 'user.pull', action_data={'user_agent': extras.user_agent},
221 221 user=audit_user, repo=repo, commit=True)
222 222
223 223 statsd = StatsdClient.statsd
224 224 if statsd:
225 statsd.incr('rhodecode_pull_total')
226
225 statsd.incr('rhodecode_pull_total', tags=[
226 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)),
227 ])
227 228 output = ''
228 229 # make lock is a tri state False, True, None. We only make lock on True
229 230 if extras.make_lock is True and not is_shadow_repo(extras):
230 231 user = User.get_by_username(extras.username)
231 232 Repository.lock(Repository.get_by_repo_name(extras.repository),
232 233 user.user_id,
233 234 lock_reason=Repository.LOCK_PULL)
234 235 msg = 'Made lock on repo `%s`' % (extras.repository,)
235 236 output += msg
236 237
237 238 if extras.locked_by[0]:
238 239 locked_by = User.get(extras.locked_by[0]).username
239 240 reason = extras.locked_by[2]
240 241 _http_ret = HTTPLockedRC(
241 242 _locked_by_explanation(extras.repository, locked_by, reason))
242 243 if str(_http_ret.code).startswith('2'):
243 244 # 2xx Codes don't raise exceptions
244 245 output += _http_ret.title
245 246
246 247 # Propagate to external components.
247 248 hook_response = ''
248 249 if not is_shadow_repo(extras):
249 250 extras.hook_type = extras.hook_type or 'post_pull'
250 251 hook_response = post_pull_extension(
251 252 repo_store_path=Repository.base_path(), **extras)
252 253 events.trigger(events.RepoPullEvent(
253 254 repo_name=extras.repository, extras=extras))
254 255
255 256 return HookResponse(0, output) + hook_response
256 257
257 258
258 259 def post_push(extras):
259 260 """Hook executed after user pushes to the repository."""
260 261 commit_ids = extras.commit_ids
261 262
262 263 # log the push call
263 264 audit_user = audit_logger.UserWrap(
264 265 username=extras.username, ip_addr=extras.ip)
265 266 repo = audit_logger.RepoWrap(repo_name=extras.repository)
266 267 audit_logger.store(
267 268 'user.push', action_data={
268 269 'user_agent': extras.user_agent,
269 270 'commit_ids': commit_ids[:400]},
270 271 user=audit_user, repo=repo, commit=True)
271 272
272 273 statsd = StatsdClient.statsd
273 274 if statsd:
274 statsd.incr('rhodecode_push_total')
275 statsd.incr('rhodecode_push_total', tags=[
276 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)),
277 ])
275 278
276 279 # Propagate to external components.
277 280 output = ''
278 281 # make lock is a tri state False, True, None. We only release lock on False
279 282 if extras.make_lock is False and not is_shadow_repo(extras):
280 283 Repository.unlock(Repository.get_by_repo_name(extras.repository))
281 284 msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository))
282 285 output += msg
283 286
284 287 if extras.locked_by[0]:
285 288 locked_by = User.get(extras.locked_by[0]).username
286 289 reason = extras.locked_by[2]
287 290 _http_ret = HTTPLockedRC(
288 291 _locked_by_explanation(extras.repository, locked_by, reason))
289 292 # TODO: johbo: if not?
290 293 if str(_http_ret.code).startswith('2'):
291 294 # 2xx Codes don't raise exceptions
292 295 output += _http_ret.title
293 296
294 297 if extras.new_refs:
295 298 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
296 299 safe_str(extras.server_url), safe_str(extras.repository))
297 300
298 301 for branch_name in extras.new_refs['branches']:
299 302 output += 'RhodeCode: open pull request link: {}\n'.format(
300 303 tmpl.format(ref_type='branch', ref_name=safe_str(branch_name)))
301 304
302 305 for book_name in extras.new_refs['bookmarks']:
303 306 output += 'RhodeCode: open pull request link: {}\n'.format(
304 307 tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name)))
305 308
306 309 hook_response = ''
307 310 if not is_shadow_repo(extras):
308 311 hook_response = post_push_extension(
309 312 repo_store_path=Repository.base_path(),
310 313 **extras)
311 314 events.trigger(events.RepoPushEvent(
312 315 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
313 316
314 317 output += 'RhodeCode: push completed\n'
315 318 return HookResponse(0, output) + hook_response
316 319
317 320
318 321 def _locked_by_explanation(repo_name, user_name, reason):
319 322 message = (
320 323 'Repository `%s` locked by user `%s`. Reason:`%s`'
321 324 % (repo_name, user_name, reason))
322 325 return message
323 326
324 327
325 328 def check_allowed_create_user(user_dict, created_by, **kwargs):
326 329 # pre create hooks
327 330 if pre_create_user.is_active():
328 331 hook_result = pre_create_user(created_by=created_by, **user_dict)
329 332 allowed = hook_result.status == 0
330 333 if not allowed:
331 334 reason = hook_result.output
332 335 raise UserCreationError(reason)
333 336
334 337
335 338 class ExtensionCallback(object):
336 339 """
337 340 Forwards a given call to rcextensions, sanitizes keyword arguments.
338 341
339 342 Does check if there is an extension active for that hook. If it is
340 343 there, it will forward all `kwargs_keys` keyword arguments to the
341 344 extension callback.
342 345 """
343 346
344 347 def __init__(self, hook_name, kwargs_keys):
345 348 self._hook_name = hook_name
346 349 self._kwargs_keys = set(kwargs_keys)
347 350
348 351 def __call__(self, *args, **kwargs):
349 352 log.debug('Calling extension callback for `%s`', self._hook_name)
350 353 callback = self._get_callback()
351 354 if not callback:
352 355 log.debug('extension callback `%s` not found, skipping...', self._hook_name)
353 356 return
354 357
355 358 kwargs_to_pass = {}
356 359 for key in self._kwargs_keys:
357 360 try:
358 361 kwargs_to_pass[key] = kwargs[key]
359 362 except KeyError:
360 363 log.error('Failed to fetch %s key from given kwargs. '
361 364 'Expected keys: %s', key, self._kwargs_keys)
362 365 raise
363 366
364 367 # backward compat for removed api_key for old hooks. This was it works
365 368 # with older rcextensions that require api_key present
366 369 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
367 370 kwargs_to_pass['api_key'] = '_DEPRECATED_'
368 371 return callback(**kwargs_to_pass)
369 372
370 373 def is_active(self):
371 374 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
372 375
373 376 def _get_callback(self):
374 377 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
375 378
376 379
377 380 pre_pull_extension = ExtensionCallback(
378 381 hook_name='PRE_PULL_HOOK',
379 382 kwargs_keys=(
380 383 'server_url', 'config', 'scm', 'username', 'ip', 'action',
381 384 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
382 385
383 386
384 387 post_pull_extension = ExtensionCallback(
385 388 hook_name='PULL_HOOK',
386 389 kwargs_keys=(
387 390 'server_url', 'config', 'scm', 'username', 'ip', 'action',
388 391 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
389 392
390 393
391 394 pre_push_extension = ExtensionCallback(
392 395 hook_name='PRE_PUSH_HOOK',
393 396 kwargs_keys=(
394 397 'server_url', 'config', 'scm', 'username', 'ip', 'action',
395 398 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
396 399
397 400
398 401 post_push_extension = ExtensionCallback(
399 402 hook_name='PUSH_HOOK',
400 403 kwargs_keys=(
401 404 'server_url', 'config', 'scm', 'username', 'ip', 'action',
402 405 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
403 406
404 407
405 408 pre_create_user = ExtensionCallback(
406 409 hook_name='PRE_CREATE_USER_HOOK',
407 410 kwargs_keys=(
408 411 'username', 'password', 'email', 'firstname', 'lastname', 'active',
409 412 'admin', 'created_by'))
410 413
411 414
412 415 create_pull_request = ExtensionCallback(
413 416 hook_name='CREATE_PULL_REQUEST',
414 417 kwargs_keys=(
415 418 'server_url', 'config', 'scm', 'username', 'ip', 'action',
416 419 'repository', 'pull_request_id', 'url', 'title', 'description',
417 420 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
418 421 'mergeable', 'source', 'target', 'author', 'reviewers'))
419 422
420 423
421 424 merge_pull_request = ExtensionCallback(
422 425 hook_name='MERGE_PULL_REQUEST',
423 426 kwargs_keys=(
424 427 'server_url', 'config', 'scm', 'username', 'ip', 'action',
425 428 'repository', 'pull_request_id', 'url', 'title', 'description',
426 429 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
427 430 'mergeable', 'source', 'target', 'author', 'reviewers'))
428 431
429 432
430 433 close_pull_request = ExtensionCallback(
431 434 hook_name='CLOSE_PULL_REQUEST',
432 435 kwargs_keys=(
433 436 'server_url', 'config', 'scm', 'username', 'ip', 'action',
434 437 'repository', 'pull_request_id', 'url', 'title', 'description',
435 438 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
436 439 'mergeable', 'source', 'target', 'author', 'reviewers'))
437 440
438 441
439 442 review_pull_request = ExtensionCallback(
440 443 hook_name='REVIEW_PULL_REQUEST',
441 444 kwargs_keys=(
442 445 'server_url', 'config', 'scm', 'username', 'ip', 'action',
443 446 'repository', 'pull_request_id', 'url', 'title', 'description',
444 447 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
445 448 'mergeable', 'source', 'target', 'author', 'reviewers'))
446 449
447 450
448 451 comment_pull_request = ExtensionCallback(
449 452 hook_name='COMMENT_PULL_REQUEST',
450 453 kwargs_keys=(
451 454 'server_url', 'config', 'scm', 'username', 'ip', 'action',
452 455 'repository', 'pull_request_id', 'url', 'title', 'description',
453 456 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
454 457 'mergeable', 'source', 'target', 'author', 'reviewers'))
455 458
456 459
457 460 comment_edit_pull_request = ExtensionCallback(
458 461 hook_name='COMMENT_EDIT_PULL_REQUEST',
459 462 kwargs_keys=(
460 463 'server_url', 'config', 'scm', 'username', 'ip', 'action',
461 464 'repository', 'pull_request_id', 'url', 'title', 'description',
462 465 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
463 466 'mergeable', 'source', 'target', 'author', 'reviewers'))
464 467
465 468
466 469 update_pull_request = ExtensionCallback(
467 470 hook_name='UPDATE_PULL_REQUEST',
468 471 kwargs_keys=(
469 472 'server_url', 'config', 'scm', 'username', 'ip', 'action',
470 473 'repository', 'pull_request_id', 'url', 'title', 'description',
471 474 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
472 475 'mergeable', 'source', 'target', 'author', 'reviewers'))
473 476
474 477
475 478 create_user = ExtensionCallback(
476 479 hook_name='CREATE_USER_HOOK',
477 480 kwargs_keys=(
478 481 'username', 'full_name_or_username', 'full_contact', 'user_id',
479 482 'name', 'firstname', 'short_contact', 'admin', 'lastname',
480 483 'ip_addresses', 'extern_type', 'extern_name',
481 484 'email', 'api_keys', 'last_login',
482 485 'full_name', 'active', 'password', 'emails',
483 486 'inherit_default_permissions', 'created_by', 'created_on'))
484 487
485 488
486 489 delete_user = ExtensionCallback(
487 490 hook_name='DELETE_USER_HOOK',
488 491 kwargs_keys=(
489 492 'username', 'full_name_or_username', 'full_contact', 'user_id',
490 493 'name', 'firstname', 'short_contact', 'admin', 'lastname',
491 494 'ip_addresses',
492 495 'email', 'last_login',
493 496 'full_name', 'active', 'password', 'emails',
494 497 'inherit_default_permissions', 'deleted_by'))
495 498
496 499
497 500 create_repository = ExtensionCallback(
498 501 hook_name='CREATE_REPO_HOOK',
499 502 kwargs_keys=(
500 503 'repo_name', 'repo_type', 'description', 'private', 'created_on',
501 504 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
502 505 'clone_uri', 'fork_id', 'group_id', 'created_by'))
503 506
504 507
505 508 delete_repository = ExtensionCallback(
506 509 hook_name='DELETE_REPO_HOOK',
507 510 kwargs_keys=(
508 511 'repo_name', 'repo_type', 'description', 'private', 'created_on',
509 512 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
510 513 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
511 514
512 515
513 516 comment_commit_repository = ExtensionCallback(
514 517 hook_name='COMMENT_COMMIT_REPO_HOOK',
515 518 kwargs_keys=(
516 519 'repo_name', 'repo_type', 'description', 'private', 'created_on',
517 520 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
518 521 'clone_uri', 'fork_id', 'group_id',
519 522 'repository', 'created_by', 'comment', 'commit'))
520 523
521 524 comment_edit_commit_repository = ExtensionCallback(
522 525 hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK',
523 526 kwargs_keys=(
524 527 'repo_name', 'repo_type', 'description', 'private', 'created_on',
525 528 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
526 529 'clone_uri', 'fork_id', 'group_id',
527 530 'repository', 'created_by', 'comment', 'commit'))
528 531
529 532
530 533 create_repository_group = ExtensionCallback(
531 534 hook_name='CREATE_REPO_GROUP_HOOK',
532 535 kwargs_keys=(
533 536 'group_name', 'group_parent_id', 'group_description',
534 537 'group_id', 'user_id', 'created_by', 'created_on',
535 538 'enable_locking'))
@@ -1,1148 +1,1165 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Some simple helper functions
24 24 """
25 25
26 26 import collections
27 27 import datetime
28 28 import dateutil.relativedelta
29 29 import hashlib
30 30 import logging
31 31 import re
32 32 import sys
33 33 import time
34 34 import urllib
35 35 import urlobject
36 36 import uuid
37 37 import getpass
38 38 from functools import update_wrapper, partial, wraps
39 39
40 40 import pygments.lexers
41 41 import sqlalchemy
42 42 import sqlalchemy.engine.url
43 43 import sqlalchemy.exc
44 44 import sqlalchemy.sql
45 45 import webob
46 46 import pyramid.threadlocal
47 47 from pyramid import compat
48 48 from pyramid.settings import asbool
49 49
50 50 import rhodecode
51 51 from rhodecode.translation import _, _pluralize
52 52
53 53
54 54 def md5(s):
55 55 return hashlib.md5(s).hexdigest()
56 56
57 57
58 58 def md5_safe(s):
59 59 return md5(safe_str(s))
60 60
61 61
62 62 def sha1(s):
63 63 return hashlib.sha1(s).hexdigest()
64 64
65 65
66 66 def sha1_safe(s):
67 67 return sha1(safe_str(s))
68 68
69 69
70 70 def __get_lem(extra_mapping=None):
71 71 """
72 72 Get language extension map based on what's inside pygments lexers
73 73 """
74 74 d = collections.defaultdict(lambda: [])
75 75
76 76 def __clean(s):
77 77 s = s.lstrip('*')
78 78 s = s.lstrip('.')
79 79
80 80 if s.find('[') != -1:
81 81 exts = []
82 82 start, stop = s.find('['), s.find(']')
83 83
84 84 for suffix in s[start + 1:stop]:
85 85 exts.append(s[:s.find('[')] + suffix)
86 86 return [e.lower() for e in exts]
87 87 else:
88 88 return [s.lower()]
89 89
90 90 for lx, t in sorted(pygments.lexers.LEXERS.items()):
91 91 m = map(__clean, t[-2])
92 92 if m:
93 93 m = reduce(lambda x, y: x + y, m)
94 94 for ext in m:
95 95 desc = lx.replace('Lexer', '')
96 96 d[ext].append(desc)
97 97
98 98 data = dict(d)
99 99
100 100 extra_mapping = extra_mapping or {}
101 101 if extra_mapping:
102 102 for k, v in extra_mapping.items():
103 103 if k not in data:
104 104 # register new mapping2lexer
105 105 data[k] = [v]
106 106
107 107 return data
108 108
109 109
110 110 def str2bool(_str):
111 111 """
112 112 returns True/False value from given string, it tries to translate the
113 113 string into boolean
114 114
115 115 :param _str: string value to translate into boolean
116 116 :rtype: boolean
117 117 :returns: boolean from given string
118 118 """
119 119 if _str is None:
120 120 return False
121 121 if _str in (True, False):
122 122 return _str
123 123 _str = str(_str).strip().lower()
124 124 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
125 125
126 126
127 127 def aslist(obj, sep=None, strip=True):
128 128 """
129 129 Returns given string separated by sep as list
130 130
131 131 :param obj:
132 132 :param sep:
133 133 :param strip:
134 134 """
135 135 if isinstance(obj, (basestring,)):
136 136 lst = obj.split(sep)
137 137 if strip:
138 138 lst = [v.strip() for v in lst]
139 139 return lst
140 140 elif isinstance(obj, (list, tuple)):
141 141 return obj
142 142 elif obj is None:
143 143 return []
144 144 else:
145 145 return [obj]
146 146
147 147
148 148 def convert_line_endings(line, mode):
149 149 """
150 150 Converts a given line "line end" accordingly to given mode
151 151
152 152 Available modes are::
153 153 0 - Unix
154 154 1 - Mac
155 155 2 - DOS
156 156
157 157 :param line: given line to convert
158 158 :param mode: mode to convert to
159 159 :rtype: str
160 160 :return: converted line according to mode
161 161 """
162 162 if mode == 0:
163 163 line = line.replace('\r\n', '\n')
164 164 line = line.replace('\r', '\n')
165 165 elif mode == 1:
166 166 line = line.replace('\r\n', '\r')
167 167 line = line.replace('\n', '\r')
168 168 elif mode == 2:
169 169 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
170 170 return line
171 171
172 172
173 173 def detect_mode(line, default):
174 174 """
175 175 Detects line break for given line, if line break couldn't be found
176 176 given default value is returned
177 177
178 178 :param line: str line
179 179 :param default: default
180 180 :rtype: int
181 181 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
182 182 """
183 183 if line.endswith('\r\n'):
184 184 return 2
185 185 elif line.endswith('\n'):
186 186 return 0
187 187 elif line.endswith('\r'):
188 188 return 1
189 189 else:
190 190 return default
191 191
192 192
193 193 def safe_int(val, default=None):
194 194 """
195 195 Returns int() of val if val is not convertable to int use default
196 196 instead
197 197
198 198 :param val:
199 199 :param default:
200 200 """
201 201
202 202 try:
203 203 val = int(val)
204 204 except (ValueError, TypeError):
205 205 val = default
206 206
207 207 return val
208 208
209 209
210 210 def safe_unicode(str_, from_encoding=None, use_chardet=False):
211 211 """
212 212 safe unicode function. Does few trick to turn str_ into unicode
213 213
214 214 In case of UnicodeDecode error, we try to return it with encoding detected
215 215 by chardet library if it fails fallback to unicode with errors replaced
216 216
217 217 :param str_: string to decode
218 218 :rtype: unicode
219 219 :returns: unicode object
220 220 """
221 221 if isinstance(str_, unicode):
222 222 return str_
223 223
224 224 if not from_encoding:
225 225 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
226 226 'utf8'), sep=',')
227 227 from_encoding = DEFAULT_ENCODINGS
228 228
229 229 if not isinstance(from_encoding, (list, tuple)):
230 230 from_encoding = [from_encoding]
231 231
232 232 try:
233 233 return unicode(str_)
234 234 except UnicodeDecodeError:
235 235 pass
236 236
237 237 for enc in from_encoding:
238 238 try:
239 239 return unicode(str_, enc)
240 240 except UnicodeDecodeError:
241 241 pass
242 242
243 243 if use_chardet:
244 244 try:
245 245 import chardet
246 246 encoding = chardet.detect(str_)['encoding']
247 247 if encoding is None:
248 248 raise Exception()
249 249 return str_.decode(encoding)
250 250 except (ImportError, UnicodeDecodeError, Exception):
251 251 return unicode(str_, from_encoding[0], 'replace')
252 252 else:
253 253 return unicode(str_, from_encoding[0], 'replace')
254 254
255 255 def safe_str(unicode_, to_encoding=None, use_chardet=False):
256 256 """
257 257 safe str function. Does few trick to turn unicode_ into string
258 258
259 259 In case of UnicodeEncodeError, we try to return it with encoding detected
260 260 by chardet library if it fails fallback to string with errors replaced
261 261
262 262 :param unicode_: unicode to encode
263 263 :rtype: str
264 264 :returns: str object
265 265 """
266 266
267 267 # if it's not basestr cast to str
268 268 if not isinstance(unicode_, compat.string_types):
269 269 return str(unicode_)
270 270
271 271 if isinstance(unicode_, str):
272 272 return unicode_
273 273
274 274 if not to_encoding:
275 275 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
276 276 'utf8'), sep=',')
277 277 to_encoding = DEFAULT_ENCODINGS
278 278
279 279 if not isinstance(to_encoding, (list, tuple)):
280 280 to_encoding = [to_encoding]
281 281
282 282 for enc in to_encoding:
283 283 try:
284 284 return unicode_.encode(enc)
285 285 except UnicodeEncodeError:
286 286 pass
287 287
288 288 if use_chardet:
289 289 try:
290 290 import chardet
291 291 encoding = chardet.detect(unicode_)['encoding']
292 292 if encoding is None:
293 293 raise UnicodeEncodeError()
294 294
295 295 return unicode_.encode(encoding)
296 296 except (ImportError, UnicodeEncodeError):
297 297 return unicode_.encode(to_encoding[0], 'replace')
298 298 else:
299 299 return unicode_.encode(to_encoding[0], 'replace')
300 300
301 301
302 302 def remove_suffix(s, suffix):
303 303 if s.endswith(suffix):
304 304 s = s[:-1 * len(suffix)]
305 305 return s
306 306
307 307
308 308 def remove_prefix(s, prefix):
309 309 if s.startswith(prefix):
310 310 s = s[len(prefix):]
311 311 return s
312 312
313 313
314 314 def find_calling_context(ignore_modules=None):
315 315 """
316 316 Look through the calling stack and return the frame which called
317 317 this function and is part of core module ( ie. rhodecode.* )
318 318
319 319 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
320 320 """
321 321
322 322 ignore_modules = ignore_modules or []
323 323
324 324 f = sys._getframe(2)
325 325 while f.f_back is not None:
326 326 name = f.f_globals.get('__name__')
327 327 if name and name.startswith(__name__.split('.')[0]):
328 328 if name not in ignore_modules:
329 329 return f
330 330 f = f.f_back
331 331 return None
332 332
333 333
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` listener: issue a cheap ``SELECT 1`` on
    checkout to detect stale connections and transparently re-establish them.

    :param connection: sqlalchemy Connection being checked out
    :param branch: True when this is a sub-connection of an existing one
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    save_should_close_with_result = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # catch SQLAlchemy's DBAPIError, which is a wrapper
        # for the DBAPI's exception. It includes a .connection_invalidated
        # attribute which specifies if this connection is a "disconnect"
        # condition, which is based on inspection of the original exception
        # by the dialect in use.
        if err.connection_invalidated:
            # run the same SELECT again - the connection will re-validate
            # itself and establish a new connection. The disconnect detection
            # here also causes the whole connection pool to be invalidated
            # so that all stale connections are discarded.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # restore "close with result"
        connection.should_close_with_result = save_should_close_with_result
367 367
368 368
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """
    Custom engine_from_config function that also wires optional
    connection pinging (`sqlalchemy.db1.ping_connection`) and SQL debug
    logging (`sqlalchemy.db1.debug_query`) based on the configuration.
    """
    # NOTE: intentionally shadows the module logger so SQL debug output
    # lands on the 'sqlalchemy.engine' channel
    log = logging.getLogger('sqlalchemy.engine')
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        # wrap sql text in ANSI escape sequences for terminal readability
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            # query_start_time was attached in before_cursor_execute
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
410 410
411 411
def get_encryption_key(config):
    """
    Return the secret used for encrypted values, preferring the dedicated
    `rhodecode.encrypted_values.secret` and falling back to the (required)
    `beaker.session.secret`.
    """
    # the beaker secret is mandatory; missing key must raise like before
    fallback = config['beaker.session.secret']
    dedicated = config.get('rhodecode.encrypted_values.secret')
    return dedicated if dedicated else fallback
416 416
417 417
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix: append 'ago'/'in' style suffix to the result
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        # dateutil accounts for variable month/year lengths
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    if prevdate > now:
        # date lies in the future: swap operands and remember the direction
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    # walk from the most significant non-zero part; render it plus at most
    # one sub-part of detail
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    return _(u'just now')
572 572
573 573
def age_from_seconds(seconds):
    """
    Return a short approximate age string (e.g. '5min') for the given
    number of seconds relative to now.

    :param seconds: offset in seconds; non-numeric input is treated as 0
    """
    seconds = safe_int(seconds) or 0
    prevdate = time_to_datetime(time.time() + seconds)
    return age(prevdate, show_suffix=False, show_short_version=True)
578 578
579 579
def cleaned_uri(uri):
    """
    Quotes '[' and ']' from uri if there is only one of them.
    according to RFC3986 we cannot use such chars in uri
    :param uri:
    :return: uri without this chars
    """
    # NOTE(review): urllib.quote is python2-only (urllib.parse.quote on
    # python3) — revisit on py3 port
    return urllib.quote(uri, safe='@$:/')
588 588
589 589
def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri: url string, or an InvalidDecryptedValue marker
    :returns: URLObject without username/password, or the literal string
        'InvalidDecryptionKey' when decryption of the stored value failed
    """
    import urlobject
    if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
        return 'InvalidDecryptionKey'

    url_obj = urlobject.URLObject(cleaned_uri(uri))
    url_obj = url_obj.without_password().without_username()

    return url_obj
604 604
605 605
def get_host_info(request):
    """
    Generate host info, to obtain full url e.g https://server.com
    use this
    `{scheme}://{netloc}`

    :param request: pyramid request object; falsy input yields an empty dict
    """
    if not request:
        return {}

    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))

    return {
        'scheme': parsed_url.scheme,
        # path appended so proxy-prefix deployments keep their prefix
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
    }
624 624
625 625
def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    """
    Render a clone url from `uri_tmpl` by substituting `{scheme}`,
    `{user}`, `{netloc}`, `{prefix}`, `{repo}`, `{repoid}`, `{repo_type}`
    and `{sys_user}` placeholders.

    :param request: pyramid request, used to resolve the server home url
    :param uri_tmpl: template string containing `{placeholders}`
    :param repo_name: substituted for `{repo}`
    :param repo_id: substituted (stringified) for `{repoid}`
    :param repo_type: vcs type; 'svn' rewrites ssh:// into svn+ssh://
    :param override: extra or overriding template variables
    """
    qualified_home_url = request.route_url('home')
    parsed_url = urlobject.URLObject(qualified_home_url)
    decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))

    args = {
        'scheme': parsed_url.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': parsed_url.netloc+decoded_path,
        'hostname': parsed_url.hostname,
        'prefix': decoded_path,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    args.update(override)
    # username must be url-quoted (NOTE(review): urllib.quote is py2-only)
    args['user'] = urllib.quote(safe_str(args['user']))

    for k, v in args.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % k, v)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    url_obj = urlobject.URLObject(uri_tmpl)
    url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))

    return safe_unicode(url)
658 658
659 659
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False, reference_obj=None):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
    :returns: commit object, or EmptyCommit when the commit cannot be found
    :raises Exception: when `repo` is not a BaseRepository instance
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        # BUGFIX: the message previously used logging-style ('%s', arg)
        # arguments on Exception, which never interpolated the type name
        raise Exception(
            'You must pass a Repository object as first argument, got %s'
            % type(repo))

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
    except (RepositoryError, LookupError):
        # unknown/unreachable commit -> dummy placeholder keeps callers safe
        commit = EmptyCommit()
    return commit
687 687
688 688
def datetime_to_time(dt):
    """Convert a datetime into a unix timestamp; falsy input yields None."""
    return time.mktime(dt.timetuple()) if dt else None
692 692
693 693
def time_to_datetime(tm):
    """
    Convert a unix timestamp (number, or numeric string) into a local
    datetime. Falsy input or an unparsable string yields None.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.fromtimestamp(tm)
702 702
703 703
def time_to_utcdatetime(tm):
    """
    Convert a unix timestamp (number, or numeric string) into a naive UTC
    datetime. Falsy input or an unparsable string yields None.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.utcfromtimestamp(tm)
712 712
713 713
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)


def extract_mentioned_users(s):
    """
    Return a case-insensitively sorted list of the unique usernames that
    are @mentioned in the given string.

    :param s: string to scan for mentions
    """
    found = set(MENTIONS_REGEX.findall(s))
    return sorted(found, key=lambda username: username.lower())
733 733
734 734
class AttributeDictBase(dict):
    """
    Base dict subclass that maps attribute assignment/deletion straight
    onto dict items and stays picklable via __getstate__/__setstate__.
    """
    def __getstate__(self):
        odict = self.__dict__  # get attribute dictionary
        return odict

    def __setstate__(self, dict):
        self.__dict__ = dict

    # attribute writes/deletes operate on dict items, not instance __dict__
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
745 745
746 746
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """
    def __getattr__(self, attr):
        # missing keys surface as AttributeError, matching attribute
        # access semantics rather than dict semantics
        if attr in self:
            return self[attr]
        raise AttributeError('%s object has no attribute %s' % (
            self.__class__, attr))
758 758
759 759
class AttributeDict(AttributeDictBase):
    """Attribute-style dict access; missing keys read as None."""
    def __getattr__(self, attr):
        return dict.get(self, attr, None)
763 763
764 764
765 765
class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
    """Insertion-ordered dict with defaultdict-style missing-key factory."""
    def __init__(self, default_factory=None, *args, **kwargs):
        # in python3 you can omit the args to super
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)
        # __missing__ (inherited from defaultdict) reads this attribute
        self.default_factory = default_factory
771 771
772 772
def fix_PATH(os_=None):
    """
    Prepend the running python interpreter's directory to PATH so that
    subprocess calls resolve the same python version.

    :param os_: optional substitute for the os module (used in tests)
    """
    if os_ is None:
        import os
    else:
        os = os_

    python_dir = os.path.split(sys.executable)[0]
    current_path = os.environ['PATH']
    if not current_path.startswith(python_dir):
        os.environ['PATH'] = '%s:%s' % (python_dir, current_path)
786 786
787 787
def obfuscate_url_pw(engine):
    """
    Return the engine URL as text with any password replaced by 'XXXXX'.

    :param engine: SQLAlchemy DSN/URL string
    """
    _url = engine or ''
    try:
        _url = sqlalchemy.engine.url.make_url(engine)
        if _url.password:
            # NOTE(review): in-place mutation works on SQLAlchemy < 1.4;
            # newer versions have immutable URL objects — verify on upgrade
            _url.password = 'XXXXX'
    except Exception:
        # best-effort: fall back to the raw input on unparsable values
        pass
    # NOTE(review): unicode() is python2-only
    return unicode(_url)
797 797
798 798
def get_server_url(environ):
    """
    Return the external server url (scheme://host[:port] plus script name)
    derived from a WSGI environ.
    """
    req = webob.Request(environ)
    return req.host_url + req.script_name
802 802
803 803
def unique_id(hexlen=32):
    """
    Return a random short id of length `hexlen`, drawn from an alphabet
    with visually ambiguous characters (0/O, 1/I/l) removed.
    """
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)
807 807
808 808
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url: url to get the uuid for; deterministic output for same url
    :param truncate_to: truncate the basic 22 UUID to shorter version
    :param alphabet: optional alternative output alphabet

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        unique_id = uuid.uuid4().int
    else:
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # base-N encode the 128-bit integer
    alphabet_length = len(_ALPHABET)
    output = []
    while unique_id > 0:
        digit = unique_id % alphabet_length
        output.append(_ALPHABET[digit])
        # BUGFIX: use floor division; the previous int(unique_id / n) goes
        # through float true-division on python3 and silently loses
        # precision on 128-bit UUID integers (identical result on python2)
        unique_id = unique_id // alphabet_length
    return "".join(output)[:truncate_to]
838 838
839 839
def get_current_rhodecode_user(request=None):
    """
    Gets rhodecode user from request

    :param request: optional pyramid request; when missing, the
        threadlocal current request is used
    :returns: web user, rpc_user for API calls, or None outside a request
    """
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    # web case
    if pyramid_request and hasattr(pyramid_request, 'user'):
        return pyramid_request.user

    # api case
    if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
        return pyramid_request.rpc_user

    return None
855 855
856 856
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :type action: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :type namespace: string

    """

    logger_name = 'rhodecode.actions'

    if namespace:
        logger_name += '.' + namespace

    log = logging.getLogger(logger_name)

    # get a user if we can
    user = get_current_rhodecode_user()

    logfunc = log.info

    if not user:
        # no request context available: log at warning level so it stands out
        user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(user, action))
887 887
888 888
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Allows for escaping of the separator: e.g. arg='foo\, bar'

    It should be noted that the way bash et. al. do command line parsing, those
    single quotes are required.
    """
    escaped_sep = r'\%s' % sep

    # fast path: nothing is escaped, a plain split suffices
    if escaped_sep not in text:
        return text.split(sep, maxsplit)

    head, _mid, tail = text.partition(escaped_sep)
    # a regular split is fine on the part before the escaped separator
    pieces = head.split(sep, maxsplit)
    pending = pieces.pop()  # last piece is still being assembled

    # recurse because there may be more escaped separators
    remainder = escape_split(tail, sep, maxsplit)

    # remainder[0] is the continuation of the escaped value
    pending += sep + remainder[0]

    return pieces + [pending] + remainder[1:]
914 914
915 915
class OptionalAttr(object):
    """
    Special Optional Option that defines other attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser'))):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        # name of the attribute this optional marker refers to
        self.attr_name = attr_name

    def __repr__(self):
        return '<OptionalAttr:%s>' % self.attr_name

    def __call__(self):
        # calling the marker is a no-op returning itself
        return self


# alias
OAttr = OptionalAttr
938 938
939 939
class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return '<Optional:%s>' % self.type_.__repr__()

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        returns value from this Optional instance
        """
        wrapped = self.type_
        if isinstance(wrapped, OAttr):
            # OAttr markers resolve to the name of the attribute they reference
            return wrapped.attr_name
        return wrapped

    @classmethod
    def extract(cls, val):
        """
        Extracts value from Optional() instance

        :param val:
        :return: original value if it's not Optional instance else
            value of instance
        """
        return val.getval() if isinstance(val, cls) else val
983 983
984 984
def glob2re(pat):
    """
    Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.

    :param pat: shell glob pattern ('*', '?', '[...]' classes)
    :returns: regex source string; '*' and '?' do not cross '/'
    """

    i, n = 0, len(pat)
    res = ''
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            #res = res + '.*'
            res = res + '[^/]*'
        elif c == '?':
            #res = res + '.'
            res = res + '[^/]'
        elif c == '[':
            # character class: scan forward for the closing ']'
            j = i
            if j < n and pat[j] == '!':
                j = j+1
            if j < n and pat[j] == ']':
                j = j+1
            while j < n and pat[j] != ']':
                j = j+1
            if j >= n:
                # unterminated class -> treat '[' as a literal
                res = res + '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j+1
                if stuff[0] == '!':
                    # '!' negation becomes regex '^' negation
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        else:
            res = res + re.escape(c)
    # NOTE(review): trailing inline flags '(?ms)' are rejected by
    # re.compile on python >= 3.11 — confirm before a python upgrade
    return res + '\Z(?ms)'
1024 1024
1025 1025
def parse_byte_string(size_str):
    """
    Parse a human-readable size string like '64MB' or '512kb' into bytes.

    :param size_str: string of the form <num>(MB|KB), case-insensitive
    :returns: size in bytes as an integer
    :raises ValueError: when the string doesn't match the expected format
    """
    match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
    if not match:
        raise ValueError('Given size:%s is invalid, please make sure '
                         'to use format of <num>(MB|KB)' % size_str)

    _parts = match.groups()
    num, type_ = _parts
    # BUGFIX: int() replaces the python2-only long(); on python2 ints
    # auto-promote to long, so behavior is unchanged there
    return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1035 1035
1036 1036
class CachedProperty(object):
    """
    Lazy Attributes. With option to invalidate the cache by running a method

    >>> class Foo(object):
    ...
    ... @CachedProperty
    ... def heavy_func(self):
    ...     return 'super-calculation'
    ...
    ... foo = Foo()
    ... foo.heavy_func() # first computation
    ... foo.heavy_func() # fetch from cache
    ... foo._invalidate_prop_cache('heavy_func')

    # at this point calling foo.heavy_func() will be re-computed
    """

    def __init__(self, func, func_name=None):

        if func_name is None:
            func_name = func.__name__
        self.data = (func, func_name)
        update_wrapper(self, func)

    def __get__(self, inst, class_):
        if inst is None:
            # accessed on the class itself -> return the descriptor
            return self

        func, func_name = self.data
        value = func(inst)
        # store the result on the instance; since this is a non-data
        # descriptor the instance attribute now shadows it and subsequent
        # reads bypass __get__ entirely
        inst.__dict__[func_name] = value
        if '_invalidate_prop_cache' not in inst.__dict__:
            inst.__dict__['_invalidate_prop_cache'] = partial(
                self._invalidate_prop_cache, inst)
        return value

    def _invalidate_prop_cache(self, inst, name):
        # dropping the cached attribute re-exposes the descriptor, so the
        # next access recomputes the value
        inst.__dict__.pop(name, None)
1076 1076
1077 1077
def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
    """
    Retry decorator with exponential backoff.

    Parameters
    ----------
    func : typing.Callable, optional
        Callable on which the decorator is applied, by default None
    exception : Exception or tuple of Exceptions, optional
        Exception(s) that invoke retry, by default Exception
    n_tries : int, optional
        Number of tries before giving up, by default 5
    delay : int, optional
        Initial delay between retries in seconds, by default 5
    backoff : int, optional
        Backoff multiplier e.g. value of 2 will double the delay, by default 1
    logger : bool, optional
        Option to log or print, by default True

    Returns
    -------
    typing.Callable
        Decorated callable that calls itself when exception(s) occur.

    Examples
    --------
    >>> import random
    >>> @retry(exception=Exception, n_tries=3)
    ... def test_random(text):
    ...    x = random.random()
    ...    if x < 0.5:
    ...        raise Exception("Fail")
    ...    else:
    ...        print("Success: ", text)
    >>> test_random("It works!")
    """

    # support both bare @retry and parameterized @retry(...) usage
    if func is None:
        return partial(
            retry,
            exception=exception,
            n_tries=n_tries,
            delay=delay,
            backoff=backoff,
            logger=logger,
        )

    @wraps(func)
    def wrapper(*args, **kwargs):
        _n_tries, n_delay = n_tries, delay
        log = logging.getLogger('rhodecode.retry')

        while _n_tries > 1:
            try:
                return func(*args, **kwargs)
            except exception as e:
                e_details = repr(e)
                msg = "Exception on calling func {func}: {e}, " \
                      "Retrying in {n_delay} seconds..."\
                      .format(func=func, e=e_details, n_delay=n_delay)
                if logger:
                    log.warning(msg)
                else:
                    print(msg)
                time.sleep(n_delay)
                _n_tries -= 1
                n_delay *= backoff

        # final attempt: let any exception propagate to the caller
        return func(*args, **kwargs)

    return wrapper
1149
1150
def user_agent_normalizer(user_agent_raw):
    """
    Normalize SCM client user-agent strings to a compact lowercase form,
    e.g. 'mercurial/proto-1.0 (Mercurial 5.2)' -> 'mercurial/5.2'.
    """
    log = logging.getLogger('rhodecode.user_agent_normalizer')
    ua = (user_agent_raw or '').strip().lower()

    try:
        if 'mercurial/proto-1.0' in ua:
            stripped = ua.replace('mercurial/proto-1.0', '')
            stripped = stripped.replace('(', '').replace(')', '').strip()
            ua = stripped.replace('mercurial ', 'mercurial/')
        elif ua.startswith('git'):
            # git agents already look like 'git/<version>'
            pass
    except Exception:
        log.exception('Failed to parse scm user-agent')

    return ua
@@ -1,2380 +1,2381 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
class UpdateResponse(object):
    """Value object holding the result of updating the commits of a
    pull request. Carries no behavior; it is consumed by callers that
    inspect the update outcome.
    """

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):

        # whether the update was actually carried out
        self.executed = executed
        # reason for the outcome — presumably one of the
        # UpdateFailureReason-based messages; confirm with callers
        self.reason = reason
        # refs/commits after and before the update
        self.new = new
        self.old = old
        self.common_ancestor_id = common_ancestor_id
        # NOTE: exposed under the shorter attribute name ``changes``
        self.changes = commit_changes
        # flags telling which side of the PR moved
        self.source_changed = source_changed
        self.target_changed = target_changed
86 86
87 87
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: source repository object (has ``scm_instance()``)
    :param source_ref: commit id / ref on the source side
    :param target_repo: target repository object
    :param target_ref: commit id / ref on the target side
    :param get_authors: additionally annotate changed lines and count the
        original authors of the modified code (expensive: per-file annotation)
    :param get_commit_authors: collect RhodeCode users matching the authors
        of the incoming commits
    :returns: dict with keys ``commits``, ``files``, ``stats``, ``ancestor``,
        ``original_authors`` and ``commit_authors``
    :raises ValueError: when the two refs share no common ancestor
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    all_files = []
    all_files_changes = []
    changed_lines = {}
    # stats is [added, deleted] accumulated over all files
    stats = [0, 0]
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        changed_lines[f['filename']] = []
        # a file entry with fewer than 2 chunks has no real line changes to record
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                # only deleted/modified lines point back at original line numbers
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    # memoizes commits resolved during annotation so each is fetched once
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        log.debug('Obtaining commit authors from set of commits')
        _compare_data = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author", "date", "message"]
        )

        for commit in _compare_data:
            # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
            # at this function which is later called via JSON serialization
            serialized_commit = dict(
                author=commit.author,
                date=commit.date,
                message=commit.message,
                commit_id=commit.raw_id,
                raw_id=commit.raw_id
            )
            commits.append(serialized_commit)
            user = User.get_from_cs_author(serialized_commit['author'])
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        # authorship is resolved against the common-ancestor state of the files
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                # file may not exist at the ancestor (e.g. newly added); skip it
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for binary node we don't do annotation, just use last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                # count only authors of lines that were actually touched
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                        author_counts[author] = author_counts.get(author, 0) + 1
                        email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
236 236
237 237
class PullRequestModel(BaseModel):
    """Business logic for pull requests: querying, permissions and review state."""

    # SQLAlchemy model this BaseModel subclass operates on
    cls = PullRequest

    # default number of context lines used when rendering diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # human-readable (lazily translated) message per update-failure reason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # ref types a PR source/target may point at
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # subset of REF_TYPES that can move and therefore support PR updates
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        """
        Build the base SQLAlchemy query for listing pull requests of a repo.

        :param repo_name: repository to filter on; when falsy no repo filter
            is applied
        :param search_q: free-text filter matched (ILIKE) against PR id,
            author username, title and description
        :param source: when True, ``repo_name`` is matched as the PR *source*
            repo instead of the target
        :param statuses: optional list of PR statuses to include
        :param opened_by: optional list of author user ids
        :param order_by: one of the keys in the order map below; unknown keys
            leave the query unordered
        :param order_dir: 'asc' or 'desc' (anything else means desc)
        :param only_created: restrict to PRs still in the "created" state
        :returns: un-executed query object
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            # join on author so the username can participate in the search
            q = q.join(User, User.user_id == PullRequest.user_id)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'id': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
409 409 """
410 410 Count the number of pull requests for a specific repository that are
411 411 awaiting review.
412 412
413 413 :param repo_name: target or source repo
414 414 :param search_q: filter by text
415 415 :param statuses: list of pull request statuses
416 416 :returns: int number of pull requests
417 417 """
418 418 pull_requests = self.get_awaiting_review(
419 419 repo_name, search_q=search_q, statuses=statuses)
420 420
421 421 return len(pull_requests)
422 422
423 423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
424 424 offset=0, length=None, order_by=None, order_dir='desc'):
425 425 """
426 426 Get all pull requests for a specific repository that are awaiting
427 427 review.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param statuses: list of pull request statuses
432 432 :param offset: pagination offset
433 433 :param length: length of returned list
434 434 :param order_by: order of the returned list
435 435 :param order_dir: 'asc' or 'desc' ordering direction
436 436 :returns: list of pull requests
437 437 """
438 438 pull_requests = self.get_all(
439 439 repo_name, search_q=search_q, statuses=statuses,
440 440 order_by=order_by, order_dir=order_dir)
441 441
442 442 _filtered_pull_requests = []
443 443 for pr in pull_requests:
444 444 status = pr.calculated_review_status()
445 445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
446 446 ChangesetStatus.STATUS_UNDER_REVIEW]:
447 447 _filtered_pull_requests.append(pr)
448 448 if length:
449 449 return _filtered_pull_requests[offset:offset+length]
450 450 else:
451 451 return _filtered_pull_requests
452 452
    def _prepare_awaiting_my_review_review_query(
            self, repo_name, user_id, search_q=None, statuses=None,
            order_by=None, order_dir='desc'):
        """
        Build the query for pull requests of ``repo_name`` that still need a
        review action from ``user_id``: the user is a registered reviewer and
        their latest changeset-status vote is either missing, "not reviewed"
        or "under review".

        :param repo_name: repo whose PRs (as target) are considered
        :param user_id: reviewer whose pending votes are looked up
        :param search_q: free-text ILIKE filter (PR id, author, title, desc)
        :param statuses: optional list of PR statuses
        :param order_by: key of the order map below
        :param order_dir: 'asc' or 'desc'
        :returns: un-executed, grouped query of PullRequest aliases
        """

        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)
        repo_alias = aliased(Repository)

        # correlated subquery selecting the user's latest (min version) vote
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .join(repo_alias,
                  repo_alias.repo_id == pull_request_alias.target_repo_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(repo_alias.repo_name == repo_name) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
515 515
516 516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
517 517 """
518 518 Count the number of pull requests for a specific repository that are
519 519 awaiting review from a specific user.
520 520
521 521 :param repo_name: target or source repo
522 522 :param user_id: reviewer user of the pull request
523 523 :param search_q: filter by text
524 524 :param statuses: list of pull request statuses
525 525 :returns: int number of pull requests
526 526 """
527 527 q = self._prepare_awaiting_my_review_review_query(
528 528 repo_name, user_id, search_q=search_q, statuses=statuses)
529 529 return q.count()
530 530
531 531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
532 532 offset=0, length=None, order_by=None, order_dir='desc'):
533 533 """
534 534 Get all pull requests for a specific repository that are awaiting
535 535 review from a specific user.
536 536
537 537 :param repo_name: target or source repo
538 538 :param user_id: reviewer user of the pull request
539 539 :param search_q: filter by text
540 540 :param statuses: list of pull request statuses
541 541 :param offset: pagination offset
542 542 :param length: length of returned list
543 543 :param order_by: order of the returned list
544 544 :param order_dir: 'asc' or 'desc' ordering direction
545 545 :returns: list of pull requests
546 546 """
547 547
548 548 q = self._prepare_awaiting_my_review_review_query(
549 549 repo_name, user_id, search_q=search_q, statuses=statuses,
550 550 order_by=order_by, order_dir=order_dir)
551 551
552 552 if length:
553 553 pull_requests = q.limit(length).offset(offset).all()
554 554 else:
555 555 pull_requests = q.all()
556 556
557 557 return pull_requests
558 558
559 559 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
560 560 order_by=None, order_dir='desc'):
561 561 """
562 562 return a query of pull-requests user is an creator, or he's added as a reviewer
563 563 """
564 564 q = PullRequest.query()
565 565 if user_id:
566 566 reviewers_subquery = Session().query(
567 567 PullRequestReviewers.pull_request_id).filter(
568 568 PullRequestReviewers.user_id == user_id).subquery()
569 569 user_filter = or_(
570 570 PullRequest.user_id == user_id,
571 571 PullRequest.pull_request_id.in_(reviewers_subquery)
572 572 )
573 573 q = PullRequest.query().filter(user_filter)
574 574
575 575 # closed,opened
576 576 if statuses:
577 577 q = q.filter(PullRequest.status.in_(statuses))
578 578
579 579 if query:
580 580 like_expression = u'%{}%'.format(safe_unicode(query))
581 581 q = q.join(User, User.user_id == PullRequest.user_id)
582 582 q = q.filter(or_(
583 583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 584 User.username.ilike(like_expression),
585 585 PullRequest.title.ilike(like_expression),
586 586 PullRequest.description.ilike(like_expression),
587 587 ))
588 588
589 589 order_map = {
590 590 'name_raw': PullRequest.pull_request_id,
591 591 'title': PullRequest.title,
592 592 'updated_on_raw': PullRequest.updated_on,
593 593 'target_repo': PullRequest.target_repo_id
594 594 }
595 595 if order_by and order_by in order_map:
596 596 if order_dir == 'asc':
597 597 q = q.order_by(order_map[order_by].asc())
598 598 else:
599 599 q = q.order_by(order_map[order_by].desc())
600 600
601 601 return q
602 602
603 603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
604 604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
605 605 return q.count()
606 606
607 607 def get_im_participating_in(
608 608 self, user_id=None, statuses=None, query='', offset=0,
609 609 length=None, order_by=None, order_dir='desc'):
610 610 """
611 611 Get all Pull requests that i'm participating in as a reviewer, or i have opened
612 612 """
613 613
614 614 q = self._prepare_im_participating_query(
615 615 user_id, statuses=statuses, query=query, order_by=order_by,
616 616 order_dir=order_dir)
617 617
618 618 if length:
619 619 pull_requests = q.limit(length).offset(offset).all()
620 620 else:
621 621 pull_requests = q.all()
622 622
623 623 return pull_requests
624 624
    def _prepare_participating_in_for_review_query(
            self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
        """
        Build the query for all pull requests (across repos) that still need
        a review action from ``user_id``: the user is a registered reviewer
        and their latest changeset-status vote is missing, "not reviewed" or
        "under review". Same shape as the per-repo variant above but without
        the repository filter.

        :param user_id: reviewer whose pending votes are looked up
        :param statuses: optional list of PR statuses
        :param query: free-text ILIKE filter (PR id, author, title, desc)
        :param order_by: key of the order map below
        :param order_dir: 'asc' or 'desc'
        :returns: un-executed, grouped query of PullRequest aliases
        """

        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)

        # correlated subquery selecting the user's latest (min version) vote
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
682 682
683 683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
684 684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
685 685 return q.count()
686 686
687 687 def get_im_participating_in_for_review(
688 688 self, user_id, statuses=None, query='', offset=0,
689 689 length=None, order_by=None, order_dir='desc'):
690 690 """
691 691 Get all Pull requests that needs user approval or rejection
692 692 """
693 693
694 694 q = self._prepare_participating_in_for_review_query(
695 695 user_id, statuses=statuses, query=query, order_by=order_by,
696 696 order_dir=order_dir)
697 697
698 698 if length:
699 699 pull_requests = q.limit(length).offset(offset).all()
700 700 else:
701 701 pull_requests = q.all()
702 702
703 703 return pull_requests
704 704
705 705 def get_versions(self, pull_request):
706 706 """
707 707 returns version of pull request sorted by ID descending
708 708 """
709 709 return PullRequestVersion.query()\
710 710 .filter(PullRequestVersion.pull_request == pull_request)\
711 711 .order_by(PullRequestVersion.pull_request_version_id.asc())\
712 712 .all()
713 713
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request together with an optional version selector.

        :param pull_request_id: id of the pull request
        :param version: None for the live PR, the string 'latest', or a
            PullRequestVersion id (404s when it does not exist)
        :returns: 4-tuple of (original PR object, the selected PR/version
            object, a display wrapper built via get_pr_display_object, and
            the resolved ``at_version`` marker — None, 'latest' or the
            version id)
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' points at the live pull request itself
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # an explicit historical version; the original PR is reachable
            # through the version's back-reference
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
736 736
737 737 def pr_commits_versions(self, versions):
738 738 """
739 739 Maps the pull-request commits into all known PR versions. This way we can obtain
740 740 each pr version the commit was introduced in.
741 741 """
742 742 commit_versions = collections.defaultdict(list)
743 743 num_versions = [x.pull_request_version_id for x in versions]
744 744 for ver in versions:
745 745 for commit_id in ver.revisions:
746 746 ver_idx = ChangesetComment.get_index_from_version(
747 747 ver.pull_request_version_id, num_versions=num_versions)
748 748 commit_versions[commit_id].append(ver_idx)
749 749 return commit_versions
750 750
751 751 def create(self, created_by, source_repo, source_ref, target_repo,
752 752 target_ref, revisions, reviewers, observers, title, description=None,
753 753 common_ancestor_id=None,
754 754 description_renderer=None,
755 755 reviewer_data=None, translator=None, auth_user=None):
756 756 translator = translator or get_current_request().translate
757 757
758 758 created_by_user = self._get_user(created_by)
759 759 auth_user = auth_user or created_by_user.AuthUser()
760 760 source_repo = self._get_repo(source_repo)
761 761 target_repo = self._get_repo(target_repo)
762 762
763 763 pull_request = PullRequest()
764 764 pull_request.source_repo = source_repo
765 765 pull_request.source_ref = source_ref
766 766 pull_request.target_repo = target_repo
767 767 pull_request.target_ref = target_ref
768 768 pull_request.revisions = revisions
769 769 pull_request.title = title
770 770 pull_request.description = description
771 771 pull_request.description_renderer = description_renderer
772 772 pull_request.author = created_by_user
773 773 pull_request.reviewer_data = reviewer_data
774 774 pull_request.pull_request_state = pull_request.STATE_CREATING
775 775 pull_request.common_ancestor_id = common_ancestor_id
776 776
777 777 Session().add(pull_request)
778 778 Session().flush()
779 779
780 780 reviewer_ids = set()
781 781 # members / reviewers
782 782 for reviewer_object in reviewers:
783 783 user_id, reasons, mandatory, role, rules = reviewer_object
784 784 user = self._get_user(user_id)
785 785
786 786 # skip duplicates
787 787 if user.user_id in reviewer_ids:
788 788 continue
789 789
790 790 reviewer_ids.add(user.user_id)
791 791
792 792 reviewer = PullRequestReviewers()
793 793 reviewer.user = user
794 794 reviewer.pull_request = pull_request
795 795 reviewer.reasons = reasons
796 796 reviewer.mandatory = mandatory
797 797 reviewer.role = role
798 798
799 799 # NOTE(marcink): pick only first rule for now
800 800 rule_id = list(rules)[0] if rules else None
801 801 rule = RepoReviewRule.get(rule_id) if rule_id else None
802 802 if rule:
803 803 review_group = rule.user_group_vote_rule(user_id)
804 804 # we check if this particular reviewer is member of a voting group
805 805 if review_group:
806 806 # NOTE(marcink):
807 807 # can be that user is member of more but we pick the first same,
808 808 # same as default reviewers algo
809 809 review_group = review_group[0]
810 810
811 811 rule_data = {
812 812 'rule_name':
813 813 rule.review_rule_name,
814 814 'rule_user_group_entry_id':
815 815 review_group.repo_review_rule_users_group_id,
816 816 'rule_user_group_name':
817 817 review_group.users_group.users_group_name,
818 818 'rule_user_group_members':
819 819 [x.user.username for x in review_group.users_group.members],
820 820 'rule_user_group_members_id':
821 821 [x.user.user_id for x in review_group.users_group.members],
822 822 }
823 823 # e.g {'vote_rule': -1, 'mandatory': True}
824 824 rule_data.update(review_group.rule_data())
825 825
826 826 reviewer.rule_data = rule_data
827 827
828 828 Session().add(reviewer)
829 829 Session().flush()
830 830
831 831 for observer_object in observers:
832 832 user_id, reasons, mandatory, role, rules = observer_object
833 833 user = self._get_user(user_id)
834 834
835 835 # skip duplicates from reviewers
836 836 if user.user_id in reviewer_ids:
837 837 continue
838 838
839 839 #reviewer_ids.add(user.user_id)
840 840
841 841 observer = PullRequestReviewers()
842 842 observer.user = user
843 843 observer.pull_request = pull_request
844 844 observer.reasons = reasons
845 845 observer.mandatory = mandatory
846 846 observer.role = role
847 847
848 848 # NOTE(marcink): pick only first rule for now
849 849 rule_id = list(rules)[0] if rules else None
850 850 rule = RepoReviewRule.get(rule_id) if rule_id else None
851 851 if rule:
852 852 # TODO(marcink): do we need this for observers ??
853 853 pass
854 854
855 855 Session().add(observer)
856 856 Session().flush()
857 857
858 858 # Set approval status to "Under Review" for all commits which are
859 859 # part of this pull request.
860 860 ChangesetStatusModel().set_status(
861 861 repo=target_repo,
862 862 status=ChangesetStatus.STATUS_UNDER_REVIEW,
863 863 user=created_by_user,
864 864 pull_request=pull_request
865 865 )
866 866 # we commit early at this point. This has to do with a fact
867 867 # that before queries do some row-locking. And because of that
868 868 # we need to commit and finish transaction before below validate call
869 869 # that for large repos could be long resulting in long row locks
870 870 Session().commit()
871 871
872 872 # prepare workspace, and run initial merge simulation. Set state during that
873 873 # operation
874 874 pull_request = PullRequest.get(pull_request.pull_request_id)
875 875
876 876 # set as merging, for merge simulation, and if finished to created so we mark
877 877 # simulation is working fine
878 878 with pull_request.set_state(PullRequest.STATE_MERGING,
879 879 final_state=PullRequest.STATE_CREATED) as state_obj:
880 880 MergeCheck.validate(
881 881 pull_request, auth_user=auth_user, translator=translator)
882 882
883 883 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
884 884 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
885 885
886 886 creation_data = pull_request.get_api_data(with_merge_state=False)
887 887 self._log_audit_action(
888 888 'repo.pull_request.create', {'data': creation_data},
889 889 auth_user, pull_request)
890 890
891 891 return pull_request
892 892
893 893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
894 894 pull_request = self.__get_pull_request(pull_request)
895 895 target_scm = pull_request.target_repo.scm_instance()
896 896 if action == 'create':
897 897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
898 898 elif action == 'merge':
899 899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
900 900 elif action == 'close':
901 901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
902 902 elif action == 'review_status_change':
903 903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
904 904 elif action == 'update':
905 905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
906 906 elif action == 'comment':
907 907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
908 908 elif action == 'comment_edit':
909 909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
910 910 else:
911 911 return
912 912
913 913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
914 914 pull_request, action, trigger_hook)
915 915 trigger_hook(
916 916 username=user.username,
917 917 repo_name=pull_request.target_repo.repo_name,
918 918 repo_type=target_scm.alias,
919 919 pull_request=pull_request,
920 920 data=data)
921 921
922 922 def _get_commit_ids(self, pull_request):
923 923 """
924 924 Return the commit ids of the merged pull request.
925 925
926 926 This method is not dealing correctly yet with the lack of autoupdates
927 927 nor with the implicit target updates.
928 928 For example: if a commit in the source repo is already in the target it
929 929 will be reported anyways.
930 930 """
931 931 merge_rev = pull_request.merge_rev
932 932 if merge_rev is None:
933 933 raise ValueError('This pull request was not merged yet')
934 934
935 935 commit_ids = list(pull_request.revisions)
936 936 if merge_rev not in commit_ids:
937 937 commit_ids.append(merge_rev)
938 938
939 939 return commit_ids
940 940
941 941 def merge_repo(self, pull_request, user, extras):
942 repo_type = pull_request.source_repo.repo_type
942 943 log.debug("Merging pull request %s", pull_request.pull_request_id)
943 extras['user_agent'] = 'internal-merge'
944 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
944 945 merge_state = self._merge_pull_request(pull_request, user, extras)
945 946 if merge_state.executed:
946 947 log.debug("Merge was successful, updating the pull request comments.")
947 948 self._comment_and_close_pr(pull_request, user, merge_state)
948 949
949 950 self._log_audit_action(
950 951 'repo.pull_request.merge',
951 952 {'merge_state': merge_state.__dict__},
952 953 user, pull_request)
953 954
954 955 else:
955 956 log.warn("Merge failed, not updating the pull request.")
956 957 return merge_state
957 958
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual vcs-level merge of *pull_request* into its target.

        :param pull_request: `PullRequest` instance to merge
        :param user: user performing the merge; provides committer email
        :param extras: dict of hook extras, serialized into ``RC_SCM_DATA``
        :param merge_msg: optional message template; falls back to
            ``vcs_settings.MERGE_MESSAGE_TMPL``
        :return: merge state object returned by the vcs backend's ``merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # build the merge commit message from the template placeholders
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            pr_desc=pull_request.description,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the target reference is current before merging
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        # hooks fired during the merge call back into this process; the
        # callback daemon serves them for the duration of the merge
        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
999 1000
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        Post-merge bookkeeping: record the merge revision, add the closing
        comment, invalidate caches and fire the 'merge' hook.

        :param merge_state: merge result; ``merge_ref.commit_id`` is stored
            as the pull request's merge revision
        :param close_msg: optional text for the closing comment
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
1021 1022
1022 1023 def has_valid_update_type(self, pull_request):
1023 1024 source_ref_type = pull_request.source_ref_parts.type
1024 1025 return source_ref_type in self.REF_TYPES
1025 1026
    def get_flow_commits(self, pull_request):
        """
        Resolve and return the source and target commits of *pull_request*.

        Refs whose type is in ``REF_TYPES`` (named refs) are resolved by name
        so the current tip is picked up; other refs are resolved by the
        stored commit id.

        :param pull_request: `PullRequest` instance
        :return: tuple of (source_commit, target_commit)
        :raises SourceRefMissing: when the source commit cannot be resolved
        :raises TargetRefMissing: when the target commit cannot be resolved
        """

        # source repo
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_id = pull_request.source_ref_parts.commit_id
        source_repo = pull_request.source_repo.scm_instance()

        try:
            if source_ref_type in self.REF_TYPES:
                # named ref: resolve by name to get the current tip
                source_commit = source_repo.get_commit(
                    source_ref_name, reference_obj=pull_request.source_ref_parts)
            else:
                source_commit = source_repo.get_commit(source_ref_id)
        except CommitDoesNotExistError:
            raise SourceRefMissing()

        # target repo
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_type = pull_request.target_ref_parts.type
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        try:
            if target_ref_type in self.REF_TYPES:
                # named ref: resolve by name to get the current tip
                target_commit = target_repo.get_commit(
                    target_ref_name, reference_obj=pull_request.target_ref_parts)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            raise TargetRefMissing()

        return source_commit, target_commit
1059 1060
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update

        :return: `UpdateResponse`; ``executed`` is False with a
            reason from `UpdateFailureReason` when nothing was updated
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        # only named refs (branches/bookmarks) can be updated
        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # a ref changed when its stored commit id differs from the tip
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                # NOTE(review): the two kwargs below look swapped, but both
                # flags are guaranteed False in this branch, so it is harmless
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        # refs are stored as "type:name:commit_id" triples
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # initial commit
        Session().commit()

        if pr_has_changes:
            # send update email to users
            try:
                self.notify_users(pull_request=pull_request, updating_user=updating_user,
                                  ancestor_commit_id=ancestor_commit_id,
                                  commit_changes=commit_changes,
                                  file_changes=file_changes)
                Session().commit()
            except Exception:
                # notification failure must not fail the update itself
                log.exception('Failed to send email notification to users')
                Session().rollback()

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)

        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1240 1241
1241 1242 def _create_version_from_snapshot(self, pull_request):
1242 1243 version = PullRequestVersion()
1243 1244 version.title = pull_request.title
1244 1245 version.description = pull_request.description
1245 1246 version.status = pull_request.status
1246 1247 version.pull_request_state = pull_request.pull_request_state
1247 1248 version.created_on = datetime.datetime.now()
1248 1249 version.updated_on = pull_request.updated_on
1249 1250 version.user_id = pull_request.user_id
1250 1251 version.source_repo = pull_request.source_repo
1251 1252 version.source_ref = pull_request.source_ref
1252 1253 version.target_repo = pull_request.target_repo
1253 1254 version.target_ref = pull_request.target_ref
1254 1255
1255 1256 version._last_merge_source_rev = pull_request._last_merge_source_rev
1256 1257 version._last_merge_target_rev = pull_request._last_merge_target_rev
1257 1258 version.last_merge_status = pull_request.last_merge_status
1258 1259 version.last_merge_metadata = pull_request.last_merge_metadata
1259 1260 version.shadow_merge_ref = pull_request.shadow_merge_ref
1260 1261 version.merge_rev = pull_request.merge_rev
1261 1262 version.reviewer_data = pull_request.reviewer_data
1262 1263
1263 1264 version.revisions = pull_request.revisions
1264 1265 version.common_ancestor_id = pull_request.common_ancestor_id
1265 1266 version.pull_request = pull_request
1266 1267 Session().add(version)
1267 1268 Session().flush()
1268 1269
1269 1270 return version
1270 1271
1271 1272 def _generate_update_diffs(self, pull_request, pull_request_version):
1272 1273
1273 1274 diff_context = (
1274 1275 self.DIFF_CONTEXT +
1275 1276 CommentsModel.needed_extra_diff_context())
1276 1277 hide_whitespace_changes = False
1277 1278 source_repo = pull_request_version.source_repo
1278 1279 source_ref_id = pull_request_version.source_ref_parts.commit_id
1279 1280 target_ref_id = pull_request_version.target_ref_parts.commit_id
1280 1281 old_diff = self._get_diff_from_pr_or_version(
1281 1282 source_repo, source_ref_id, target_ref_id,
1282 1283 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283 1284
1284 1285 source_repo = pull_request.source_repo
1285 1286 source_ref_id = pull_request.source_ref_parts.commit_id
1286 1287 target_ref_id = pull_request.target_ref_parts.commit_id
1287 1288
1288 1289 new_diff = self._get_diff_from_pr_or_version(
1289 1290 source_repo, source_ref_id, target_ref_id,
1290 1291 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291 1292
1292 1293 old_diff_data = diffs.DiffProcessor(old_diff)
1293 1294 old_diff_data.prepare()
1294 1295 new_diff_data = diffs.DiffProcessor(new_diff)
1295 1296 new_diff_data.prepare()
1296 1297
1297 1298 return old_diff_data, new_diff_data
1298 1299
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                # NOTE: `== None` is required by SQLAlchemy to emit an
                # IS NULL clause; do not replace it with `is None`
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1324 1325
1325 1326 def _calculate_commit_id_changes(self, old_ids, new_ids):
1326 1327 added = [x for x in new_ids if x not in old_ids]
1327 1328 common = [x for x in new_ids if x in old_ids]
1328 1329 removed = [x for x in old_ids if x not in new_ids]
1329 1330 total = new_ids
1330 1331 return ChangeTuple(added, common, removed, total)
1331 1332
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared diff processors and classify files as
        added / modified / removed between the old and new diff.

        :return: `FileChangeTuple` of (added, modified, removed) filenames
        """

        # map filename -> md5 of its raw diff in the OLD diff; entries are
        # deleted below as they are matched against the new diff
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, we have to figure out from parsed diff
                # operation ADD/REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
1366 1367
1367 1368 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1368 1369 """
1369 1370 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1370 1371 so it's always looking the same disregarding on which default
1371 1372 renderer system is using.
1372 1373
1373 1374 :param ancestor_commit_id: ancestor raw_id
1374 1375 :param changes: changes named tuple
1375 1376 :param file_changes: file changes named tuple
1376 1377
1377 1378 """
1378 1379 new_status = ChangesetStatus.get_status_lbl(
1379 1380 ChangesetStatus.STATUS_UNDER_REVIEW)
1380 1381
1381 1382 changed_files = (
1382 1383 file_changes.added + file_changes.modified + file_changes.removed)
1383 1384
1384 1385 params = {
1385 1386 'under_review_label': new_status,
1386 1387 'added_commits': changes.added,
1387 1388 'removed_commits': changes.removed,
1388 1389 'changed_files': changed_files,
1389 1390 'added_files': file_changes.added,
1390 1391 'modified_files': file_changes.modified,
1391 1392 'removed_files': file_changes.removed,
1392 1393 'ancestor_commit_id': ancestor_commit_id
1393 1394 }
1394 1395 renderer = RstTemplateRenderer()
1395 1396 return renderer.render('pull_request_update.mako', **params)
1396 1397
1397 1398 def edit(self, pull_request, title, description, description_renderer, user):
1398 1399 pull_request = self.__get_pull_request(pull_request)
1399 1400 old_data = pull_request.get_api_data(with_merge_state=False)
1400 1401 if pull_request.is_closed():
1401 1402 raise ValueError('This pull request is closed')
1402 1403 if title:
1403 1404 pull_request.title = title
1404 1405 pull_request.description = description
1405 1406 pull_request.updated_on = datetime.datetime.now()
1406 1407 pull_request.description_renderer = description_renderer
1407 1408 Session().add(pull_request)
1408 1409 self._log_audit_action(
1409 1410 'repo.pull_request.edit', {'old_data': old_data},
1410 1411 user, pull_request)
1411 1412
1412 1413 def update_reviewers(self, pull_request, reviewer_data, user):
1413 1414 """
1414 1415 Update the reviewers in the pull request
1415 1416
1416 1417 :param pull_request: the pr to update
1417 1418 :param reviewer_data: list of tuples
1418 1419 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1419 1420 :param user: current use who triggers this action
1420 1421 """
1421 1422
1422 1423 pull_request = self.__get_pull_request(pull_request)
1423 1424 if pull_request.is_closed():
1424 1425 raise ValueError('This pull request is closed')
1425 1426
1426 1427 reviewers = {}
1427 1428 for user_id, reasons, mandatory, role, rules in reviewer_data:
1428 1429 if isinstance(user_id, (int, compat.string_types)):
1429 1430 user_id = self._get_user(user_id).user_id
1430 1431 reviewers[user_id] = {
1431 1432 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1432 1433
1433 1434 reviewers_ids = set(reviewers.keys())
1434 1435 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1435 1436 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1436 1437
1437 1438 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1438 1439
1439 1440 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1440 1441 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1441 1442
1442 1443 log.debug("Adding %s reviewers", ids_to_add)
1443 1444 log.debug("Removing %s reviewers", ids_to_remove)
1444 1445 changed = False
1445 1446 added_audit_reviewers = []
1446 1447 removed_audit_reviewers = []
1447 1448
1448 1449 for uid in ids_to_add:
1449 1450 changed = True
1450 1451 _usr = self._get_user(uid)
1451 1452 reviewer = PullRequestReviewers()
1452 1453 reviewer.user = _usr
1453 1454 reviewer.pull_request = pull_request
1454 1455 reviewer.reasons = reviewers[uid]['reasons']
1455 1456 # NOTE(marcink): mandatory shouldn't be changed now
1456 1457 # reviewer.mandatory = reviewers[uid]['reasons']
1457 1458 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1458 1459 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1459 1460 Session().add(reviewer)
1460 1461 added_audit_reviewers.append(reviewer.get_dict())
1461 1462
1462 1463 for uid in ids_to_remove:
1463 1464 changed = True
1464 1465 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1465 1466 # This is an edge case that handles previous state of having the same reviewer twice.
1466 1467 # this CAN happen due to the lack of DB checks
1467 1468 reviewers = PullRequestReviewers.query()\
1468 1469 .filter(PullRequestReviewers.user_id == uid,
1469 1470 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1470 1471 PullRequestReviewers.pull_request == pull_request)\
1471 1472 .all()
1472 1473
1473 1474 for obj in reviewers:
1474 1475 added_audit_reviewers.append(obj.get_dict())
1475 1476 Session().delete(obj)
1476 1477
1477 1478 if changed:
1478 1479 Session().expire_all()
1479 1480 pull_request.updated_on = datetime.datetime.now()
1480 1481 Session().add(pull_request)
1481 1482
1482 1483 # finally store audit logs
1483 1484 for user_data in added_audit_reviewers:
1484 1485 self._log_audit_action(
1485 1486 'repo.pull_request.reviewer.add', {'data': user_data},
1486 1487 user, pull_request)
1487 1488 for user_data in removed_audit_reviewers:
1488 1489 self._log_audit_action(
1489 1490 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1490 1491 user, pull_request)
1491 1492
1492 1493 self.notify_reviewers(pull_request, ids_to_add, user)
1493 1494 return ids_to_add, ids_to_remove
1494 1495
1495 1496 def update_observers(self, pull_request, observer_data, user):
1496 1497 """
1497 1498 Update the observers in the pull request
1498 1499
1499 1500 :param pull_request: the pr to update
1500 1501 :param observer_data: list of tuples
1501 1502 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1502 1503 :param user: current use who triggers this action
1503 1504 """
1504 1505 pull_request = self.__get_pull_request(pull_request)
1505 1506 if pull_request.is_closed():
1506 1507 raise ValueError('This pull request is closed')
1507 1508
1508 1509 observers = {}
1509 1510 for user_id, reasons, mandatory, role, rules in observer_data:
1510 1511 if isinstance(user_id, (int, compat.string_types)):
1511 1512 user_id = self._get_user(user_id).user_id
1512 1513 observers[user_id] = {
1513 1514 'reasons': reasons, 'observers': mandatory, 'role': role}
1514 1515
1515 1516 observers_ids = set(observers.keys())
1516 1517 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1517 1518 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1518 1519
1519 1520 current_observers_ids = set([x.user.user_id for x in current_observers])
1520 1521
1521 1522 ids_to_add = observers_ids.difference(current_observers_ids)
1522 1523 ids_to_remove = current_observers_ids.difference(observers_ids)
1523 1524
1524 1525 log.debug("Adding %s observer", ids_to_add)
1525 1526 log.debug("Removing %s observer", ids_to_remove)
1526 1527 changed = False
1527 1528 added_audit_observers = []
1528 1529 removed_audit_observers = []
1529 1530
1530 1531 for uid in ids_to_add:
1531 1532 changed = True
1532 1533 _usr = self._get_user(uid)
1533 1534 observer = PullRequestReviewers()
1534 1535 observer.user = _usr
1535 1536 observer.pull_request = pull_request
1536 1537 observer.reasons = observers[uid]['reasons']
1537 1538 # NOTE(marcink): mandatory shouldn't be changed now
1538 1539 # observer.mandatory = observer[uid]['reasons']
1539 1540
1540 1541 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1541 1542 observer.role = PullRequestReviewers.ROLE_OBSERVER
1542 1543 Session().add(observer)
1543 1544 added_audit_observers.append(observer.get_dict())
1544 1545
1545 1546 for uid in ids_to_remove:
1546 1547 changed = True
1547 1548 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1548 1549 # This is an edge case that handles previous state of having the same reviewer twice.
1549 1550 # this CAN happen due to the lack of DB checks
1550 1551 observers = PullRequestReviewers.query()\
1551 1552 .filter(PullRequestReviewers.user_id == uid,
1552 1553 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1553 1554 PullRequestReviewers.pull_request == pull_request)\
1554 1555 .all()
1555 1556
1556 1557 for obj in observers:
1557 1558 added_audit_observers.append(obj.get_dict())
1558 1559 Session().delete(obj)
1559 1560
1560 1561 if changed:
1561 1562 Session().expire_all()
1562 1563 pull_request.updated_on = datetime.datetime.now()
1563 1564 Session().add(pull_request)
1564 1565
1565 1566 # finally store audit logs
1566 1567 for user_data in added_audit_observers:
1567 1568 self._log_audit_action(
1568 1569 'repo.pull_request.observer.add', {'data': user_data},
1569 1570 user, pull_request)
1570 1571 for user_data in removed_audit_observers:
1571 1572 self._log_audit_action(
1572 1573 'repo.pull_request.observer.delete', {'old_data': user_data},
1573 1574 user, pull_request)
1574 1575
1575 1576 self.notify_observers(pull_request, ids_to_add, user)
1576 1577 return ids_to_add, ids_to_remove
1577 1578
1578 1579 def get_url(self, pull_request, request=None, permalink=False):
1579 1580 if not request:
1580 1581 request = get_current_request()
1581 1582
1582 1583 if permalink:
1583 1584 return request.route_url(
1584 1585 'pull_requests_global',
1585 1586 pull_request_id=pull_request.pull_request_id,)
1586 1587 else:
1587 1588 return request.route_url('pullrequest_show',
1588 1589 repo_name=safe_str(pull_request.target_repo.repo_name),
1589 1590 pull_request_id=pull_request.pull_request_id,)
1590 1591
1591 1592 def get_shadow_clone_url(self, pull_request, request=None):
1592 1593 """
1593 1594 Returns qualified url pointing to the shadow repository. If this pull
1594 1595 request is closed there is no shadow repository and ``None`` will be
1595 1596 returned.
1596 1597 """
1597 1598 if pull_request.is_closed():
1598 1599 return None
1599 1600 else:
1600 1601 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1601 1602 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1602 1603
1603 1604 def _notify_reviewers(self, pull_request, user_ids, role, user):
1604 1605 # notification to reviewers/observers
1605 1606 if not user_ids:
1606 1607 return
1607 1608
1608 1609 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1609 1610
1610 1611 pull_request_obj = pull_request
1611 1612 # get the current participants of this pull request
1612 1613 recipients = user_ids
1613 1614 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1614 1615
1615 1616 pr_source_repo = pull_request_obj.source_repo
1616 1617 pr_target_repo = pull_request_obj.target_repo
1617 1618
1618 1619 pr_url = h.route_url('pullrequest_show',
1619 1620 repo_name=pr_target_repo.repo_name,
1620 1621 pull_request_id=pull_request_obj.pull_request_id,)
1621 1622
1622 1623 # set some variables for email notification
1623 1624 pr_target_repo_url = h.route_url(
1624 1625 'repo_summary', repo_name=pr_target_repo.repo_name)
1625 1626
1626 1627 pr_source_repo_url = h.route_url(
1627 1628 'repo_summary', repo_name=pr_source_repo.repo_name)
1628 1629
1629 1630 # pull request specifics
1630 1631 pull_request_commits = [
1631 1632 (x.raw_id, x.message)
1632 1633 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1633 1634
1634 1635 current_rhodecode_user = user
1635 1636 kwargs = {
1636 1637 'user': current_rhodecode_user,
1637 1638 'pull_request_author': pull_request.author,
1638 1639 'pull_request': pull_request_obj,
1639 1640 'pull_request_commits': pull_request_commits,
1640 1641
1641 1642 'pull_request_target_repo': pr_target_repo,
1642 1643 'pull_request_target_repo_url': pr_target_repo_url,
1643 1644
1644 1645 'pull_request_source_repo': pr_source_repo,
1645 1646 'pull_request_source_repo_url': pr_source_repo_url,
1646 1647
1647 1648 'pull_request_url': pr_url,
1648 1649 'thread_ids': [pr_url],
1649 1650 'user_role': role
1650 1651 }
1651 1652
1652 1653 # create notification objects, and emails
1653 1654 NotificationModel().create(
1654 1655 created_by=current_rhodecode_user,
1655 1656 notification_subject='', # Filled in based on the notification_type
1656 1657 notification_body='', # Filled in based on the notification_type
1657 1658 notification_type=notification_type,
1658 1659 recipients=recipients,
1659 1660 email_kwargs=kwargs,
1660 1661 )
1661 1662
1662 1663 def notify_reviewers(self, pull_request, reviewers_ids, user):
1663 1664 return self._notify_reviewers(pull_request, reviewers_ids,
1664 1665 PullRequestReviewers.ROLE_REVIEWER, user)
1665 1666
1666 1667 def notify_observers(self, pull_request, observers_ids, user):
1667 1668 return self._notify_reviewers(pull_request, observers_ids,
1668 1669 PullRequestReviewers.ROLE_OBSERVER, user)
1669 1670
1670 1671 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1671 1672 commit_changes, file_changes):
1672 1673
1673 1674 updating_user_id = updating_user.user_id
1674 1675 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1675 1676 # NOTE(marcink): send notification to all other users except to
1676 1677 # person who updated the PR
1677 1678 recipients = reviewers.difference(set([updating_user_id]))
1678 1679
1679 1680 log.debug('Notify following recipients about pull-request update %s', recipients)
1680 1681
1681 1682 pull_request_obj = pull_request
1682 1683
1683 1684 # send email about the update
1684 1685 changed_files = (
1685 1686 file_changes.added + file_changes.modified + file_changes.removed)
1686 1687
1687 1688 pr_source_repo = pull_request_obj.source_repo
1688 1689 pr_target_repo = pull_request_obj.target_repo
1689 1690
1690 1691 pr_url = h.route_url('pullrequest_show',
1691 1692 repo_name=pr_target_repo.repo_name,
1692 1693 pull_request_id=pull_request_obj.pull_request_id,)
1693 1694
1694 1695 # set some variables for email notification
1695 1696 pr_target_repo_url = h.route_url(
1696 1697 'repo_summary', repo_name=pr_target_repo.repo_name)
1697 1698
1698 1699 pr_source_repo_url = h.route_url(
1699 1700 'repo_summary', repo_name=pr_source_repo.repo_name)
1700 1701
1701 1702 email_kwargs = {
1702 1703 'date': datetime.datetime.now(),
1703 1704 'updating_user': updating_user,
1704 1705
1705 1706 'pull_request': pull_request_obj,
1706 1707
1707 1708 'pull_request_target_repo': pr_target_repo,
1708 1709 'pull_request_target_repo_url': pr_target_repo_url,
1709 1710
1710 1711 'pull_request_source_repo': pr_source_repo,
1711 1712 'pull_request_source_repo_url': pr_source_repo_url,
1712 1713
1713 1714 'pull_request_url': pr_url,
1714 1715
1715 1716 'ancestor_commit_id': ancestor_commit_id,
1716 1717 'added_commits': commit_changes.added,
1717 1718 'removed_commits': commit_changes.removed,
1718 1719 'changed_files': changed_files,
1719 1720 'added_files': file_changes.added,
1720 1721 'modified_files': file_changes.modified,
1721 1722 'removed_files': file_changes.removed,
1722 1723 'thread_ids': [pr_url],
1723 1724 }
1724 1725
1725 1726 # create notification objects, and emails
1726 1727 NotificationModel().create(
1727 1728 created_by=updating_user,
1728 1729 notification_subject='', # Filled in based on the notification_type
1729 1730 notification_body='', # Filled in based on the notification_type
1730 1731 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1731 1732 recipients=recipients,
1732 1733 email_kwargs=email_kwargs,
1733 1734 )
1734 1735
    def delete(self, pull_request, user=None):
        """
        Delete the pull request: clean up the shadow merge workspace and
        store an audit entry containing the pre-delete state.

        :param user: acting username; defaults to the current request user
        """
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        # capture API data *before* deletion so the audit log has it
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1746 1747
    def close_pull_request(self, pull_request, user):
        """
        Mark the pull request closed (without merging): drop the shadow
        workspace, fire the 'close' hook and write an audit entry.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        # hook runs with the PR author, not necessarily the closing user
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1758 1759
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close the pull request leaving a closing comment and a final review
        status: approved when voting consent was reached, rejected otherwise.

        :returns: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # make the new status visible before hooks/recalculation run
        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1816 1817
1817 1818 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1818 1819 _ = translator or get_current_request().translate
1819 1820
1820 1821 if not self._is_merge_enabled(pull_request):
1821 1822 return None, False, _('Server-side pull request merging is disabled.')
1822 1823
1823 1824 if pull_request.is_closed():
1824 1825 return None, False, _('This pull request is closed.')
1825 1826
1826 1827 merge_possible, msg = self._check_repo_requirements(
1827 1828 target=pull_request.target_repo, source=pull_request.source_repo,
1828 1829 translator=_)
1829 1830 if not merge_possible:
1830 1831 return None, merge_possible, msg
1831 1832
1832 1833 try:
1833 1834 merge_response = self._try_merge(
1834 1835 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1835 1836 log.debug("Merge response: %s", merge_response)
1836 1837 return merge_response, merge_response.possible, merge_response.merge_status_message
1837 1838 except NotImplementedError:
1838 1839 return None, False, _('Pull request merging is not supported.')
1839 1840
1840 1841 def _check_repo_requirements(self, target, source, translator):
1841 1842 """
1842 1843 Check if `target` and `source` have compatible requirements.
1843 1844
1844 1845 Currently this is just checking for largefiles.
1845 1846 """
1846 1847 _ = translator
1847 1848 target_has_largefiles = self._has_largefiles(target)
1848 1849 source_has_largefiles = self._has_largefiles(source)
1849 1850 merge_possible = True
1850 1851 message = u''
1851 1852
1852 1853 if target_has_largefiles != source_has_largefiles:
1853 1854 merge_possible = False
1854 1855 if source_has_largefiles:
1855 1856 message = _(
1856 1857 'Target repository large files support is disabled.')
1857 1858 else:
1858 1859 message = _(
1859 1860 'Source repository large files support is disabled.')
1860 1861
1861 1862 return merge_possible, message
1862 1863
1863 1864 def _has_largefiles(self, repo):
1864 1865 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1865 1866 'extensions', 'largefiles')
1866 1867 return largefiles_ui and largefiles_ui[0].active
1867 1868
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Uses the cached state stored on the pull request when neither the
        source nor the target moved; otherwise runs a dry-run merge in the
        shadow repository.
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        # a locked target repo can never be merged into
        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # reuse the persisted result of the last merge simulation
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    # NOTE(review): separator '\n,' looks odd (',\n'
                    # intended?) — kept as-is; verify template rendering
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1921 1922
1922 1923 def _refresh_reference(self, reference, vcs_repository):
1923 1924 if reference.type in self.UPDATABLE_REF_TYPES:
1924 1925 name_or_id = reference.name
1925 1926 else:
1926 1927 name_or_id = reference.commit_id
1927 1928
1928 1929 refreshed_commit = vcs_repository.get_commit(name_or_id)
1929 1930 refreshed_reference = Reference(
1930 1931 reference.type, reference.name, refreshed_commit.raw_id)
1931 1932 return refreshed_reference
1932 1933
1933 1934 def _needs_merge_state_refresh(self, pull_request, target_reference):
1934 1935 return not(
1935 1936 pull_request.revisions and
1936 1937 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1937 1938 target_reference.commit_id == pull_request._last_merge_target_rev)
1938 1939
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow workspace and persist the result
        on the pull request (revisions, status, metadata, shadow ref).

        :returns: the MergeResponse produced by the backend
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # remember which revisions this result was computed for, so
            # _needs_merge_state_refresh can detect staleness later
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1964 1965
1965 1966 def _workspace_id(self, pull_request):
1966 1967 workspace_id = 'pr-%s' % pull_request.pull_request_id
1967 1968 return workspace_id
1968 1969
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        """
        Build a JSON-serializable description of `repo` (owner, link,
        description) plus its selectable refs for the pull request UI,
        including a select2-shaped variant of the ref groups.
        """
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        # reshape (refs, group_name) pairs into select2 option groups
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
2000 2001
2001 2002 def generate_pullrequest_title(self, source, source_ref, target):
2002 2003 return u'{source}#{at_ref} to {target}'.format(
2003 2004 source=source,
2004 2005 at_ref=source_ref,
2005 2006 target=target,
2006 2007 )
2007 2008
2008 2009 def _cleanup_merge_workspace(self, pull_request):
2009 2010 # Merging related cleanup
2010 2011 repo_id = pull_request.target_repo.repo_id
2011 2012 target_scm = pull_request.target_repo.scm_instance()
2012 2013 workspace_id = self._workspace_id(pull_request)
2013 2014
2014 2015 try:
2015 2016 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2016 2017 except NotImplementedError:
2017 2018 pass
2018 2019
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :returns: tuple of (groups, selected) where groups is a list of
            ([(ref_key, ref_name), ...], group_label) pairs
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # first ref whose id or name matches the requested
                    # commit_id or per-source match value wins
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            # nothing matched: either the request was for a missing ref
            # (error) or no preference was given (fall back to default branch)
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
2084 2085
2085 2086 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2086 2087 hide_whitespace_changes, diff_context):
2087 2088
2088 2089 return self._get_diff_from_pr_or_version(
2089 2090 source_repo, source_ref_id, target_ref_id,
2090 2091 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2091 2092
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):
        """
        Compute the diff between `target_ref_id` and `source_ref_id` using
        the vcs instance behind `source_repo` (a Repository model object or
        an already-resolved vcs repository).
        """
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff
2122 2123
2123 2124 def _is_merge_enabled(self, pull_request):
2124 2125 return self._get_general_setting(
2125 2126 pull_request, 'rhodecode_pr_merge_enabled')
2126 2127
2127 2128 def _use_rebase_for_merging(self, pull_request):
2128 2129 repo_type = pull_request.target_repo.repo_type
2129 2130 if repo_type == 'hg':
2130 2131 return self._get_general_setting(
2131 2132 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2132 2133 elif repo_type == 'git':
2133 2134 return self._get_general_setting(
2134 2135 pull_request, 'rhodecode_git_use_rebase_for_merging')
2135 2136
2136 2137 return False
2137 2138
2138 2139 def _user_name_for_merging(self, pull_request, user):
2139 2140 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2140 2141 if env_user_name_attr and hasattr(user, env_user_name_attr):
2141 2142 user_name_attr = env_user_name_attr
2142 2143 else:
2143 2144 user_name_attr = 'short_contact'
2144 2145
2145 2146 user_name = getattr(user, user_name_attr)
2146 2147 return user_name
2147 2148
2148 2149 def _close_branch_before_merging(self, pull_request):
2149 2150 repo_type = pull_request.target_repo.repo_type
2150 2151 if repo_type == 'hg':
2151 2152 return self._get_general_setting(
2152 2153 pull_request, 'rhodecode_hg_close_branch_before_merging')
2153 2154 elif repo_type == 'git':
2154 2155 return self._get_general_setting(
2155 2156 pull_request, 'rhodecode_git_close_branch_before_merging')
2156 2157
2157 2158 return False
2158 2159
2159 2160 def _get_general_setting(self, pull_request, settings_key, default=False):
2160 2161 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2161 2162 settings = settings_model.get_general_settings()
2162 2163 return settings.get(settings_key, default)
2163 2164
    def _log_audit_action(self, action, action_data, user, pull_request):
        """Store an audit log entry scoped to the pull request's target repo."""
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)
2170 2171
    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions

        :returns: tuple of (get_default_reviewers_data,
            validate_default_reviewers, validate_observers)
        """
        try:
            # EE edition ships the rc_reviewers package
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
            from rc_reviewers.utils import validate_observers
        except ImportError:
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers
            from rhodecode.apps.repository.utils import validate_observers

        return get_default_reviewers_data, validate_default_reviewers, validate_observers
2187 2188
2188 2189
2189 2190 class MergeCheck(object):
2190 2191 """
2191 2192 Perform Merge Checks and returns a check object which stores information
2192 2193 about merge errors, and merge conditions
2193 2194 """
2194 2195 TODO_CHECK = 'todo'
2195 2196 PERM_CHECK = 'perm'
2196 2197 REVIEW_CHECK = 'review'
2197 2198 MERGE_CHECK = 'merge'
2198 2199 WIP_CHECK = 'wip'
2199 2200
    def __init__(self):
        """Initialize an empty check result; populated by :meth:`validate`."""
        self.review_status = None  # calculated review status of the PR
        self.merge_possible = None  # outcome of the merge simulation
        self.merge_msg = ''  # message accompanying the merge outcome
        self.merge_response = None  # raw response of the dry-run merge
        self.failed = None  # set to True by push_error() on any failure
        self.errors = []  # list of [error_type, message] pairs
        self.error_details = OrderedDict()  # error_key -> details mapping
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
        self.reviewers_count = 0
        self.observers_count = 0
2212 2213
2213 2214 def __repr__(self):
2214 2215 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2215 2216 self.merge_possible, self.failed, self.errors)
2216 2217
2217 2218 def push_error(self, error_type, message, error_key, details):
2218 2219 self.failed = True
2219 2220 self.errors.append([error_type, message])
2220 2221 self.error_details[error_key] = dict(
2221 2222 details=details,
2222 2223 error_type=error_type,
2223 2224 message=message
2224 2225 )
2225 2226
    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks (WIP marker, merge permission, branch rules,
        review status, unresolved TODOs, merge simulation) and return a
        populated MergeCheck instance.

        :param fail_early: return right after the first failed check
            instead of collecting all failures
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status
        merge_check.reviewers_count = pull_request.reviewers_count
        merge_check.observers_count = pull_request.observers_count

        # approval is only required when there are reviewers assigned
        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved and merge_check.reviewers_count:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether source/target moved since the PR refs were set
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check
2339 2340
2340 2341 @classmethod
2341 2342 def get_merge_conditions(cls, pull_request, translator):
2342 2343 _ = translator
2343 2344 merge_details = {}
2344 2345
2345 2346 model = PullRequestModel()
2346 2347 use_rebase = model._use_rebase_for_merging(pull_request)
2347 2348
2348 2349 if use_rebase:
2349 2350 merge_details['merge_strategy'] = dict(
2350 2351 details={},
2351 2352 message=_('Merge strategy: rebase')
2352 2353 )
2353 2354 else:
2354 2355 merge_details['merge_strategy'] = dict(
2355 2356 details={},
2356 2357 message=_('Merge strategy: explicit merge commit')
2357 2358 )
2358 2359
2359 2360 close_branch = model._close_branch_before_merging(pull_request)
2360 2361 if close_branch:
2361 2362 repo_type = pull_request.target_repo.repo_type
2362 2363 close_msg = ''
2363 2364 if repo_type == 'hg':
2364 2365 close_msg = _('Source branch will be closed before the merge.')
2365 2366 elif repo_type == 'git':
2366 2367 close_msg = _('Source branch will be deleted after the merge.')
2367 2368
2368 2369 merge_details['close_branch'] = dict(
2369 2370 details={},
2370 2371 message=close_msg
2371 2372 )
2372 2373
2373 2374 return merge_details
2374 2375
2375 2376
# Lightweight immutable records used when reporting pull-request updates.
# Field meanings follow their names; `total` is a count alongside the
# added/common/removed collections.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
General Comments 0
You need to be logged in to leave comments. Login now