@@ -1,162 +1,163 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import os
 import sys
 import json
 import logging
 
 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
 from rhodecode.lib.vcs.conf import settings as vcs_settings
 from rhodecode.model.scm import ScmModel
 
 log = logging.getLogger(__name__)
 
 
 class VcsServer(object):
+    repo_user_agent = None  # set in child classes
     _path = None  # set executable path for hg/git/svn binary
     backend = None  # set in child classes
     tunnel = None  # subprocess handling tunnel
     write_perms = ['repository.admin', 'repository.write']
     read_perms = ['repository.read', 'repository.admin', 'repository.write']
 
     def __init__(self, user, user_permissions, config, env):
         self.user = user
         self.user_permissions = user_permissions
         self.config = config
         self.env = env
         self.stdin = sys.stdin
 
         self.repo_name = None
         self.repo_mode = None
         self.store = ''
         self.ini_path = ''
 
     def _invalidate_cache(self, repo_name):
         """
         Set's cache for this repository for invalidation on next access
 
         :param repo_name: full repo name, also a cache key
         """
         ScmModel().mark_for_invalidation(repo_name)
 
     def has_write_perm(self):
         permission = self.user_permissions.get(self.repo_name)
         if permission in ['repository.write', 'repository.admin']:
             return True
 
         return False
 
     def _check_permissions(self, action):
         permission = self.user_permissions.get(self.repo_name)
         log.debug('permission for %s on %s are: %s',
                   self.user, self.repo_name, permission)
 
         if not permission:
             log.error('user `%s` permissions to repo:%s are empty. Forbidding access.',
                       self.user, self.repo_name)
             return -2
 
         if action == 'pull':
             if permission in self.read_perms:
                 log.info(
                     'READ Permissions for User "%s" detected to repo "%s"!',
                     self.user, self.repo_name)
                 return 0
         else:
             if permission in self.write_perms:
                 log.info(
                     'WRITE, or Higher Permissions for User "%s" detected to repo "%s"!',
                     self.user, self.repo_name)
                 return 0
 
         log.error('Cannot properly fetch or verify user `%s` permissions. '
                   'Permissions: %s, vcs action: %s',
                   self.user, permission, action)
         return -2
 
     def update_environment(self, action, extras=None):
 
         scm_data = {
             'ip': os.environ['SSH_CLIENT'].split()[0],
             'username': self.user.username,
             'user_id': self.user.user_id,
             'action': action,
             'repository': self.repo_name,
             'scm': self.backend,
             'config': self.ini_path,
             'repo_store': self.store,
             'make_lock': None,
             'locked_by': [None, None],
             'server_url': None,
-            'user_agent': 'ssh-user-agent',
+            'user_agent': '{}/ssh-user-agent'.format(self.repo_user_agent),
             'hooks': ['push', 'pull'],
             'hooks_module': 'rhodecode.lib.hooks_daemon',
             'is_shadow_repo': False,
             'detect_force_push': False,
             'check_branch_perms': False,
 
             'SSH': True,
             'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name),
         }
         if extras:
             scm_data.update(extras)
         os.putenv("RC_SCM_DATA", json.dumps(scm_data))
 
     def get_root_store(self):
         root_store = self.store
         if not root_store.endswith('/'):
             # always append trailing slash
             root_store = root_store + '/'
         return root_store
 
     def _handle_tunnel(self, extras):
         # pre-auth
         action = 'pull'
         exit_code = self._check_permissions(action)
         if exit_code:
             return exit_code, False
 
         req = self.env['request']
         server_url = req.host_url + req.script_name
         extras['server_url'] = server_url
 
         log.debug('Using %s binaries from path %s', self.backend, self._path)
         exit_code = self.tunnel.run(extras)
 
         return exit_code, action == "push"
 
     def run(self, tunnel_extras=None):
         tunnel_extras = tunnel_extras or {}
         extras = {}
         extras.update(tunnel_extras)
 
         callback_daemon, extras = prepare_callback_daemon(
             extras, protocol=vcs_settings.HOOKS_PROTOCOL,
             host=vcs_settings.HOOKS_HOST,
             use_direct_calls=False)
 
         with callback_daemon:
             try:
                 return self._handle_tunnel(extras)
             finally:
                 log.debug('Running cleanup with cache invalidation')
                 if self.repo_name:
                     self._invalidate_cache(self.repo_name)
@@ -1,74 +1,75 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import os
 import sys
 import logging
 
 from .base import VcsServer
 
 log = logging.getLogger(__name__)
 
 
 class GitTunnelWrapper(object):
     process = None
 
     def __init__(self, server):
         self.server = server
         self.stdin = sys.stdin
         self.stdout = sys.stdout
 
     def create_hooks_env(self):
         pass
 
     def command(self):
         root = self.server.get_root_store()
         command = "cd {root}; {git_path} {mode} '{root}{repo_name}'".format(
             root=root, git_path=self.server.git_path,
             mode=self.server.repo_mode, repo_name=self.server.repo_name)
         log.debug("Final CMD: %s", command)
         return command
 
     def run(self, extras):
         action = "push" if self.server.repo_mode == "receive-pack" else "pull"
         exit_code = self.server._check_permissions(action)
         if exit_code:
             return exit_code
 
         self.server.update_environment(action=action, extras=extras)
         self.create_hooks_env()
         return os.system(self.command())
 
 
 class GitServer(VcsServer):
     backend = 'git'
+    repo_user_agent = 'git'
 
     def __init__(self, store, ini_path, repo_name, repo_mode,
                  user, user_permissions, config, env):
         super(GitServer, self).\
             __init__(user, user_permissions, config, env)
 
         self.store = store
         self.ini_path = ini_path
         self.repo_name = repo_name
         self._path = self.git_path = config.get('app:main', 'ssh.executable.git')
 
         self.repo_mode = repo_mode
         self.tunnel = GitTunnelWrapper(server=self)
@@ -1,147 +1,148 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import os
 import sys
 import logging
 import tempfile
 import textwrap
 import collections
 from .base import VcsServer
 from rhodecode.model.db import RhodeCodeUi
 from rhodecode.model.settings import VcsSettingsModel
 
 log = logging.getLogger(__name__)
 
 
 class MercurialTunnelWrapper(object):
     process = None
 
     def __init__(self, server):
         self.server = server
         self.stdin = sys.stdin
         self.stdout = sys.stdout
         self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp(prefix='hgrc_rhodecode_')
 
     def create_hooks_env(self):
         repo_name = self.server.repo_name
         hg_flags = self.server.config_to_hgrc(repo_name)
 
         content = textwrap.dedent(
             '''
             # RhodeCode SSH hooks version=2.0.0
             {custom}
             '''
         ).format(custom='\n'.join(hg_flags))
 
         root = self.server.get_root_store()
         hgrc_custom = os.path.join(root, repo_name, '.hg', 'hgrc_rhodecode')
         hgrc_main = os.path.join(root, repo_name, '.hg', 'hgrc')
 
         # cleanup custom hgrc file
         if os.path.isfile(hgrc_custom):
             with open(hgrc_custom, 'wb') as f:
                 f.write('')
             log.debug('Cleanup custom hgrc file under %s', hgrc_custom)
 
         # write temp
         with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
             hooks_env_file.write(content)
 
         return self.hooks_env_path
 
     def remove_configs(self):
         os.remove(self.hooks_env_path)
 
     def command(self, hgrc_path):
         root = self.server.get_root_store()
 
         command = (
             "cd {root}; HGRCPATH={hgrc} {hg_path} -R {root}{repo_name} "
             "serve --stdio".format(
                 root=root, hg_path=self.server.hg_path,
                 repo_name=self.server.repo_name, hgrc=hgrc_path))
         log.debug("Final CMD: %s", command)
         return command
 
     def run(self, extras):
         # at this point we cannot tell, we do further ACL checks
         # inside the hooks
         action = '?'
         # permissions are check via `pre_push_ssh_auth` hook
         self.server.update_environment(action=action, extras=extras)
         custom_hgrc_file = self.create_hooks_env()
 
         try:
             return os.system(self.command(custom_hgrc_file))
         finally:
             self.remove_configs()
 
 
 class MercurialServer(VcsServer):
     backend = 'hg'
+    repo_user_agent = 'mercurial'
     cli_flags = ['phases', 'largefiles', 'extensions', 'experimental', 'hooks']
 
     def __init__(self, store, ini_path, repo_name, user, user_permissions, config, env):
         super(MercurialServer, self).__init__(user, user_permissions, config, env)
 
         self.store = store
         self.ini_path = ini_path
         self.repo_name = repo_name
         self._path = self.hg_path = config.get('app:main', 'ssh.executable.hg')
         self.tunnel = MercurialTunnelWrapper(server=self)
 
     def config_to_hgrc(self, repo_name):
         ui_sections = collections.defaultdict(list)
         ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
 
         # write default hooks
         default_hooks = [
             ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
             ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
             ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
 
             ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
             ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
         ]
 
         for k, v in default_hooks:
             ui_sections['hooks'].append((k, v))
 
         for entry in ui:
             if not entry.active:
                 continue
             sec = entry.section
             key = entry.key
 
             if sec in self.cli_flags:
                 # we want only custom hooks, so we skip builtins
                 if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
                     continue
 
                 ui_sections[sec].append([key, entry.value])
 
         flags = []
         for _sec, key_val in ui_sections.items():
             flags.append(' ')
             flags.append('[{}]'.format(_sec))
             for key, val in key_val:
                 flags.append('{}= {}'.format(key, val))
         return flags
@@ -1,257 +1,258 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import os
 import re
 import sys
 import logging
 import signal
 import tempfile
 from subprocess import Popen, PIPE
 import urlparse
 
 from .base import VcsServer
 
 log = logging.getLogger(__name__)
 
 
 class SubversionTunnelWrapper(object):
     process = None
 
     def __init__(self, server):
         self.server = server
         self.timeout = 30
         self.stdin = sys.stdin
         self.stdout = sys.stdout
         self.svn_conf_fd, self.svn_conf_path = tempfile.mkstemp()
         self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp()
 
         self.read_only = True  # flag that we set to make the hooks readonly
 
     def create_svn_config(self):
         content = (
             '[general]\n'
             'hooks-env = {}\n').format(self.hooks_env_path)
         with os.fdopen(self.svn_conf_fd, 'w') as config_file:
             config_file.write(content)
 
     def create_hooks_env(self):
         content = (
             '[default]\n'
             'LANG = en_US.UTF-8\n')
         if self.read_only:
             content += 'SSH_READ_ONLY = 1\n'
         with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
             hooks_env_file.write(content)
 
     def remove_configs(self):
         os.remove(self.svn_conf_path)
         os.remove(self.hooks_env_path)
 
     def command(self):
         root = self.server.get_root_store()
         username = self.server.user.username
 
         command = [
             self.server.svn_path, '-t',
             '--config-file', self.svn_conf_path,
             '--tunnel-user', username,
             '-r', root]
         log.debug("Final CMD: %s", ' '.join(command))
         return command
 
     def start(self):
         command = self.command()
         self.process = Popen(' '.join(command), stdin=PIPE, shell=True)
 
     def sync(self):
         while self.process.poll() is None:
             next_byte = self.stdin.read(1)
             if not next_byte:
                 break
             self.process.stdin.write(next_byte)
         self.remove_configs()
 
     @property
     def return_code(self):
         return self.process.returncode
 
     def get_first_client_response(self):
         signal.signal(signal.SIGALRM, self.interrupt)
         signal.alarm(self.timeout)
         first_response = self._read_first_client_response()
         signal.alarm(0)
         return (self._parse_first_client_response(first_response)
                 if first_response else None)
 
     def patch_first_client_response(self, response, **kwargs):
         self.create_hooks_env()
         data = response.copy()
         data.update(kwargs)
         data['url'] = self._svn_string(data['url'])
         data['ra_client'] = self._svn_string(data['ra_client'])
         data['client'] = data['client'] or ''
         buffer_ = (
             "( {version} ( {capabilities} ) {url}{ra_client}"
             "( {client}) ) ".format(**data))
         self.process.stdin.write(buffer_)
 
     def fail(self, message):
         print("( failure ( ( 210005 {message} 0: 0 ) ) )".format(
             message=self._svn_string(message)))
         self.remove_configs()
         self.process.kill()
         return 1
 
     def interrupt(self, signum, frame):
         self.fail("Exited by timeout")
 
     def _svn_string(self, str_):
         if not str_:
             return ''
         return '{length}:{string} '.format(length=len(str_), string=str_)
 
     def _read_first_client_response(self):
         buffer_ = ""
         brackets_stack = []
         while True:
             next_byte = self.stdin.read(1)
             buffer_ += next_byte
             if next_byte == "(":
                 brackets_stack.append(next_byte)
             elif next_byte == ")":
                 brackets_stack.pop()
             elif next_byte == " " and not brackets_stack:
                 break
 
         return buffer_
 
     def _parse_first_client_response(self, buffer_):
         """
         According to the Subversion RA protocol, the first request
         should look like:
 
         ( version:number ( cap:word ... ) url:string ? ra-client:string
         ( ? client:string ) )
 
         Please check https://svn.apache.org/repos/asf/subversion/trunk/subversion/libsvn_ra_svn/protocol
         """
         version_re = r'(?P<version>\d+)'
         capabilities_re = r'\(\s(?P<capabilities>[\w\d\-\ ]+)\s\)'
         url_re = r'\d+\:(?P<url>[\W\w]+)'
         ra_client_re = r'(\d+\:(?P<ra_client>[\W\w]+)\s)'
         client_re = r'(\d+\:(?P<client>[\W\w]+)\s)*'
         regex = re.compile(
             r'^\(\s{version}\s{capabilities}\s{url}\s{ra_client}'
             r'\(\s{client}\)\s\)\s*$'.format(
                 version=version_re, capabilities=capabilities_re,
                 url=url_re, ra_client=ra_client_re, client=client_re))
         matcher = regex.match(buffer_)
 
         return matcher.groupdict() if matcher else None
 
     def _match_repo_name(self, url):
         """
         Given an server url, try to match it against ALL known repository names.
         This handles a tricky SVN case for SSH and subdir commits.
         E.g if our repo name is my-svn-repo, a svn commit on file in a subdir would
         result in the url with this subdir added.
         """
         # case 1 direct match, we don't do any "heavy" lookups
         if url in self.server.user_permissions:
             return url
 
         log.debug('Extracting repository name from subdir path %s', url)
         # case 2 we check all permissions, and match closes possible case...
         # NOTE(dan): In this case we only know that url has a subdir parts, it's safe
         # to assume that it will have the repo name as prefix, we ensure the prefix
         # for similar repositories isn't matched by adding a /
         # e.g subgroup/repo-name/ and subgroup/repo-name-1/ would work correct.
         for repo_name in self.server.user_permissions:
             repo_name_prefix = repo_name + '/'
             if url.startswith(repo_name_prefix):
                 log.debug('Found prefix %s match, returning proper repository name',
                           repo_name_prefix)
                 return repo_name
 
         return
 
     def run(self, extras):
         action = 'pull'
         self.create_svn_config()
         self.start()
 
         first_response = self.get_first_client_response()
         if not first_response:
             return self.fail("Repository name cannot be extracted")
 
         url_parts = urlparse.urlparse(first_response['url'])
 
         self.server.repo_name = self._match_repo_name(url_parts.path.strip('/'))
 
         exit_code = self.server._check_permissions(action)
         if exit_code:
             return exit_code
 
         # set the readonly flag to False if we have proper permissions
         if self.server.has_write_perm():
             self.read_only = False
         self.server.update_environment(action=action, extras=extras)
 
         self.patch_first_client_response(first_response)
         self.sync()
         return self.return_code
 
 
 class SubversionServer(VcsServer):
     backend = 'svn'
+    repo_user_agent = 'svn'
 
     def __init__(self, store, ini_path, repo_name,
                  user, user_permissions, config, env):
         super(SubversionServer, self)\
             .__init__(user, user_permissions, config, env)
         self.store = store
         self.ini_path = ini_path
         # NOTE(dan): repo_name at this point is empty,
         # this is set later in .run() based from parsed input stream
         self.repo_name = repo_name
         self._path = self.svn_path = config.get('app:main', 'ssh.executable.svn')
 
         self.tunnel = SubversionTunnelWrapper(server=self)
 
     def _handle_tunnel(self, extras):
 
         # pre-auth
         action = 'pull'
         # Special case for SVN, we extract repo name at later stage
         # exit_code = self._check_permissions(action)
         # if exit_code:
         #     return exit_code, False
 
         req = self.env['request']
         server_url = req.host_url + req.script_name
         extras['server_url'] = server_url
 
         log.debug('Using %s binaries from path %s', self.backend, self._path)
         exit_code = self.tunnel.run(extras)
 
         return exit_code, action == "push"
 
 
@@ -1,156 +1,156 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import json
 import os
 
 import mock
 import pytest
 
 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
 
 
 class GitServerCreator(object):
     root = '/tmp/repo/path/'
     git_path = '/usr/local/bin/git'
     config_data = {
         'app:main': {
             'ssh.executable.git': git_path,
             'vcs.hooks.protocol': 'http',
         }
     }
     repo_name = 'test_git'
     repo_mode = 'receive-pack'
     user = plain_dummy_user()
 
     def __init__(self):
         def config_get(part, key):
             return self.config_data.get(part, {}).get(key)
         self.config_mock = mock.Mock()
         self.config_mock.get = mock.Mock(side_effect=config_get)
 
     def create(self, **kwargs):
         parameters = {
             'store': self.root,
             'ini_path': '',
             'user': self.user,
             'repo_name': self.repo_name,
             'repo_mode': self.repo_mode,
             'user_permissions': {
                 self.repo_name: 'repository.admin'
             },
             'config': self.config_mock,
             'env': plain_dummy_env()
         }
         parameters.update(kwargs)
         server = GitServer(**parameters)
         return server
 
 
 @pytest.fixture()
 def git_server(app):
     return GitServerCreator()
 
 
 class TestGitServer(object):
 
     def test_command(self, git_server):
         server = git_server.create()
         expected_command = (
             'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
                 root=git_server.root, git_path=git_server.git_path,
                 repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
         )
         assert expected_command == server.tunnel.command()
 
     @pytest.mark.parametrize('permissions, action, code', [
         ({}, 'pull', -2),
         ({'test_git': 'repository.read'}, 'pull', 0),
         ({'test_git': 'repository.read'}, 'push', -2),
         ({'test_git': 'repository.write'}, 'push', 0),
         ({'test_git': 'repository.admin'}, 'push', 0),
 
     ])
     def test_permission_checks(self, git_server, permissions, action, code):
         server = git_server.create(user_permissions=permissions)
         result = server._check_permissions(action)
         assert result is code
 
     @pytest.mark.parametrize('permissions, value', [
         ({}, False),
         ({'test_git': 'repository.read'}, False),
         ({'test_git': 'repository.write'}, True),
         ({'test_git': 'repository.admin'}, True),
 
     ])
     def test_has_write_permissions(self, git_server, permissions, value):
         server = git_server.create(user_permissions=permissions)
         result = server.has_write_perm()
         assert result is value
 
     def test_run_returns_executes_command(self, git_server):
         server = git_server.create()
         from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
 
         os.environ['SSH_CLIENT'] = '127.0.0.1'
         with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
             _patch.return_value = 0
             with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
                 exit_code = server.run()
 
         assert exit_code == (0, False)
 
     @pytest.mark.parametrize(
         'repo_mode, action', [
             ['receive-pack', 'push'],
             ['upload-pack', 'pull']
         ])
     def test_update_environment(self, git_server, repo_mode, action):
         server = git_server.create(repo_mode=repo_mode)
         store = server.store
 
         with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
             with mock.patch('os.putenv') as putenv_mock:
                 server.update_environment(action)
 
         expected_data = {
             'username': git_server.user.username,
             'user_id': git_server.user.user_id,
             'scm': 'git',
             'repository': git_server.repo_name,
             'make_lock': None,
             'action': action,
             'ip': '10.10.10.10',
             'locked_by': [None, None],
             'config': '',
             'repo_store': store,
             'server_url': None,
             'hooks': ['push', 'pull'],
             'is_shadow_repo': False,
             'hooks_module': 'rhodecode.lib.hooks_daemon',
             'check_branch_perms': False,
             'detect_force_push': False,
-            'user_agent': u'ssh-user-agent',
+            'user_agent': u'git/ssh-user-agent',
             'SSH': True,
             'SSH_PERMISSIONS': 'repository.admin',
         }
         args, kwargs = putenv_mock.call_args
156 | assert json.loads(args[1]) == expected_data |
|
156 | assert json.loads(args[1]) == expected_data |
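
Note on the final assertion: update_environment() serializes the whole hooks context to JSON and passes it to os.putenv() as the second positional argument; the test only inspects that payload, not the environment key itself. A minimal sketch of the consuming side, assuming a hypothetical key name ENV_KEY (the real variable name is not shown in this diff):

    import json
    import os

    ENV_KEY = 'RC_HOOKS_DATA'  # hypothetical; stands in for whatever key putenv() receives

    def read_hooks_context(environ=os.environ):
        # Parse the JSON payload written by update_environment(); empty dict when absent.
        raw = environ.get(ENV_KEY, '')
        return json.loads(raw) if raw else {}
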
@@ -1,535 +1,538 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2013-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2013-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | Set of hooks run by RhodeCode Enterprise |
|
23 | Set of hooks run by RhodeCode Enterprise | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import logging |
|
27 | import logging | |
28 |
|
28 | |||
29 | import rhodecode |
|
29 | import rhodecode | |
30 | from rhodecode import events |
|
30 | from rhodecode import events | |
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.lib import audit_logger |
|
32 | from rhodecode.lib import audit_logger | |
33 | from rhodecode.lib.utils2 import safe_str |
|
33 | from rhodecode.lib.utils2 import safe_str, user_agent_normalizer | |
34 | from rhodecode.lib.exceptions import ( |
|
34 | from rhodecode.lib.exceptions import ( | |
35 | HTTPLockedRC, HTTPBranchProtected, UserCreationError) |
|
35 | HTTPLockedRC, HTTPBranchProtected, UserCreationError) | |
36 | from rhodecode.model.db import Repository, User |
|
36 | from rhodecode.model.db import Repository, User | |
37 | from rhodecode.lib.statsd_client import StatsdClient |
|
37 | from rhodecode.lib.statsd_client import StatsdClient | |
38 |
|
38 | |||
39 | log = logging.getLogger(__name__) |
|
39 | log = logging.getLogger(__name__) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | class HookResponse(object): |
|
42 | class HookResponse(object): | |
43 | def __init__(self, status, output): |
|
43 | def __init__(self, status, output): | |
44 | self.status = status |
|
44 | self.status = status | |
45 | self.output = output |
|
45 | self.output = output | |
46 |
|
46 | |||
47 | def __add__(self, other): |
|
47 | def __add__(self, other): | |
48 | other_status = getattr(other, 'status', 0) |
|
48 | other_status = getattr(other, 'status', 0) | |
49 | new_status = max(self.status, other_status) |
|
49 | new_status = max(self.status, other_status) | |
50 | other_output = getattr(other, 'output', '') |
|
50 | other_output = getattr(other, 'output', '') | |
51 | new_output = self.output + other_output |
|
51 | new_output = self.output + other_output | |
52 |
|
52 | |||
53 | return HookResponse(new_status, new_output) |
|
53 | return HookResponse(new_status, new_output) | |
54 |
|
54 | |||
55 | def __bool__(self): |
|
55 | def __bool__(self): | |
56 | return self.status == 0 |
|
56 | return self.status == 0 | |
57 |
|
57 | |||
58 |
|
58 | |||
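
HookResponse objects combine with +: statuses are max()-ed and outputs concatenated, so one failing hook makes the whole chain report failure. A small usage sketch relying only on the class defined above:

    ok = HookResponse(0, 'lock check passed\n')
    ext = HookResponse(0, 'extension output\n')
    rejected = HookResponse(2, 'rejected by rule\n')

    chain = ok + ext
    assert chain.status == 0
    assert chain.output == 'lock check passed\nextension output\n'
    assert (ok + rejected).status == 2   # the worst status dominates
    assert (ok + '').status == 0         # plain strings combine too, which is why
                                         # hook_response below can start out as ''
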
59 | def is_shadow_repo(extras): |
|
59 | def is_shadow_repo(extras): | |
60 | """ |
|
60 | """ | |
61 | Returns ``True`` if this is an action executed against a shadow repository. |
|
61 | Returns ``True`` if this is an action executed against a shadow repository. | |
62 | """ |
|
62 | """ | |
63 | return extras['is_shadow_repo'] |
|
63 | return extras['is_shadow_repo'] | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | def _get_scm_size(alias, root_path): |
|
66 | def _get_scm_size(alias, root_path): | |
67 |
|
67 | |||
68 | if not alias.startswith('.'): |
|
68 | if not alias.startswith('.'): | |
69 | alias += '.' |
|
69 | alias += '.' | |
70 |
|
70 | |||
71 | size_scm, size_root = 0, 0 |
|
71 | size_scm, size_root = 0, 0 | |
72 | for path, unused_dirs, files in os.walk(safe_str(root_path)): |
|
72 | for path, unused_dirs, files in os.walk(safe_str(root_path)): | |
73 | if path.find(alias) != -1: |
|
73 | if path.find(alias) != -1: | |
74 | for f in files: |
|
74 | for f in files: | |
75 | try: |
|
75 | try: | |
76 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
76 | size_scm += os.path.getsize(os.path.join(path, f)) | |
77 | except OSError: |
|
77 | except OSError: | |
78 | pass |
|
78 | pass | |
79 | else: |
|
79 | else: | |
80 | for f in files: |
|
80 | for f in files: | |
81 | try: |
|
81 | try: | |
82 | size_root += os.path.getsize(os.path.join(path, f)) |
|
82 | size_root += os.path.getsize(os.path.join(path, f)) | |
83 | except OSError: |
|
83 | except OSError: | |
84 | pass |
|
84 | pass | |
85 |
|
85 | |||
86 | size_scm_f = h.format_byte_size_binary(size_scm) |
|
86 | size_scm_f = h.format_byte_size_binary(size_scm) | |
87 | size_root_f = h.format_byte_size_binary(size_root) |
|
87 | size_root_f = h.format_byte_size_binary(size_root) | |
88 | size_total_f = h.format_byte_size_binary(size_root + size_scm) |
|
88 | size_total_f = h.format_byte_size_binary(size_root + size_scm) | |
89 |
|
89 | |||
90 | return size_scm_f, size_root_f, size_total_f |
|
90 | return size_scm_f, size_root_f, size_total_f | |
91 |
|
91 | |||
92 |
|
92 | |||
93 | # actual hooks called by Mercurial internally, and GIT by our Python Hooks |
|
93 | # actual hooks called by Mercurial internally, and GIT by our Python Hooks | |
94 | def repo_size(extras): |
|
94 | def repo_size(extras): | |
95 | """Present size of repository after push.""" |
|
95 | """Present size of repository after push.""" | |
96 | repo = Repository.get_by_repo_name(extras.repository) |
|
96 | repo = Repository.get_by_repo_name(extras.repository) | |
97 | vcs_part = safe_str(u'.%s' % repo.repo_type) |
|
97 | vcs_part = safe_str(u'.%s' % repo.repo_type) | |
98 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, |
|
98 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, | |
99 | repo.repo_full_path) |
|
99 | repo.repo_full_path) | |
100 | msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n' |
|
100 | msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n' | |
101 | % (repo.repo_name, vcs_part, size_vcs, size_root, size_total)) |
|
101 | % (repo.repo_name, vcs_part, size_vcs, size_root, size_total)) | |
102 | return HookResponse(0, msg) |
|
102 | return HookResponse(0, msg) | |
103 |
|
103 | |||
104 |
|
104 | |||
105 | def pre_push(extras): |
|
105 | def pre_push(extras): | |
106 | """ |
|
106 | """ | |
107 | Hook executed before pushing code. |
|
107 | Hook executed before pushing code. | |
108 |
|
108 | |||
109 | It bans pushing when the repository is locked. |
|
109 | It bans pushing when the repository is locked. | |
110 | """ |
|
110 | """ | |
111 |
|
111 | |||
112 | user = User.get_by_username(extras.username) |
|
112 | user = User.get_by_username(extras.username) | |
113 | output = '' |
|
113 | output = '' | |
114 | if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]): |
|
114 | if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]): | |
115 | locked_by = User.get(extras.locked_by[0]).username |
|
115 | locked_by = User.get(extras.locked_by[0]).username | |
116 | reason = extras.locked_by[2] |
|
116 | reason = extras.locked_by[2] | |
117 | # this exception is interpreted in git/hg middlewares and based |
|
117 | # this exception is interpreted in git/hg middlewares and based | |
118 | # on that the proper return code is served to the client |
|
118 | # on that the proper return code is served to the client | |
119 | _http_ret = HTTPLockedRC( |
|
119 | _http_ret = HTTPLockedRC( | |
120 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
120 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
121 | if str(_http_ret.code).startswith('2'): |
|
121 | if str(_http_ret.code).startswith('2'): | |
122 | # 2xx Codes don't raise exceptions |
|
122 | # 2xx Codes don't raise exceptions | |
123 | output = _http_ret.title |
|
123 | output = _http_ret.title | |
124 | else: |
|
124 | else: | |
125 | raise _http_ret |
|
125 | raise _http_ret | |
126 |
|
126 | |||
127 | hook_response = '' |
|
127 | hook_response = '' | |
128 | if not is_shadow_repo(extras): |
|
128 | if not is_shadow_repo(extras): | |
129 | if extras.commit_ids and extras.check_branch_perms: |
|
129 | if extras.commit_ids and extras.check_branch_perms: | |
130 |
|
130 | |||
131 | auth_user = user.AuthUser() |
|
131 | auth_user = user.AuthUser() | |
132 | repo = Repository.get_by_repo_name(extras.repository) |
|
132 | repo = Repository.get_by_repo_name(extras.repository) | |
133 | affected_branches = [] |
|
133 | affected_branches = [] | |
134 | if repo.repo_type == 'hg': |
|
134 | if repo.repo_type == 'hg': | |
135 | for entry in extras.commit_ids: |
|
135 | for entry in extras.commit_ids: | |
136 | if entry['type'] == 'branch': |
|
136 | if entry['type'] == 'branch': | |
137 | is_forced = bool(entry['multiple_heads']) |
|
137 | is_forced = bool(entry['multiple_heads']) | |
138 | affected_branches.append([entry['name'], is_forced]) |
|
138 | affected_branches.append([entry['name'], is_forced]) | |
139 | elif repo.repo_type == 'git': |
|
139 | elif repo.repo_type == 'git': | |
140 | for entry in extras.commit_ids: |
|
140 | for entry in extras.commit_ids: | |
141 | if entry['type'] == 'heads': |
|
141 | if entry['type'] == 'heads': | |
142 | is_forced = bool(entry['pruned_sha']) |
|
142 | is_forced = bool(entry['pruned_sha']) | |
143 | affected_branches.append([entry['name'], is_forced]) |
|
143 | affected_branches.append([entry['name'], is_forced]) | |
144 |
|
144 | |||
145 | for branch_name, is_forced in affected_branches: |
|
145 | for branch_name, is_forced in affected_branches: | |
146 |
|
146 | |||
147 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
147 | rule, branch_perm = auth_user.get_rule_and_branch_permission( | |
148 | extras.repository, branch_name) |
|
148 | extras.repository, branch_name) | |
149 | if not branch_perm: |
|
149 | if not branch_perm: | |
150 | # no branch permission found for this branch, just keep checking |
|
150 | # no branch permission found for this branch, just keep checking | |
151 | continue |
|
151 | continue | |
152 |
|
152 | |||
153 | if branch_perm == 'branch.push_force': |
|
153 | if branch_perm == 'branch.push_force': | |
154 | continue |
|
154 | continue | |
155 | elif branch_perm == 'branch.push' and is_forced is False: |
|
155 | elif branch_perm == 'branch.push' and is_forced is False: | |
156 | continue |
|
156 | continue | |
157 | elif branch_perm == 'branch.push' and is_forced is True: |
|
157 | elif branch_perm == 'branch.push' and is_forced is True: | |
158 | halt_message = 'Branch `{}` changes rejected by rule {}. ' \ |
|
158 | halt_message = 'Branch `{}` changes rejected by rule {}. ' \ | |
159 | 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule) |
|
159 | 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule) | |
160 | else: |
|
160 | else: | |
161 | halt_message = 'Branch `{}` changes rejected by rule {}.'.format( |
|
161 | halt_message = 'Branch `{}` changes rejected by rule {}.'.format( | |
162 | branch_name, rule) |
|
162 | branch_name, rule) | |
163 |
|
163 | |||
164 | if halt_message: |
|
164 | if halt_message: | |
165 | _http_ret = HTTPBranchProtected(halt_message) |
|
165 | _http_ret = HTTPBranchProtected(halt_message) | |
166 | raise _http_ret |
|
166 | raise _http_ret | |
167 |
|
167 | |||
168 | # Propagate to external components. This is done after checking the |
|
168 | # Propagate to external components. This is done after checking the | |
169 | # lock, for consistent behavior. |
|
169 | # lock, for consistent behavior. | |
170 | hook_response = pre_push_extension( |
|
170 | hook_response = pre_push_extension( | |
171 | repo_store_path=Repository.base_path(), **extras) |
|
171 | repo_store_path=Repository.base_path(), **extras) | |
172 | events.trigger(events.RepoPrePushEvent( |
|
172 | events.trigger(events.RepoPrePushEvent( | |
173 | repo_name=extras.repository, extras=extras)) |
|
173 | repo_name=extras.repository, extras=extras)) | |
174 |
|
174 | |||
175 | return HookResponse(0, output) + hook_response |
|
175 | return HookResponse(0, output) + hook_response | |
176 |
|
176 | |||
177 |
|
177 | |||
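
The branch-protection loop above boils down to a small decision rule: no matching rule means keep checking, branch.push_force always passes, branch.push passes only non-forced updates, and any other rule value rejects the change. A condensed, self-contained restatement of that logic (not the hook itself), useful for reasoning about the outcomes:

    def branch_push_allowed(branch_perm, is_forced):
        # Mirrors the per-branch checks in pre_push(); a falsy perm means no rule applied.
        if not branch_perm:
            return True
        if branch_perm == 'branch.push_force':
            return True
        if branch_perm == 'branch.push':
            return not is_forced
        return False

    assert branch_push_allowed(None, True)                  # no rule found
    assert branch_push_allowed('branch.push', False)        # normal push allowed
    assert not branch_push_allowed('branch.push', True)     # force push forbidden
    assert not branch_push_allowed('branch.merge', False)   # any other rule value rejects
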
178 | def pre_pull(extras): |
|
178 | def pre_pull(extras): | |
179 | """ |
|
179 | """ | |
180 | Hook executed before pulling the code. |
|
180 | Hook executed before pulling the code. | |
181 |
|
181 | |||
182 | It bans pulling when the repository is locked. |
|
182 | It bans pulling when the repository is locked. | |
183 | """ |
|
183 | """ | |
184 |
|
184 | |||
185 | output = '' |
|
185 | output = '' | |
186 | if extras.locked_by[0]: |
|
186 | if extras.locked_by[0]: | |
187 | locked_by = User.get(extras.locked_by[0]).username |
|
187 | locked_by = User.get(extras.locked_by[0]).username | |
188 | reason = extras.locked_by[2] |
|
188 | reason = extras.locked_by[2] | |
189 | # this exception is interpreted in git/hg middlewares and based |
|
189 | # this exception is interpreted in git/hg middlewares and based | |
190 | # on that the proper return code is served to the client |
|
190 | # on that the proper return code is served to the client | |
191 | _http_ret = HTTPLockedRC( |
|
191 | _http_ret = HTTPLockedRC( | |
192 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
192 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
193 | if str(_http_ret.code).startswith('2'): |
|
193 | if str(_http_ret.code).startswith('2'): | |
194 | # 2xx Codes don't raise exceptions |
|
194 | # 2xx Codes don't raise exceptions | |
195 | output = _http_ret.title |
|
195 | output = _http_ret.title | |
196 | else: |
|
196 | else: | |
197 | raise _http_ret |
|
197 | raise _http_ret | |
198 |
|
198 | |||
199 | # Propagate to external components. This is done after checking the |
|
199 | # Propagate to external components. This is done after checking the | |
200 | # lock, for consistent behavior. |
|
200 | # lock, for consistent behavior. | |
201 | hook_response = '' |
|
201 | hook_response = '' | |
202 | if not is_shadow_repo(extras): |
|
202 | if not is_shadow_repo(extras): | |
203 | extras.hook_type = extras.hook_type or 'pre_pull' |
|
203 | extras.hook_type = extras.hook_type or 'pre_pull' | |
204 | hook_response = pre_pull_extension( |
|
204 | hook_response = pre_pull_extension( | |
205 | repo_store_path=Repository.base_path(), **extras) |
|
205 | repo_store_path=Repository.base_path(), **extras) | |
206 | events.trigger(events.RepoPrePullEvent( |
|
206 | events.trigger(events.RepoPrePullEvent( | |
207 | repo_name=extras.repository, extras=extras)) |
|
207 | repo_name=extras.repository, extras=extras)) | |
208 |
|
208 | |||
209 | return HookResponse(0, output) + hook_response |
|
209 | return HookResponse(0, output) + hook_response | |
210 |
|
210 | |||
211 |
|
211 | |||
212 | def post_pull(extras): |
|
212 | def post_pull(extras): | |
213 | """Hook executed after client pulls the code.""" |
|
213 | """Hook executed after client pulls the code.""" | |
214 |
|
214 | |||
215 | audit_user = audit_logger.UserWrap( |
|
215 | audit_user = audit_logger.UserWrap( | |
216 | username=extras.username, |
|
216 | username=extras.username, | |
217 | ip_addr=extras.ip) |
|
217 | ip_addr=extras.ip) | |
218 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
218 | repo = audit_logger.RepoWrap(repo_name=extras.repository) | |
219 | audit_logger.store( |
|
219 | audit_logger.store( | |
220 | 'user.pull', action_data={'user_agent': extras.user_agent}, |
|
220 | 'user.pull', action_data={'user_agent': extras.user_agent}, | |
221 | user=audit_user, repo=repo, commit=True) |
|
221 | user=audit_user, repo=repo, commit=True) | |
222 |
|
222 | |||
223 | statsd = StatsdClient.statsd |
|
223 | statsd = StatsdClient.statsd | |
224 | if statsd: |
|
224 | if statsd: | |
225 | statsd.incr('rhodecode_pull_total') |
|
225 | statsd.incr('rhodecode_pull_total', tags=[ | |
226 |
|
226 | 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)), | ||
|
227 | ]) | |||
227 | output = '' |
|
228 | output = '' | |
228 | # make lock is a tri state False, True, None. We only make lock on True |
|
229 | # make lock is a tri state False, True, None. We only make lock on True | |
229 | if extras.make_lock is True and not is_shadow_repo(extras): |
|
230 | if extras.make_lock is True and not is_shadow_repo(extras): | |
230 | user = User.get_by_username(extras.username) |
|
231 | user = User.get_by_username(extras.username) | |
231 | Repository.lock(Repository.get_by_repo_name(extras.repository), |
|
232 | Repository.lock(Repository.get_by_repo_name(extras.repository), | |
232 | user.user_id, |
|
233 | user.user_id, | |
233 | lock_reason=Repository.LOCK_PULL) |
|
234 | lock_reason=Repository.LOCK_PULL) | |
234 | msg = 'Made lock on repo `%s`' % (extras.repository,) |
|
235 | msg = 'Made lock on repo `%s`' % (extras.repository,) | |
235 | output += msg |
|
236 | output += msg | |
236 |
|
237 | |||
237 | if extras.locked_by[0]: |
|
238 | if extras.locked_by[0]: | |
238 | locked_by = User.get(extras.locked_by[0]).username |
|
239 | locked_by = User.get(extras.locked_by[0]).username | |
239 | reason = extras.locked_by[2] |
|
240 | reason = extras.locked_by[2] | |
240 | _http_ret = HTTPLockedRC( |
|
241 | _http_ret = HTTPLockedRC( | |
241 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
242 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
242 | if str(_http_ret.code).startswith('2'): |
|
243 | if str(_http_ret.code).startswith('2'): | |
243 | # 2xx Codes don't raise exceptions |
|
244 | # 2xx Codes don't raise exceptions | |
244 | output += _http_ret.title |
|
245 | output += _http_ret.title | |
245 |
|
246 | |||
246 | # Propagate to external components. |
|
247 | # Propagate to external components. | |
247 | hook_response = '' |
|
248 | hook_response = '' | |
248 | if not is_shadow_repo(extras): |
|
249 | if not is_shadow_repo(extras): | |
249 | extras.hook_type = extras.hook_type or 'post_pull' |
|
250 | extras.hook_type = extras.hook_type or 'post_pull' | |
250 | hook_response = post_pull_extension( |
|
251 | hook_response = post_pull_extension( | |
251 | repo_store_path=Repository.base_path(), **extras) |
|
252 | repo_store_path=Repository.base_path(), **extras) | |
252 | events.trigger(events.RepoPullEvent( |
|
253 | events.trigger(events.RepoPullEvent( | |
253 | repo_name=extras.repository, extras=extras)) |
|
254 | repo_name=extras.repository, extras=extras)) | |
254 |
|
255 | |||
255 | return HookResponse(0, output) + hook_response |
|
256 | return HookResponse(0, output) + hook_response | |
256 |
|
257 | |||
257 |
|
258 | |||
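
The functional change in post_pull() (and in post_push() further down) is that the statsd counter now carries a user-agent tag produced by the newly imported user_agent_normalizer(). Its implementation is not part of this diff; the sketch below is only an assumption about the kind of normalization such a helper might perform, namely collapsing raw client strings into a small set of tag values:

    import re

    def user_agent_normalizer_sketch(user_agent):
        # Hypothetical stand-in, not the real rhodecode.lib.utils2 function:
        # reduce 'git/2.30.1' or 'mercurial/proto-1.0' to a low-cardinality tag.
        user_agent = (user_agent or '').lower()
        match = re.match(r'^(git|mercurial|hg|svn)\b', user_agent)
        return match.group(1) if match else 'other'

    assert user_agent_normalizer_sketch('git/2.30.1') == 'git'
    assert user_agent_normalizer_sketch(None) == 'other'

Keeping the tag values bounded matters because every distinct tag becomes a separate statsd time series, which is presumably why the raw agent string is not used directly.
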
258 | def post_push(extras): |
|
259 | def post_push(extras): | |
259 | """Hook executed after user pushes to the repository.""" |
|
260 | """Hook executed after user pushes to the repository.""" | |
260 | commit_ids = extras.commit_ids |
|
261 | commit_ids = extras.commit_ids | |
261 |
|
262 | |||
262 | # log the push call |
|
263 | # log the push call | |
263 | audit_user = audit_logger.UserWrap( |
|
264 | audit_user = audit_logger.UserWrap( | |
264 | username=extras.username, ip_addr=extras.ip) |
|
265 | username=extras.username, ip_addr=extras.ip) | |
265 | repo = audit_logger.RepoWrap(repo_name=extras.repository) |
|
266 | repo = audit_logger.RepoWrap(repo_name=extras.repository) | |
266 | audit_logger.store( |
|
267 | audit_logger.store( | |
267 | 'user.push', action_data={ |
|
268 | 'user.push', action_data={ | |
268 | 'user_agent': extras.user_agent, |
|
269 | 'user_agent': extras.user_agent, | |
269 | 'commit_ids': commit_ids[:400]}, |
|
270 | 'commit_ids': commit_ids[:400]}, | |
270 | user=audit_user, repo=repo, commit=True) |
|
271 | user=audit_user, repo=repo, commit=True) | |
271 |
|
272 | |||
272 | statsd = StatsdClient.statsd |
|
273 | statsd = StatsdClient.statsd | |
273 | if statsd: |
|
274 | if statsd: | |
274 | statsd.incr('rhodecode_push_total') |
|
275 | statsd.incr('rhodecode_push_total', tags=[ | |
|
276 | 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)), | |||
|
277 | ]) | |||
275 |
|
278 | |||
276 | # Propagate to external components. |
|
279 | # Propagate to external components. | |
277 | output = '' |
|
280 | output = '' | |
278 | # make lock is a tri state False, True, None. We only release lock on False |
|
281 | # make lock is a tri state False, True, None. We only release lock on False | |
279 | if extras.make_lock is False and not is_shadow_repo(extras): |
|
282 | if extras.make_lock is False and not is_shadow_repo(extras): | |
280 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) |
|
283 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) | |
281 | msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository)) |
|
284 | msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository)) | |
282 | output += msg |
|
285 | output += msg | |
283 |
|
286 | |||
284 | if extras.locked_by[0]: |
|
287 | if extras.locked_by[0]: | |
285 | locked_by = User.get(extras.locked_by[0]).username |
|
288 | locked_by = User.get(extras.locked_by[0]).username | |
286 | reason = extras.locked_by[2] |
|
289 | reason = extras.locked_by[2] | |
287 | _http_ret = HTTPLockedRC( |
|
290 | _http_ret = HTTPLockedRC( | |
288 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
291 | _locked_by_explanation(extras.repository, locked_by, reason)) | |
289 | # TODO: johbo: if not? |
|
292 | # TODO: johbo: if not? | |
290 | if str(_http_ret.code).startswith('2'): |
|
293 | if str(_http_ret.code).startswith('2'): | |
291 | # 2xx Codes don't raise exceptions |
|
294 | # 2xx Codes don't raise exceptions | |
292 | output += _http_ret.title |
|
295 | output += _http_ret.title | |
293 |
|
296 | |||
294 | if extras.new_refs: |
|
297 | if extras.new_refs: | |
295 | tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( |
|
298 | tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( | |
296 | safe_str(extras.server_url), safe_str(extras.repository)) |
|
299 | safe_str(extras.server_url), safe_str(extras.repository)) | |
297 |
|
300 | |||
298 | for branch_name in extras.new_refs['branches']: |
|
301 | for branch_name in extras.new_refs['branches']: | |
299 | output += 'RhodeCode: open pull request link: {}\n'.format( |
|
302 | output += 'RhodeCode: open pull request link: {}\n'.format( | |
300 | tmpl.format(ref_type='branch', ref_name=safe_str(branch_name))) |
|
303 | tmpl.format(ref_type='branch', ref_name=safe_str(branch_name))) | |
301 |
|
304 | |||
302 | for book_name in extras.new_refs['bookmarks']: |
|
305 | for book_name in extras.new_refs['bookmarks']: | |
303 | output += 'RhodeCode: open pull request link: {}\n'.format( |
|
306 | output += 'RhodeCode: open pull request link: {}\n'.format( | |
304 | tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name))) |
|
307 | tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name))) | |
305 |
|
308 | |||
306 | hook_response = '' |
|
309 | hook_response = '' | |
307 | if not is_shadow_repo(extras): |
|
310 | if not is_shadow_repo(extras): | |
308 | hook_response = post_push_extension( |
|
311 | hook_response = post_push_extension( | |
309 | repo_store_path=Repository.base_path(), |
|
312 | repo_store_path=Repository.base_path(), | |
310 | **extras) |
|
313 | **extras) | |
311 | events.trigger(events.RepoPushEvent( |
|
314 | events.trigger(events.RepoPushEvent( | |
312 | repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) |
|
315 | repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) | |
313 |
|
316 | |||
314 | output += 'RhodeCode: push completed\n' |
|
317 | output += 'RhodeCode: push completed\n' | |
315 | return HookResponse(0, output) + hook_response |
|
318 | return HookResponse(0, output) + hook_response | |
316 |
|
319 | |||
317 |
|
320 | |||
318 | def _locked_by_explanation(repo_name, user_name, reason): |
|
321 | def _locked_by_explanation(repo_name, user_name, reason): | |
319 | message = ( |
|
322 | message = ( | |
320 | 'Repository `%s` locked by user `%s`. Reason:`%s`' |
|
323 | 'Repository `%s` locked by user `%s`. Reason:`%s`' | |
321 | % (repo_name, user_name, reason)) |
|
324 | % (repo_name, user_name, reason)) | |
322 | return message |
|
325 | return message | |
323 |
|
326 | |||
324 |
|
327 | |||
325 | def check_allowed_create_user(user_dict, created_by, **kwargs): |
|
328 | def check_allowed_create_user(user_dict, created_by, **kwargs): | |
326 | # pre create hooks |
|
329 | # pre create hooks | |
327 | if pre_create_user.is_active(): |
|
330 | if pre_create_user.is_active(): | |
328 | hook_result = pre_create_user(created_by=created_by, **user_dict) |
|
331 | hook_result = pre_create_user(created_by=created_by, **user_dict) | |
329 | allowed = hook_result.status == 0 |
|
332 | allowed = hook_result.status == 0 | |
330 | if not allowed: |
|
333 | if not allowed: | |
331 | reason = hook_result.output |
|
334 | reason = hook_result.output | |
332 | raise UserCreationError(reason) |
|
335 | raise UserCreationError(reason) | |
333 |
|
336 | |||
334 |
|
337 | |||
335 | class ExtensionCallback(object): |
|
338 | class ExtensionCallback(object): | |
336 | """ |
|
339 | """ | |
337 | Forwards a given call to rcextensions, sanitizes keyword arguments. |
|
340 | Forwards a given call to rcextensions, sanitizes keyword arguments. | |
338 |
|
341 | |||
339 | Does check if there is an extension active for that hook. If it is |
|
342 | Does check if there is an extension active for that hook. If it is | |
340 | there, it will forward all `kwargs_keys` keyword arguments to the |
|
343 | there, it will forward all `kwargs_keys` keyword arguments to the | |
341 | extension callback. |
|
344 | extension callback. | |
342 | """ |
|
345 | """ | |
343 |
|
346 | |||
344 | def __init__(self, hook_name, kwargs_keys): |
|
347 | def __init__(self, hook_name, kwargs_keys): | |
345 | self._hook_name = hook_name |
|
348 | self._hook_name = hook_name | |
346 | self._kwargs_keys = set(kwargs_keys) |
|
349 | self._kwargs_keys = set(kwargs_keys) | |
347 |
|
350 | |||
348 | def __call__(self, *args, **kwargs): |
|
351 | def __call__(self, *args, **kwargs): | |
349 | log.debug('Calling extension callback for `%s`', self._hook_name) |
|
352 | log.debug('Calling extension callback for `%s`', self._hook_name) | |
350 | callback = self._get_callback() |
|
353 | callback = self._get_callback() | |
351 | if not callback: |
|
354 | if not callback: | |
352 | log.debug('extension callback `%s` not found, skipping...', self._hook_name) |
|
355 | log.debug('extension callback `%s` not found, skipping...', self._hook_name) | |
353 | return |
|
356 | return | |
354 |
|
357 | |||
355 | kwargs_to_pass = {} |
|
358 | kwargs_to_pass = {} | |
356 | for key in self._kwargs_keys: |
|
359 | for key in self._kwargs_keys: | |
357 | try: |
|
360 | try: | |
358 | kwargs_to_pass[key] = kwargs[key] |
|
361 | kwargs_to_pass[key] = kwargs[key] | |
359 | except KeyError: |
|
362 | except KeyError: | |
360 | log.error('Failed to fetch %s key from given kwargs. ' |
|
363 | log.error('Failed to fetch %s key from given kwargs. ' | |
361 | 'Expected keys: %s', key, self._kwargs_keys) |
|
364 | 'Expected keys: %s', key, self._kwargs_keys) | |
362 | raise |
|
365 | raise | |
363 |
|
366 | |||
364 | # backward compat for removed api_key for old hooks. This way it works |
|
367 | # backward compat for removed api_key for old hooks. This way it works | |
365 | # with older rcextensions that require api_key present |
|
368 | # with older rcextensions that require api_key present | |
366 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: |
|
369 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: | |
367 | kwargs_to_pass['api_key'] = '_DEPRECATED_' |
|
370 | kwargs_to_pass['api_key'] = '_DEPRECATED_' | |
368 | return callback(**kwargs_to_pass) |
|
371 | return callback(**kwargs_to_pass) | |
369 |
|
372 | |||
370 | def is_active(self): |
|
373 | def is_active(self): | |
371 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) |
|
374 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) | |
372 |
|
375 | |||
373 | def _get_callback(self): |
|
376 | def _get_callback(self): | |
374 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) |
|
377 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) | |
375 |
|
378 | |||
376 |
|
379 | |||
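
ExtensionCallback forwards only the whitelisted kwargs_keys to whatever callable is registered on rhodecode.EXTENSIONS under hook_name: keys outside the whitelist are dropped, and a whitelisted key missing from the call is logged and re-raised as KeyError. A condensed, self-contained restatement of that filtering step (not the class itself):

    def _forwarded_kwargs(kwargs_keys, **kwargs):
        # Same selection ExtensionCallback.__call__ performs before invoking the
        # callback; a whitelisted key absent from kwargs raises KeyError.
        return {key: kwargs[key] for key in kwargs_keys}

    passed = _forwarded_kwargs(
        ('repository', 'username', 'action'),
        repository='test_repo', username='admin', action='push',
        locked_by=[None, None])   # not whitelisted, silently dropped
    assert passed == {'repository': 'test_repo', 'username': 'admin', 'action': 'push'}
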
377 | pre_pull_extension = ExtensionCallback( |
|
380 | pre_pull_extension = ExtensionCallback( | |
378 | hook_name='PRE_PULL_HOOK', |
|
381 | hook_name='PRE_PULL_HOOK', | |
379 | kwargs_keys=( |
|
382 | kwargs_keys=( | |
380 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
383 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
381 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) |
|
384 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) | |
382 |
|
385 | |||
383 |
|
386 | |||
384 | post_pull_extension = ExtensionCallback( |
|
387 | post_pull_extension = ExtensionCallback( | |
385 | hook_name='PULL_HOOK', |
|
388 | hook_name='PULL_HOOK', | |
386 | kwargs_keys=( |
|
389 | kwargs_keys=( | |
387 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
390 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
388 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) |
|
391 | 'repository', 'hook_type', 'user_agent', 'repo_store_path',)) | |
389 |
|
392 | |||
390 |
|
393 | |||
391 | pre_push_extension = ExtensionCallback( |
|
394 | pre_push_extension = ExtensionCallback( | |
392 | hook_name='PRE_PUSH_HOOK', |
|
395 | hook_name='PRE_PUSH_HOOK', | |
393 | kwargs_keys=( |
|
396 | kwargs_keys=( | |
394 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
397 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
395 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) |
|
398 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) | |
396 |
|
399 | |||
397 |
|
400 | |||
398 | post_push_extension = ExtensionCallback( |
|
401 | post_push_extension = ExtensionCallback( | |
399 | hook_name='PUSH_HOOK', |
|
402 | hook_name='PUSH_HOOK', | |
400 | kwargs_keys=( |
|
403 | kwargs_keys=( | |
401 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
404 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
402 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) |
|
405 | 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',)) | |
403 |
|
406 | |||
404 |
|
407 | |||
405 | pre_create_user = ExtensionCallback( |
|
408 | pre_create_user = ExtensionCallback( | |
406 | hook_name='PRE_CREATE_USER_HOOK', |
|
409 | hook_name='PRE_CREATE_USER_HOOK', | |
407 | kwargs_keys=( |
|
410 | kwargs_keys=( | |
408 | 'username', 'password', 'email', 'firstname', 'lastname', 'active', |
|
411 | 'username', 'password', 'email', 'firstname', 'lastname', 'active', | |
409 | 'admin', 'created_by')) |
|
412 | 'admin', 'created_by')) | |
410 |
|
413 | |||
411 |
|
414 | |||
412 | create_pull_request = ExtensionCallback( |
|
415 | create_pull_request = ExtensionCallback( | |
413 | hook_name='CREATE_PULL_REQUEST', |
|
416 | hook_name='CREATE_PULL_REQUEST', | |
414 | kwargs_keys=( |
|
417 | kwargs_keys=( | |
415 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
418 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
416 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
419 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
417 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
420 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
418 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
421 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
419 |
|
422 | |||
420 |
|
423 | |||
421 | merge_pull_request = ExtensionCallback( |
|
424 | merge_pull_request = ExtensionCallback( | |
422 | hook_name='MERGE_PULL_REQUEST', |
|
425 | hook_name='MERGE_PULL_REQUEST', | |
423 | kwargs_keys=( |
|
426 | kwargs_keys=( | |
424 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
427 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
425 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
428 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
426 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
429 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
427 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
430 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
428 |
|
431 | |||
429 |
|
432 | |||
430 | close_pull_request = ExtensionCallback( |
|
433 | close_pull_request = ExtensionCallback( | |
431 | hook_name='CLOSE_PULL_REQUEST', |
|
434 | hook_name='CLOSE_PULL_REQUEST', | |
432 | kwargs_keys=( |
|
435 | kwargs_keys=( | |
433 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
436 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
434 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
437 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
435 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
438 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
436 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
439 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
437 |
|
440 | |||
438 |
|
441 | |||
439 | review_pull_request = ExtensionCallback( |
|
442 | review_pull_request = ExtensionCallback( | |
440 | hook_name='REVIEW_PULL_REQUEST', |
|
443 | hook_name='REVIEW_PULL_REQUEST', | |
441 | kwargs_keys=( |
|
444 | kwargs_keys=( | |
442 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
445 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
443 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
446 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
444 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
447 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
445 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
448 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
446 |
|
449 | |||
447 |
|
450 | |||
448 | comment_pull_request = ExtensionCallback( |
|
451 | comment_pull_request = ExtensionCallback( | |
449 | hook_name='COMMENT_PULL_REQUEST', |
|
452 | hook_name='COMMENT_PULL_REQUEST', | |
450 | kwargs_keys=( |
|
453 | kwargs_keys=( | |
451 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
454 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
452 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
455 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
453 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
456 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
454 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
457 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
455 |
|
458 | |||
456 |
|
459 | |||
457 | comment_edit_pull_request = ExtensionCallback( |
|
460 | comment_edit_pull_request = ExtensionCallback( | |
458 | hook_name='COMMENT_EDIT_PULL_REQUEST', |
|
461 | hook_name='COMMENT_EDIT_PULL_REQUEST', | |
459 | kwargs_keys=( |
|
462 | kwargs_keys=( | |
460 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
463 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
461 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
464 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
462 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
465 | 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
463 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
466 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
464 |
|
467 | |||
465 |
|
468 | |||
466 | update_pull_request = ExtensionCallback( |
|
469 | update_pull_request = ExtensionCallback( | |
467 | hook_name='UPDATE_PULL_REQUEST', |
|
470 | hook_name='UPDATE_PULL_REQUEST', | |
468 | kwargs_keys=( |
|
471 | kwargs_keys=( | |
469 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', |
|
472 | 'server_url', 'config', 'scm', 'username', 'ip', 'action', | |
470 | 'repository', 'pull_request_id', 'url', 'title', 'description', |
|
473 | 'repository', 'pull_request_id', 'url', 'title', 'description', | |
471 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', |
|
474 | 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status', | |
472 | 'mergeable', 'source', 'target', 'author', 'reviewers')) |
|
475 | 'mergeable', 'source', 'target', 'author', 'reviewers')) | |
473 |
|
476 | |||
474 |
|
477 | |||
475 | create_user = ExtensionCallback( |
|
478 | create_user = ExtensionCallback( | |
476 | hook_name='CREATE_USER_HOOK', |
|
479 | hook_name='CREATE_USER_HOOK', | |
477 | kwargs_keys=( |
|
480 | kwargs_keys=( | |
478 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
481 | 'username', 'full_name_or_username', 'full_contact', 'user_id', | |
479 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
482 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', | |
480 | 'ip_addresses', 'extern_type', 'extern_name', |
|
483 | 'ip_addresses', 'extern_type', 'extern_name', | |
481 | 'email', 'api_keys', 'last_login', |
|
484 | 'email', 'api_keys', 'last_login', | |
482 | 'full_name', 'active', 'password', 'emails', |
|
485 | 'full_name', 'active', 'password', 'emails', | |
483 | 'inherit_default_permissions', 'created_by', 'created_on')) |
|
486 | 'inherit_default_permissions', 'created_by', 'created_on')) | |
484 |
|
487 | |||
485 |
|
488 | |||
486 | delete_user = ExtensionCallback( |
|
489 | delete_user = ExtensionCallback( | |
487 | hook_name='DELETE_USER_HOOK', |
|
490 | hook_name='DELETE_USER_HOOK', | |
488 | kwargs_keys=( |
|
491 | kwargs_keys=( | |
489 | 'username', 'full_name_or_username', 'full_contact', 'user_id', |
|
492 | 'username', 'full_name_or_username', 'full_contact', 'user_id', | |
490 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', |
|
493 | 'name', 'firstname', 'short_contact', 'admin', 'lastname', | |
491 | 'ip_addresses', |
|
494 | 'ip_addresses', | |
492 | 'email', 'last_login', |
|
495 | 'email', 'last_login', | |
493 | 'full_name', 'active', 'password', 'emails', |
|
496 | 'full_name', 'active', 'password', 'emails', | |
494 | 'inherit_default_permissions', 'deleted_by')) |
|
497 | 'inherit_default_permissions', 'deleted_by')) | |
495 |
|
498 | |||
496 |
|
499 | |||
497 | create_repository = ExtensionCallback( |
|
500 | create_repository = ExtensionCallback( | |
498 | hook_name='CREATE_REPO_HOOK', |
|
501 | hook_name='CREATE_REPO_HOOK', | |
499 | kwargs_keys=( |
|
502 | kwargs_keys=( | |
500 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
503 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
501 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
504 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
502 | 'clone_uri', 'fork_id', 'group_id', 'created_by')) |
|
505 | 'clone_uri', 'fork_id', 'group_id', 'created_by')) | |
503 |
|
506 | |||
504 |
|
507 | |||
505 | delete_repository = ExtensionCallback( |
|
508 | delete_repository = ExtensionCallback( | |
506 | hook_name='DELETE_REPO_HOOK', |
|
509 | hook_name='DELETE_REPO_HOOK', | |
507 | kwargs_keys=( |
|
510 | kwargs_keys=( | |
508 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
511 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
509 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
512 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
510 | 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on')) |
|
513 | 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on')) | |
511 |
|
514 | |||
512 |
|
515 | |||
513 | comment_commit_repository = ExtensionCallback( |
|
516 | comment_commit_repository = ExtensionCallback( | |
514 | hook_name='COMMENT_COMMIT_REPO_HOOK', |
|
517 | hook_name='COMMENT_COMMIT_REPO_HOOK', | |
515 | kwargs_keys=( |
|
518 | kwargs_keys=( | |
516 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
519 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
517 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
520 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
518 | 'clone_uri', 'fork_id', 'group_id', |
|
521 | 'clone_uri', 'fork_id', 'group_id', | |
519 | 'repository', 'created_by', 'comment', 'commit')) |
|
522 | 'repository', 'created_by', 'comment', 'commit')) | |
520 |
|
523 | |||
521 | comment_edit_commit_repository = ExtensionCallback( |
|
524 | comment_edit_commit_repository = ExtensionCallback( | |
522 | hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK', |
|
525 | hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK', | |
523 | kwargs_keys=( |
|
526 | kwargs_keys=( | |
524 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', |
|
527 | 'repo_name', 'repo_type', 'description', 'private', 'created_on', | |
525 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', |
|
528 | 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics', | |
526 | 'clone_uri', 'fork_id', 'group_id', |
|
529 | 'clone_uri', 'fork_id', 'group_id', | |
527 | 'repository', 'created_by', 'comment', 'commit')) |
|
530 | 'repository', 'created_by', 'comment', 'commit')) | |
528 |
|
531 | |||
529 |
|
532 | |||
530 | create_repository_group = ExtensionCallback( |
|
533 | create_repository_group = ExtensionCallback( | |
531 | hook_name='CREATE_REPO_GROUP_HOOK', |
|
534 | hook_name='CREATE_REPO_GROUP_HOOK', | |
532 | kwargs_keys=( |
|
535 | kwargs_keys=( | |
533 | 'group_name', 'group_parent_id', 'group_description', |
|
536 | 'group_name', 'group_parent_id', 'group_description', | |
534 | 'group_id', 'user_id', 'created_by', 'created_on', |
|
537 | 'group_id', 'user_id', 'created_by', 'created_on', | |
535 | 'enable_locking')) |
|
538 | 'enable_locking')) |
@@ -1,1148 +1,1165 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | Some simple helper functions |
|
23 | Some simple helper functions | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import collections |
|
26 | import collections | |
27 | import datetime |
|
27 | import datetime | |
28 | import dateutil.relativedelta |
|
28 | import dateutil.relativedelta | |
29 | import hashlib |
|
29 | import hashlib | |
30 | import logging |
|
30 | import logging | |
31 | import re |
|
31 | import re | |
32 | import sys |
|
32 | import sys | |
33 | import time |
|
33 | import time | |
34 | import urllib |
|
34 | import urllib | |
35 | import urlobject |
|
35 | import urlobject | |
36 | import uuid |
|
36 | import uuid | |
37 | import getpass |
|
37 | import getpass | |
38 | from functools import update_wrapper, partial, wraps |
|
38 | from functools import update_wrapper, partial, wraps | |
39 |
|
39 | |||
40 | import pygments.lexers |
|
40 | import pygments.lexers | |
41 | import sqlalchemy |
|
41 | import sqlalchemy | |
42 | import sqlalchemy.engine.url |
|
42 | import sqlalchemy.engine.url | |
43 | import sqlalchemy.exc |
|
43 | import sqlalchemy.exc | |
44 | import sqlalchemy.sql |
|
44 | import sqlalchemy.sql | |
45 | import webob |
|
45 | import webob | |
46 | import pyramid.threadlocal |
|
46 | import pyramid.threadlocal | |
47 | from pyramid import compat |
|
47 | from pyramid import compat | |
48 | from pyramid.settings import asbool |
|
48 | from pyramid.settings import asbool | |
49 |
|
49 | |||
50 | import rhodecode |
|
50 | import rhodecode | |
51 | from rhodecode.translation import _, _pluralize |
|
51 | from rhodecode.translation import _, _pluralize | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | def md5(s): |
|
54 | def md5(s): | |
55 | return hashlib.md5(s).hexdigest() |
|
55 | return hashlib.md5(s).hexdigest() | |
56 |
|
56 | |||
57 |
|
57 | |||
58 | def md5_safe(s): |
|
58 | def md5_safe(s): | |
59 | return md5(safe_str(s)) |
|
59 | return md5(safe_str(s)) | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | def sha1(s): |
|
62 | def sha1(s): | |
63 | return hashlib.sha1(s).hexdigest() |
|
63 | return hashlib.sha1(s).hexdigest() | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | def sha1_safe(s): |
|
66 | def sha1_safe(s): | |
67 | return sha1(safe_str(s)) |
|
67 | return sha1(safe_str(s)) | |
68 |
|
68 | |||
69 |
|
69 | |||
70 | def __get_lem(extra_mapping=None): |
|
70 | def __get_lem(extra_mapping=None): | |
71 | """ |
|
71 | """ | |
72 | Get language extension map based on what's inside pygments lexers |
|
72 | Get language extension map based on what's inside pygments lexers | |
73 | """ |
|
73 | """ | |
74 | d = collections.defaultdict(lambda: []) |
|
74 | d = collections.defaultdict(lambda: []) | |
75 |
|
75 | |||
76 | def __clean(s): |
|
76 | def __clean(s): | |
77 | s = s.lstrip('*') |
|
77 | s = s.lstrip('*') | |
78 | s = s.lstrip('.') |
|
78 | s = s.lstrip('.') | |
79 |
|
79 | |||
80 | if s.find('[') != -1: |
|
80 | if s.find('[') != -1: | |
81 | exts = [] |
|
81 | exts = [] | |
82 | start, stop = s.find('['), s.find(']') |
|
82 | start, stop = s.find('['), s.find(']') | |
83 |
|
83 | |||
84 | for suffix in s[start + 1:stop]: |
|
84 | for suffix in s[start + 1:stop]: | |
85 | exts.append(s[:s.find('[')] + suffix) |
|
85 | exts.append(s[:s.find('[')] + suffix) | |
86 | return [e.lower() for e in exts] |
|
86 | return [e.lower() for e in exts] | |
87 | else: |
|
87 | else: | |
88 | return [s.lower()] |
|
88 | return [s.lower()] | |
89 |
|
89 | |||
90 | for lx, t in sorted(pygments.lexers.LEXERS.items()): |
|
90 | for lx, t in sorted(pygments.lexers.LEXERS.items()): | |
91 | m = map(__clean, t[-2]) |
|
91 | m = map(__clean, t[-2]) | |
92 | if m: |
|
92 | if m: | |
93 | m = reduce(lambda x, y: x + y, m) |
|
93 | m = reduce(lambda x, y: x + y, m) | |
94 | for ext in m: |
|
94 | for ext in m: | |
95 | desc = lx.replace('Lexer', '') |
|
95 | desc = lx.replace('Lexer', '') | |
96 | d[ext].append(desc) |
|
96 | d[ext].append(desc) | |
97 |
|
97 | |||
98 | data = dict(d) |
|
98 | data = dict(d) | |
99 |
|
99 | |||
100 | extra_mapping = extra_mapping or {} |
|
100 | extra_mapping = extra_mapping or {} | |
101 | if extra_mapping: |
|
101 | if extra_mapping: | |
102 | for k, v in extra_mapping.items(): |
|
102 | for k, v in extra_mapping.items(): | |
103 | if k not in data: |
|
103 | if k not in data: | |
104 | # register new mapping2lexer |
|
104 | # register new mapping2lexer | |
105 | data[k] = [v] |
|
105 | data[k] = [v] | |
106 |
|
106 | |||
107 | return data |
|
107 | return data | |
108 |
|
108 | |||
109 |
|
109 | |||
110 | def str2bool(_str): |
|
110 | def str2bool(_str): | |
111 | """ |
|
111 | """ | |
112 | returns True/False value from given string; it tries to translate the |
|
112 | returns True/False value from given string; it tries to translate the | |
113 | string into boolean |
|
113 | string into boolean | |
114 |
|
114 | |||
115 | :param _str: string value to translate into boolean |
|
115 | :param _str: string value to translate into boolean | |
116 | :rtype: boolean |
|
116 | :rtype: boolean | |
117 | :returns: boolean from given string |
|
117 | :returns: boolean from given string | |
118 | """ |
|
118 | """ | |
119 | if _str is None: |
|
119 | if _str is None: | |
120 | return False |
|
120 | return False | |
121 | if _str in (True, False): |
|
121 | if _str in (True, False): | |
122 | return _str |
|
122 | return _str | |
123 | _str = str(_str).strip().lower() |
|
123 | _str = str(_str).strip().lower() | |
124 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') |
|
124 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') | |
125 |
|
125 | |||
126 |
|
126 | |||
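
str2bool() accepts real booleans, None and strings; strings are stripped, lower-cased and checked against a small truthy set, and everything else maps to False. Quick examples matching the code above:

    assert str2bool(True) is True
    assert str2bool(' Yes ') is True   # stripped and lower-cased before the check
    assert str2bool('on') is True
    assert str2bool('0') is False
    assert str2bool(None) is False
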
127 | def aslist(obj, sep=None, strip=True): |
|
127 | def aslist(obj, sep=None, strip=True): | |
128 | """ |
|
128 | """ | |
129 | Returns given string separated by sep as list |
|
129 | Returns given string separated by sep as list | |
130 |
|
130 | |||
131 | :param obj: |
|
131 | :param obj: | |
132 | :param sep: |
|
132 | :param sep: | |
133 | :param strip: |
|
133 | :param strip: | |
134 | """ |
|
134 | """ | |
135 | if isinstance(obj, (basestring,)): |
|
135 | if isinstance(obj, (basestring,)): | |
136 | lst = obj.split(sep) |
|
136 | lst = obj.split(sep) | |
137 | if strip: |
|
137 | if strip: | |
138 | lst = [v.strip() for v in lst] |
|
138 | lst = [v.strip() for v in lst] | |
139 | return lst |
|
139 | return lst | |
140 | elif isinstance(obj, (list, tuple)): |
|
140 | elif isinstance(obj, (list, tuple)): | |
141 | return obj |
|
141 | return obj | |
142 | elif obj is None: |
|
142 | elif obj is None: | |
143 | return [] |
|
143 | return [] | |
144 | else: |
|
144 | else: | |
145 | return [obj] |
|
145 | return [obj] | |
146 |
|
146 | |||
147 |
|
147 | |||
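
aslist() is deliberately permissive about its input: strings are split on sep (and stripped by default), lists and tuples pass through unchanged, None becomes an empty list, and any other value is wrapped in a list. For example:

    assert aslist('a, b ,c', sep=',') == ['a', 'b', 'c']   # split, then stripped
    assert aslist(['a', 'b']) == ['a', 'b']                # sequences pass through
    assert aslist(None) == []
    assert aslist(42) == [42]                              # scalars get wrapped
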
148 | def convert_line_endings(line, mode): |
|
148 | def convert_line_endings(line, mode): | |
149 | """ |
|
149 | """ | |
150 | Converts a given line "line end" according to the given mode |
|
150 | Converts a given line "line end" according to the given mode | |
151 |
|
151 | |||
152 | Available modes are:: |
|
152 | Available modes are:: | |
153 | 0 - Unix |
|
153 | 0 - Unix | |
154 | 1 - Mac |
|
154 | 1 - Mac | |
155 | 2 - DOS |
|
155 | 2 - DOS | |
156 |
|
156 | |||
157 | :param line: given line to convert |
|
157 | :param line: given line to convert | |
158 | :param mode: mode to convert to |
|
158 | :param mode: mode to convert to | |
159 | :rtype: str |
|
159 | :rtype: str | |
160 | :return: converted line according to mode |
|
160 | :return: converted line according to mode | |
161 | """ |
|
161 | """ | |
162 | if mode == 0: |
|
162 | if mode == 0: | |
163 | line = line.replace('\r\n', '\n') |
|
163 | line = line.replace('\r\n', '\n') | |
164 | line = line.replace('\r', '\n') |
|
164 | line = line.replace('\r', '\n') | |
165 | elif mode == 1: |
|
165 | elif mode == 1: | |
166 | line = line.replace('\r\n', '\r') |
|
166 | line = line.replace('\r\n', '\r') | |
167 | line = line.replace('\n', '\r') |
|
167 | line = line.replace('\n', '\r') | |
168 | elif mode == 2: |
|
168 | elif mode == 2: | |
169 | line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line) |
|
169 | line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line) | |
170 | return line |
|
170 | return line | |
171 |
|
171 | |||
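Example (illustrative sketch; the mode numbers follow the table in the docstring above)::

    mixed = 'a\r\nb\rc\n'
    assert convert_line_endings(mixed, 0) == 'a\nb\nc\n'          # everything to Unix \n
    assert convert_line_endings(mixed, 1) == 'a\rb\rc\r'          # everything to Mac \r
    assert convert_line_endings(mixed, 2) == 'a\r\nb\r\nc\r\n'    # everything to DOS \r\n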
172 |
|
172 | |||
173 | def detect_mode(line, default): |
|
173 | def detect_mode(line, default): | |
174 | """ |
|
174 | """ | |
175 | Detects the line break for a given line; if the line break cannot be found, the |
|
175 | Detects the line break for a given line; if the line break cannot be found, the | |
176 | given default value is returned |
|
176 | given default value is returned | |
177 |
|
177 | |||
178 | :param line: str line |
|
178 | :param line: str line | |
179 | :param default: default |
|
179 | :param default: default | |
180 | :rtype: int |
|
180 | :rtype: int | |
181 | :return: value of line end, one of: 0 - Unix, 1 - Mac, 2 - DOS |
|
181 | :return: value of line end, one of: 0 - Unix, 1 - Mac, 2 - DOS | |
182 | """ |
|
182 | """ | |
183 | if line.endswith('\r\n'): |
|
183 | if line.endswith('\r\n'): | |
184 | return 2 |
|
184 | return 2 | |
185 | elif line.endswith('\n'): |
|
185 | elif line.endswith('\n'): | |
186 | return 0 |
|
186 | return 0 | |
187 | elif line.endswith('\r'): |
|
187 | elif line.endswith('\r'): | |
188 | return 1 |
|
188 | return 1 | |
189 | else: |
|
189 | else: | |
190 | return default |
|
190 | return default | |
191 |
|
191 | |||
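Example, mirroring convert_line_endings above (illustrative sketch)::

    assert detect_mode('foo\r\n', default=0) == 2   # DOS
    assert detect_mode('foo\n', default=2) == 0     # Unix
    assert detect_mode('foo\r', default=0) == 1     # Mac
    assert detect_mode('foo', default=0) == 0       # no line end, the default wins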
192 |
|
192 | |||
193 | def safe_int(val, default=None): |
|
193 | def safe_int(val, default=None): | |
194 | """ |
|
194 | """ | |
195 | Returns int() of val; if val is not convertible to int, the default is used |
|
195 | Returns int() of val; if val is not convertible to int, the default is used | |
196 | instead |
|
196 | instead | |
197 |
|
197 | |||
198 | :param val: |
|
198 | :param val: | |
199 | :param default: |
|
199 | :param default: | |
200 | """ |
|
200 | """ | |
201 |
|
201 | |||
202 | try: |
|
202 | try: | |
203 | val = int(val) |
|
203 | val = int(val) | |
204 | except (ValueError, TypeError): |
|
204 | except (ValueError, TypeError): | |
205 | val = default |
|
205 | val = default | |
206 |
|
206 | |||
207 | return val |
|
207 | return val | |
208 |
|
208 | |||
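Example (illustrative sketch)::

    assert safe_int('42') == 42              # plain conversion
    assert safe_int('42.5') is None          # ValueError falls back to the default (None)
    assert safe_int(None, default=0) == 0    # TypeError falls back to the given default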
209 |
|
209 | |||
210 | def safe_unicode(str_, from_encoding=None, use_chardet=False): |
|
210 | def safe_unicode(str_, from_encoding=None, use_chardet=False): | |
211 | """ |
|
211 | """ | |
212 | safe unicode function. Does a few tricks to turn str_ into unicode |
|
212 | safe unicode function. Does a few tricks to turn str_ into unicode | |
213 |
|
213 | |||
214 | In case of a UnicodeDecodeError, we try to return it with the encoding detected |
|
214 | In case of a UnicodeDecodeError, we try to return it with the encoding detected | |
215 | by the chardet library; if that fails, fall back to unicode with errors replaced |
|
215 | by the chardet library; if that fails, fall back to unicode with errors replaced | |
216 |
|
216 | |||
217 | :param str_: string to decode |
|
217 | :param str_: string to decode | |
218 | :rtype: unicode |
|
218 | :rtype: unicode | |
219 | :returns: unicode object |
|
219 | :returns: unicode object | |
220 | """ |
|
220 | """ | |
221 | if isinstance(str_, unicode): |
|
221 | if isinstance(str_, unicode): | |
222 | return str_ |
|
222 | return str_ | |
223 |
|
223 | |||
224 | if not from_encoding: |
|
224 | if not from_encoding: | |
225 | DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding', |
|
225 | DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding', | |
226 | 'utf8'), sep=',') |
|
226 | 'utf8'), sep=',') | |
227 | from_encoding = DEFAULT_ENCODINGS |
|
227 | from_encoding = DEFAULT_ENCODINGS | |
228 |
|
228 | |||
229 | if not isinstance(from_encoding, (list, tuple)): |
|
229 | if not isinstance(from_encoding, (list, tuple)): | |
230 | from_encoding = [from_encoding] |
|
230 | from_encoding = [from_encoding] | |
231 |
|
231 | |||
232 | try: |
|
232 | try: | |
233 | return unicode(str_) |
|
233 | return unicode(str_) | |
234 | except UnicodeDecodeError: |
|
234 | except UnicodeDecodeError: | |
235 | pass |
|
235 | pass | |
236 |
|
236 | |||
237 | for enc in from_encoding: |
|
237 | for enc in from_encoding: | |
238 | try: |
|
238 | try: | |
239 | return unicode(str_, enc) |
|
239 | return unicode(str_, enc) | |
240 | except UnicodeDecodeError: |
|
240 | except UnicodeDecodeError: | |
241 | pass |
|
241 | pass | |
242 |
|
242 | |||
243 | if use_chardet: |
|
243 | if use_chardet: | |
244 | try: |
|
244 | try: | |
245 | import chardet |
|
245 | import chardet | |
246 | encoding = chardet.detect(str_)['encoding'] |
|
246 | encoding = chardet.detect(str_)['encoding'] | |
247 | if encoding is None: |
|
247 | if encoding is None: | |
248 | raise Exception() |
|
248 | raise Exception() | |
249 | return str_.decode(encoding) |
|
249 | return str_.decode(encoding) | |
250 | except (ImportError, UnicodeDecodeError, Exception): |
|
250 | except (ImportError, UnicodeDecodeError, Exception): | |
251 | return unicode(str_, from_encoding[0], 'replace') |
|
251 | return unicode(str_, from_encoding[0], 'replace') | |
252 | else: |
|
252 | else: | |
253 | return unicode(str_, from_encoding[0], 'replace') |
|
253 | return unicode(str_, from_encoding[0], 'replace') | |
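Example under the Python 2 runtime this module targets (illustrative sketch; passing from_encoding explicitly avoids the rhodecode.CONFIG lookup above)::

    assert safe_unicode(u'docs') == u'docs'                                 # unicode passes through
    assert safe_unicode('gr\xc3\xbcn', from_encoding='utf8') == u'gr\xfcn'  # utf8 bytes get decoded
    assert u'\ufffd' in safe_unicode('gr\xff', from_encoding='utf8')        # bad bytes are replaced, never raised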
254 |
|
254 | |||
255 | def safe_str(unicode_, to_encoding=None, use_chardet=False): |
|
255 | def safe_str(unicode_, to_encoding=None, use_chardet=False): | |
256 | """ |
|
256 | """ | |
257 | safe str function. Does a few tricks to turn unicode_ into a string |
|
257 | safe str function. Does a few tricks to turn unicode_ into a string | |
258 |
|
258 | |||
259 | In case of a UnicodeEncodeError, we try to return it with the encoding detected |
|
259 | In case of a UnicodeEncodeError, we try to return it with the encoding detected | |
260 | by the chardet library; if that fails, fall back to a string with errors replaced |
|
260 | by the chardet library; if that fails, fall back to a string with errors replaced | |
261 |
|
261 | |||
262 | :param unicode_: unicode to encode |
|
262 | :param unicode_: unicode to encode | |
263 | :rtype: str |
|
263 | :rtype: str | |
264 | :returns: str object |
|
264 | :returns: str object | |
265 | """ |
|
265 | """ | |
266 |
|
266 | |||
267 | # if it's not basestr cast to str |
|
267 | # if it's not basestr cast to str | |
268 | if not isinstance(unicode_, compat.string_types): |
|
268 | if not isinstance(unicode_, compat.string_types): | |
269 | return str(unicode_) |
|
269 | return str(unicode_) | |
270 |
|
270 | |||
271 | if isinstance(unicode_, str): |
|
271 | if isinstance(unicode_, str): | |
272 | return unicode_ |
|
272 | return unicode_ | |
273 |
|
273 | |||
274 | if not to_encoding: |
|
274 | if not to_encoding: | |
275 | DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding', |
|
275 | DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding', | |
276 | 'utf8'), sep=',') |
|
276 | 'utf8'), sep=',') | |
277 | to_encoding = DEFAULT_ENCODINGS |
|
277 | to_encoding = DEFAULT_ENCODINGS | |
278 |
|
278 | |||
279 | if not isinstance(to_encoding, (list, tuple)): |
|
279 | if not isinstance(to_encoding, (list, tuple)): | |
280 | to_encoding = [to_encoding] |
|
280 | to_encoding = [to_encoding] | |
281 |
|
281 | |||
282 | for enc in to_encoding: |
|
282 | for enc in to_encoding: | |
283 | try: |
|
283 | try: | |
284 | return unicode_.encode(enc) |
|
284 | return unicode_.encode(enc) | |
285 | except UnicodeEncodeError: |
|
285 | except UnicodeEncodeError: | |
286 | pass |
|
286 | pass | |
287 |
|
287 | |||
288 | if use_chardet: |
|
288 | if use_chardet: | |
289 | try: |
|
289 | try: | |
290 | import chardet |
|
290 | import chardet | |
291 | encoding = chardet.detect(unicode_)['encoding'] |
|
291 | encoding = chardet.detect(unicode_)['encoding'] | |
292 | if encoding is None: |
|
292 | if encoding is None: | |
293 | raise UnicodeEncodeError() |
|
293 | raise UnicodeEncodeError() | |
294 |
|
294 | |||
295 | return unicode_.encode(encoding) |
|
295 | return unicode_.encode(encoding) | |
296 | except (ImportError, UnicodeEncodeError): |
|
296 | except (ImportError, UnicodeEncodeError): | |
297 | return unicode_.encode(to_encoding[0], 'replace') |
|
297 | return unicode_.encode(to_encoding[0], 'replace') | |
298 | else: |
|
298 | else: | |
299 | return unicode_.encode(to_encoding[0], 'replace') |
|
299 | return unicode_.encode(to_encoding[0], 'replace') | |
300 |
|
300 | |||
301 |
|
301 | |||
302 | def remove_suffix(s, suffix): |
|
302 | def remove_suffix(s, suffix): | |
303 | if s.endswith(suffix): |
|
303 | if s.endswith(suffix): | |
304 | s = s[:-1 * len(suffix)] |
|
304 | s = s[:-1 * len(suffix)] | |
305 | return s |
|
305 | return s | |
306 |
|
306 | |||
307 |
|
307 | |||
308 | def remove_prefix(s, prefix): |
|
308 | def remove_prefix(s, prefix): | |
309 | if s.startswith(prefix): |
|
309 | if s.startswith(prefix): | |
310 | s = s[len(prefix):] |
|
310 | s = s[len(prefix):] | |
311 | return s |
|
311 | return s | |
312 |
|
312 | |||
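Both helpers are no-ops when the affix is absent; for example (illustrative sketch)::

    assert remove_suffix('repo.git', '.git') == 'repo'
    assert remove_suffix('repo', '.git') == 'repo'                    # unchanged, no suffix
    assert remove_prefix('refs/heads/main', 'refs/heads/') == 'main'
    assert remove_prefix('main', 'refs/heads/') == 'main'             # unchanged, no prefix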
313 |
|
313 | |||
314 | def find_calling_context(ignore_modules=None): |
|
314 | def find_calling_context(ignore_modules=None): | |
315 | """ |
|
315 | """ | |
316 | Look through the calling stack and return the frame which called |
|
316 | Look through the calling stack and return the frame which called | |
317 | this function and is part of the core module (i.e. rhodecode.*) |
|
317 | this function and is part of the core module (i.e. rhodecode.*) | |
318 |
|
318 | |||
319 | :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib'] |
|
319 | :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib'] | |
320 | """ |
|
320 | """ | |
321 |
|
321 | |||
322 | ignore_modules = ignore_modules or [] |
|
322 | ignore_modules = ignore_modules or [] | |
323 |
|
323 | |||
324 | f = sys._getframe(2) |
|
324 | f = sys._getframe(2) | |
325 | while f.f_back is not None: |
|
325 | while f.f_back is not None: | |
326 | name = f.f_globals.get('__name__') |
|
326 | name = f.f_globals.get('__name__') | |
327 | if name and name.startswith(__name__.split('.')[0]): |
|
327 | if name and name.startswith(__name__.split('.')[0]): | |
328 | if name not in ignore_modules: |
|
328 | if name not in ignore_modules: | |
329 | return f |
|
329 | return f | |
330 | f = f.f_back |
|
330 | f = f.f_back | |
331 | return None |
|
331 | return None | |
332 |
|
332 | |||
333 |
|
333 | |||
334 | def ping_connection(connection, branch): |
|
334 | def ping_connection(connection, branch): | |
335 | if branch: |
|
335 | if branch: | |
336 | # "branch" refers to a sub-connection of a connection, |
|
336 | # "branch" refers to a sub-connection of a connection, | |
337 | # we don't want to bother pinging on these. |
|
337 | # we don't want to bother pinging on these. | |
338 | return |
|
338 | return | |
339 |
|
339 | |||
340 | # turn off "close with result". This flag is only used with |
|
340 | # turn off "close with result". This flag is only used with | |
341 | # "connectionless" execution, otherwise will be False in any case |
|
341 | # "connectionless" execution, otherwise will be False in any case | |
342 | save_should_close_with_result = connection.should_close_with_result |
|
342 | save_should_close_with_result = connection.should_close_with_result | |
343 | connection.should_close_with_result = False |
|
343 | connection.should_close_with_result = False | |
344 |
|
344 | |||
345 | try: |
|
345 | try: | |
346 | # run a SELECT 1. use a core select() so that |
|
346 | # run a SELECT 1. use a core select() so that | |
347 | # the SELECT of a scalar value without a table is |
|
347 | # the SELECT of a scalar value without a table is | |
348 | # appropriately formatted for the backend |
|
348 | # appropriately formatted for the backend | |
349 | connection.scalar(sqlalchemy.sql.select([1])) |
|
349 | connection.scalar(sqlalchemy.sql.select([1])) | |
350 | except sqlalchemy.exc.DBAPIError as err: |
|
350 | except sqlalchemy.exc.DBAPIError as err: | |
351 | # catch SQLAlchemy's DBAPIError, which is a wrapper |
|
351 | # catch SQLAlchemy's DBAPIError, which is a wrapper | |
352 | # for the DBAPI's exception. It includes a .connection_invalidated |
|
352 | # for the DBAPI's exception. It includes a .connection_invalidated | |
353 | # attribute which specifies if this connection is a "disconnect" |
|
353 | # attribute which specifies if this connection is a "disconnect" | |
354 | # condition, which is based on inspection of the original exception |
|
354 | # condition, which is based on inspection of the original exception | |
355 | # by the dialect in use. |
|
355 | # by the dialect in use. | |
356 | if err.connection_invalidated: |
|
356 | if err.connection_invalidated: | |
357 | # run the same SELECT again - the connection will re-validate |
|
357 | # run the same SELECT again - the connection will re-validate | |
358 | # itself and establish a new connection. The disconnect detection |
|
358 | # itself and establish a new connection. The disconnect detection | |
359 | # here also causes the whole connection pool to be invalidated |
|
359 | # here also causes the whole connection pool to be invalidated | |
360 | # so that all stale connections are discarded. |
|
360 | # so that all stale connections are discarded. | |
361 | connection.scalar(sqlalchemy.sql.select([1])) |
|
361 | connection.scalar(sqlalchemy.sql.select([1])) | |
362 | else: |
|
362 | else: | |
363 | raise |
|
363 | raise | |
364 | finally: |
|
364 | finally: | |
365 | # restore "close with result" |
|
365 | # restore "close with result" | |
366 | connection.should_close_with_result = save_should_close_with_result |
|
366 | connection.should_close_with_result = save_should_close_with_result | |
367 |
|
367 | |||
368 |
|
368 | |||
369 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): |
|
369 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): | |
370 | """Custom engine_from_config functions.""" |
|
370 | """Custom engine_from_config functions.""" | |
371 | log = logging.getLogger('sqlalchemy.engine') |
|
371 | log = logging.getLogger('sqlalchemy.engine') | |
372 | use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None)) |
|
372 | use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None)) | |
373 | debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None)) |
|
373 | debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None)) | |
374 |
|
374 | |||
375 | engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs) |
|
375 | engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs) | |
376 |
|
376 | |||
377 | def color_sql(sql): |
|
377 | def color_sql(sql): | |
378 | color_seq = '\033[1;33m' # This is yellow: code 33 |
|
378 | color_seq = '\033[1;33m' # This is yellow: code 33 | |
379 | normal = '\x1b[0m' |
|
379 | normal = '\x1b[0m' | |
380 | return ''.join([color_seq, sql, normal]) |
|
380 | return ''.join([color_seq, sql, normal]) | |
381 |
|
381 | |||
382 | if use_ping_connection: |
|
382 | if use_ping_connection: | |
383 | log.debug('Adding ping_connection on the engine config.') |
|
383 | log.debug('Adding ping_connection on the engine config.') | |
384 | sqlalchemy.event.listen(engine, "engine_connect", ping_connection) |
|
384 | sqlalchemy.event.listen(engine, "engine_connect", ping_connection) | |
385 |
|
385 | |||
386 | if debug: |
|
386 | if debug: | |
387 | # attach events only for debug configuration |
|
387 | # attach events only for debug configuration | |
388 | def before_cursor_execute(conn, cursor, statement, |
|
388 | def before_cursor_execute(conn, cursor, statement, | |
389 | parameters, context, executemany): |
|
389 | parameters, context, executemany): | |
390 | setattr(conn, 'query_start_time', time.time()) |
|
390 | setattr(conn, 'query_start_time', time.time()) | |
391 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) |
|
391 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) | |
392 | calling_context = find_calling_context(ignore_modules=[ |
|
392 | calling_context = find_calling_context(ignore_modules=[ | |
393 | 'rhodecode.lib.caching_query', |
|
393 | 'rhodecode.lib.caching_query', | |
394 | 'rhodecode.model.settings', |
|
394 | 'rhodecode.model.settings', | |
395 | ]) |
|
395 | ]) | |
396 | if calling_context: |
|
396 | if calling_context: | |
397 | log.info(color_sql('call context %s:%s' % ( |
|
397 | log.info(color_sql('call context %s:%s' % ( | |
398 | calling_context.f_code.co_filename, |
|
398 | calling_context.f_code.co_filename, | |
399 | calling_context.f_lineno, |
|
399 | calling_context.f_lineno, | |
400 | ))) |
|
400 | ))) | |
401 |
|
401 | |||
402 | def after_cursor_execute(conn, cursor, statement, |
|
402 | def after_cursor_execute(conn, cursor, statement, | |
403 | parameters, context, executemany): |
|
403 | parameters, context, executemany): | |
404 | delattr(conn, 'query_start_time') |
|
404 | delattr(conn, 'query_start_time') | |
405 |
|
405 | |||
406 | sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute) |
|
406 | sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute) | |
407 | sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute) |
|
407 | sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute) | |
408 |
|
408 | |||
409 | return engine |
|
409 | return engine | |
410 |
|
410 | |||
411 |
|
411 | |||
412 | def get_encryption_key(config): |
|
412 | def get_encryption_key(config): | |
413 | secret = config.get('rhodecode.encrypted_values.secret') |
|
413 | secret = config.get('rhodecode.encrypted_values.secret') | |
414 | default = config['beaker.session.secret'] |
|
414 | default = config['beaker.session.secret'] | |
415 | return secret or default |
|
415 | return secret or default | |
416 |
|
416 | |||
417 |
|
417 | |||
418 | def age(prevdate, now=None, show_short_version=False, show_suffix=True, |
|
418 | def age(prevdate, now=None, show_short_version=False, show_suffix=True, | |
419 | short_format=False): |
|
419 | short_format=False): | |
420 | """ |
|
420 | """ | |
421 | Turns a datetime into an age string. |
|
421 | Turns a datetime into an age string. | |
422 | If show_short_version is True, this generates a shorter string with |
|
422 | If show_short_version is True, this generates a shorter string with | |
423 | an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'. |
|
423 | an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'. | |
424 |
|
424 | |||
425 | *IMPORTANT* |
|
425 | *IMPORTANT* | |
426 | The code of this function is written in a special way so it is easier to |
|
426 | The code of this function is written in a special way so it is easier to | |
427 | port it to JavaScript. If you mean to update it, please also update the |
|
427 | port it to JavaScript. If you mean to update it, please also update the | |
428 | `jquery.timeago-extension.js` file |
|
428 | `jquery.timeago-extension.js` file | |
429 |
|
429 | |||
430 | :param prevdate: datetime object |
|
430 | :param prevdate: datetime object | |
431 | :param now: the current time; if not defined we use |
|
431 | :param now: the current time; if not defined we use | |
432 | `datetime.datetime.now()` |
|
432 | `datetime.datetime.now()` | |
433 | :param show_short_version: if it should approximate the date and |
|
433 | :param show_short_version: if it should approximate the date and | |
434 | return a shorter string |
|
434 | return a shorter string | |
435 | :param show_suffix: |
|
435 | :param show_suffix: | |
436 | :param short_format: show short format, eg 2D instead of 2 days |
|
436 | :param short_format: show short format, eg 2D instead of 2 days | |
437 | :rtype: unicode |
|
437 | :rtype: unicode | |
438 | :returns: unicode words describing age |
|
438 | :returns: unicode words describing age | |
439 | """ |
|
439 | """ | |
440 |
|
440 | |||
441 | def _get_relative_delta(now, prevdate): |
|
441 | def _get_relative_delta(now, prevdate): | |
442 | base = dateutil.relativedelta.relativedelta(now, prevdate) |
|
442 | base = dateutil.relativedelta.relativedelta(now, prevdate) | |
443 | return { |
|
443 | return { | |
444 | 'year': base.years, |
|
444 | 'year': base.years, | |
445 | 'month': base.months, |
|
445 | 'month': base.months, | |
446 | 'day': base.days, |
|
446 | 'day': base.days, | |
447 | 'hour': base.hours, |
|
447 | 'hour': base.hours, | |
448 | 'minute': base.minutes, |
|
448 | 'minute': base.minutes, | |
449 | 'second': base.seconds, |
|
449 | 'second': base.seconds, | |
450 | } |
|
450 | } | |
451 |
|
451 | |||
452 | def _is_leap_year(year): |
|
452 | def _is_leap_year(year): | |
453 | return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) |
|
453 | return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) | |
454 |
|
454 | |||
455 | def get_month(prevdate): |
|
455 | def get_month(prevdate): | |
456 | return prevdate.month |
|
456 | return prevdate.month | |
457 |
|
457 | |||
458 | def get_year(prevdate): |
|
458 | def get_year(prevdate): | |
459 | return prevdate.year |
|
459 | return prevdate.year | |
460 |
|
460 | |||
461 | now = now or datetime.datetime.now() |
|
461 | now = now or datetime.datetime.now() | |
462 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
462 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] | |
463 | deltas = {} |
|
463 | deltas = {} | |
464 | future = False |
|
464 | future = False | |
465 |
|
465 | |||
466 | if prevdate > now: |
|
466 | if prevdate > now: | |
467 | now_old = now |
|
467 | now_old = now | |
468 | now = prevdate |
|
468 | now = prevdate | |
469 | prevdate = now_old |
|
469 | prevdate = now_old | |
470 | future = True |
|
470 | future = True | |
471 | if future: |
|
471 | if future: | |
472 | prevdate = prevdate.replace(microsecond=0) |
|
472 | prevdate = prevdate.replace(microsecond=0) | |
473 | # Get date parts deltas |
|
473 | # Get date parts deltas | |
474 | for part in order: |
|
474 | for part in order: | |
475 | rel_delta = _get_relative_delta(now, prevdate) |
|
475 | rel_delta = _get_relative_delta(now, prevdate) | |
476 | deltas[part] = rel_delta[part] |
|
476 | deltas[part] = rel_delta[part] | |
477 |
|
477 | |||
478 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
478 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, | |
479 | # not 1 hour, -59 minutes and -59 seconds) |
|
479 | # not 1 hour, -59 minutes and -59 seconds) | |
480 | offsets = [[5, 60], [4, 60], [3, 24]] |
|
480 | offsets = [[5, 60], [4, 60], [3, 24]] | |
481 | for element in offsets: # seconds, minutes, hours |
|
481 | for element in offsets: # seconds, minutes, hours | |
482 | num = element[0] |
|
482 | num = element[0] | |
483 | length = element[1] |
|
483 | length = element[1] | |
484 |
|
484 | |||
485 | part = order[num] |
|
485 | part = order[num] | |
486 | carry_part = order[num - 1] |
|
486 | carry_part = order[num - 1] | |
487 |
|
487 | |||
488 | if deltas[part] < 0: |
|
488 | if deltas[part] < 0: | |
489 | deltas[part] += length |
|
489 | deltas[part] += length | |
490 | deltas[carry_part] -= 1 |
|
490 | deltas[carry_part] -= 1 | |
491 |
|
491 | |||
492 | # Same thing for days except that the increment depends on the (variable) |
|
492 | # Same thing for days except that the increment depends on the (variable) | |
493 | # number of days in the month |
|
493 | # number of days in the month | |
494 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
494 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] | |
495 | if deltas['day'] < 0: |
|
495 | if deltas['day'] < 0: | |
496 | if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)): |
|
496 | if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)): | |
497 | deltas['day'] += 29 |
|
497 | deltas['day'] += 29 | |
498 | else: |
|
498 | else: | |
499 | deltas['day'] += month_lengths[get_month(prevdate) - 1] |
|
499 | deltas['day'] += month_lengths[get_month(prevdate) - 1] | |
500 |
|
500 | |||
501 | deltas['month'] -= 1 |
|
501 | deltas['month'] -= 1 | |
502 |
|
502 | |||
503 | if deltas['month'] < 0: |
|
503 | if deltas['month'] < 0: | |
504 | deltas['month'] += 12 |
|
504 | deltas['month'] += 12 | |
505 | deltas['year'] -= 1 |
|
505 | deltas['year'] -= 1 | |
506 |
|
506 | |||
507 | # Format the result |
|
507 | # Format the result | |
508 | if short_format: |
|
508 | if short_format: | |
509 | fmt_funcs = { |
|
509 | fmt_funcs = { | |
510 | 'year': lambda d: u'%dy' % d, |
|
510 | 'year': lambda d: u'%dy' % d, | |
511 | 'month': lambda d: u'%dm' % d, |
|
511 | 'month': lambda d: u'%dm' % d, | |
512 | 'day': lambda d: u'%dd' % d, |
|
512 | 'day': lambda d: u'%dd' % d, | |
513 | 'hour': lambda d: u'%dh' % d, |
|
513 | 'hour': lambda d: u'%dh' % d, | |
514 | 'minute': lambda d: u'%dmin' % d, |
|
514 | 'minute': lambda d: u'%dmin' % d, | |
515 | 'second': lambda d: u'%dsec' % d, |
|
515 | 'second': lambda d: u'%dsec' % d, | |
516 | } |
|
516 | } | |
517 | else: |
|
517 | else: | |
518 | fmt_funcs = { |
|
518 | fmt_funcs = { | |
519 | 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(), |
|
519 | 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(), | |
520 | 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(), |
|
520 | 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(), | |
521 | 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(), |
|
521 | 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(), | |
522 | 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(), |
|
522 | 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(), | |
523 | 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(), |
|
523 | 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(), | |
524 | 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(), |
|
524 | 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(), | |
525 | } |
|
525 | } | |
526 |
|
526 | |||
527 | i = 0 |
|
527 | i = 0 | |
528 | for part in order: |
|
528 | for part in order: | |
529 | value = deltas[part] |
|
529 | value = deltas[part] | |
530 | if value != 0: |
|
530 | if value != 0: | |
531 |
|
531 | |||
532 | if i < 5: |
|
532 | if i < 5: | |
533 | sub_part = order[i + 1] |
|
533 | sub_part = order[i + 1] | |
534 | sub_value = deltas[sub_part] |
|
534 | sub_value = deltas[sub_part] | |
535 | else: |
|
535 | else: | |
536 | sub_value = 0 |
|
536 | sub_value = 0 | |
537 |
|
537 | |||
538 | if sub_value == 0 or show_short_version: |
|
538 | if sub_value == 0 or show_short_version: | |
539 | _val = fmt_funcs[part](value) |
|
539 | _val = fmt_funcs[part](value) | |
540 | if future: |
|
540 | if future: | |
541 | if show_suffix: |
|
541 | if show_suffix: | |
542 | return _(u'in ${ago}', mapping={'ago': _val}) |
|
542 | return _(u'in ${ago}', mapping={'ago': _val}) | |
543 | else: |
|
543 | else: | |
544 | return _(_val) |
|
544 | return _(_val) | |
545 |
|
545 | |||
546 | else: |
|
546 | else: | |
547 | if show_suffix: |
|
547 | if show_suffix: | |
548 | return _(u'${ago} ago', mapping={'ago': _val}) |
|
548 | return _(u'${ago} ago', mapping={'ago': _val}) | |
549 | else: |
|
549 | else: | |
550 | return _(_val) |
|
550 | return _(_val) | |
551 |
|
551 | |||
552 | val = fmt_funcs[part](value) |
|
552 | val = fmt_funcs[part](value) | |
553 | val_detail = fmt_funcs[sub_part](sub_value) |
|
553 | val_detail = fmt_funcs[sub_part](sub_value) | |
554 | mapping = {'val': val, 'detail': val_detail} |
|
554 | mapping = {'val': val, 'detail': val_detail} | |
555 |
|
555 | |||
556 | if short_format: |
|
556 | if short_format: | |
557 | datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping) |
|
557 | datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping) | |
558 | if show_suffix: |
|
558 | if show_suffix: | |
559 | datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping) |
|
559 | datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping) | |
560 | if future: |
|
560 | if future: | |
561 | datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping) |
|
561 | datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping) | |
562 | else: |
|
562 | else: | |
563 | datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping) |
|
563 | datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping) | |
564 | if show_suffix: |
|
564 | if show_suffix: | |
565 | datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping) |
|
565 | datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping) | |
566 | if future: |
|
566 | if future: | |
567 | datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping) |
|
567 | datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping) | |
568 |
|
568 | |||
569 | return datetime_tmpl |
|
569 | return datetime_tmpl | |
570 | i += 1 |
|
570 | i += 1 | |
571 | return _(u'just now') |
|
571 | return _(u'just now') | |
572 |
|
572 | |||
573 |
|
573 | |||
574 | def age_from_seconds(seconds): |
|
574 | def age_from_seconds(seconds): | |
575 | seconds = safe_int(seconds) or 0 |
|
575 | seconds = safe_int(seconds) or 0 | |
576 | prevdate = time_to_datetime(time.time() + seconds) |
|
576 | prevdate = time_to_datetime(time.time() + seconds) | |
577 | return age(prevdate, show_suffix=False, show_short_version=True) |
|
577 | return age(prevdate, show_suffix=False, show_short_version=True) | |
578 |
|
578 | |||
579 |
|
579 | |||
580 | def cleaned_uri(uri): |
|
580 | def cleaned_uri(uri): | |
581 | """ |
|
581 | """ | |
582 | Quotes '[' and ']' from uri if there is only one of them. |
|
582 | Quotes '[' and ']' from uri if there is only one of them. | |
583 | According to RFC 3986 we cannot use such chars in a uri. |
|
583 | According to RFC 3986 we cannot use such chars in a uri. | |
584 | :param uri: |
|
584 | :param uri: | |
585 | :return: uri without these chars |
|
585 | :return: uri without these chars | |
586 | """ |
|
586 | """ | |
587 | return urllib.quote(uri, safe='@$:/') |
|
587 | return urllib.quote(uri, safe='@$:/') | |
588 |
|
588 | |||
589 |
|
589 | |||
590 | def credentials_filter(uri): |
|
590 | def credentials_filter(uri): | |
591 | """ |
|
591 | """ | |
592 | Returns a url with credentials removed |
|
592 | Returns a url with credentials removed | |
593 |
|
593 | |||
594 | :param uri: |
|
594 | :param uri: | |
595 | """ |
|
595 | """ | |
596 | import urlobject |
|
596 | import urlobject | |
597 | if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue): |
|
597 | if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue): | |
598 | return 'InvalidDecryptionKey' |
|
598 | return 'InvalidDecryptionKey' | |
599 |
|
599 | |||
600 | url_obj = urlobject.URLObject(cleaned_uri(uri)) |
|
600 | url_obj = urlobject.URLObject(cleaned_uri(uri)) | |
601 | url_obj = url_obj.without_password().without_username() |
|
601 | url_obj = url_obj.without_password().without_username() | |
602 |
|
602 | |||
603 | return url_obj |
|
603 | return url_obj | |
604 |
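Example (illustrative sketch with a made-up host; it assumes the third-party urlobject package imported above is installed, and str() is used because the function returns a URLObject rather than a plain string)::

    url = 'https://user:secret@code.example.com/repo'
    assert str(credentials_filter(url)) == 'https://code.example.com/repo'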
|
604 | |||
605 |
|
605 | |||
606 | def get_host_info(request): |
|
606 | def get_host_info(request): | |
607 | """ |
|
607 | """ | |
608 | Generate host info; to obtain the full url, e.g. https://server.com, |
|
608 | Generate host info; to obtain the full url, e.g. https://server.com, | |
609 | use this |
|
609 | use this | |
610 | `{scheme}://{netloc}` |
|
610 | `{scheme}://{netloc}` | |
611 | """ |
|
611 | """ | |
612 | if not request: |
|
612 | if not request: | |
613 | return {} |
|
613 | return {} | |
614 |
|
614 | |||
615 | qualified_home_url = request.route_url('home') |
|
615 | qualified_home_url = request.route_url('home') | |
616 | parsed_url = urlobject.URLObject(qualified_home_url) |
|
616 | parsed_url = urlobject.URLObject(qualified_home_url) | |
617 | decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/'))) |
|
617 | decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/'))) | |
618 |
|
618 | |||
619 | return { |
|
619 | return { | |
620 | 'scheme': parsed_url.scheme, |
|
620 | 'scheme': parsed_url.scheme, | |
621 | 'netloc': parsed_url.netloc+decoded_path, |
|
621 | 'netloc': parsed_url.netloc+decoded_path, | |
622 | 'hostname': parsed_url.hostname, |
|
622 | 'hostname': parsed_url.hostname, | |
623 | } |
|
623 | } | |
624 |
|
624 | |||
625 |
|
625 | |||
626 | def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override): |
|
626 | def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override): | |
627 | qualified_home_url = request.route_url('home') |
|
627 | qualified_home_url = request.route_url('home') | |
628 | parsed_url = urlobject.URLObject(qualified_home_url) |
|
628 | parsed_url = urlobject.URLObject(qualified_home_url) | |
629 | decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/'))) |
|
629 | decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/'))) | |
630 |
|
630 | |||
631 | args = { |
|
631 | args = { | |
632 | 'scheme': parsed_url.scheme, |
|
632 | 'scheme': parsed_url.scheme, | |
633 | 'user': '', |
|
633 | 'user': '', | |
634 | 'sys_user': getpass.getuser(), |
|
634 | 'sys_user': getpass.getuser(), | |
635 | # path if we use proxy-prefix |
|
635 | # path if we use proxy-prefix | |
636 | 'netloc': parsed_url.netloc+decoded_path, |
|
636 | 'netloc': parsed_url.netloc+decoded_path, | |
637 | 'hostname': parsed_url.hostname, |
|
637 | 'hostname': parsed_url.hostname, | |
638 | 'prefix': decoded_path, |
|
638 | 'prefix': decoded_path, | |
639 | 'repo': repo_name, |
|
639 | 'repo': repo_name, | |
640 | 'repoid': str(repo_id), |
|
640 | 'repoid': str(repo_id), | |
641 | 'repo_type': repo_type |
|
641 | 'repo_type': repo_type | |
642 | } |
|
642 | } | |
643 | args.update(override) |
|
643 | args.update(override) | |
644 | args['user'] = urllib.quote(safe_str(args['user'])) |
|
644 | args['user'] = urllib.quote(safe_str(args['user'])) | |
645 |
|
645 | |||
646 | for k, v in args.items(): |
|
646 | for k, v in args.items(): | |
647 | uri_tmpl = uri_tmpl.replace('{%s}' % k, v) |
|
647 | uri_tmpl = uri_tmpl.replace('{%s}' % k, v) | |
648 |
|
648 | |||
649 | # special case for SVN clone url |
|
649 | # special case for SVN clone url | |
650 | if repo_type == 'svn': |
|
650 | if repo_type == 'svn': | |
651 | uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://') |
|
651 | uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://') | |
652 |
|
652 | |||
653 | # remove leading @ sign if it's present. Case of empty user |
|
653 | # remove leading @ sign if it's present. Case of empty user | |
654 | url_obj = urlobject.URLObject(uri_tmpl) |
|
654 | url_obj = urlobject.URLObject(uri_tmpl) | |
655 | url = url_obj.with_netloc(url_obj.netloc.lstrip('@')) |
|
655 | url = url_obj.with_netloc(url_obj.netloc.lstrip('@')) | |
656 |
|
656 | |||
657 | return safe_unicode(url) |
|
657 | return safe_unicode(url) | |
658 |
|
658 | |||
659 |
|
659 | |||
660 | def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None, |
|
660 | def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None, | |
661 | maybe_unreachable=False, reference_obj=None): |
|
661 | maybe_unreachable=False, reference_obj=None): | |
662 | """ |
|
662 | """ | |
663 | Safe version of get_commit; if this commit doesn't exist for a |
|
663 | Safe version of get_commit; if this commit doesn't exist for a | |
664 | repository it returns a Dummy one instead |
|
664 | repository it returns a Dummy one instead | |
665 |
|
665 | |||
666 | :param repo: repository instance |
|
666 | :param repo: repository instance | |
667 | :param commit_id: commit id as str |
|
667 | :param commit_id: commit id as str | |
668 | :param commit_idx: numeric commit index |
|
668 | :param commit_idx: numeric commit index | |
669 | :param pre_load: optional list of commit attributes to load |
|
669 | :param pre_load: optional list of commit attributes to load | |
670 | :param maybe_unreachable: translate unreachable commits on git repos |
|
670 | :param maybe_unreachable: translate unreachable commits on git repos | |
671 | :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123" |
|
671 | :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123" | |
672 | """ |
|
672 | """ | |
673 | # TODO(skreft): remove these circular imports |
|
673 | # TODO(skreft): remove these circular imports | |
674 | from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit |
|
674 | from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit | |
675 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
675 | from rhodecode.lib.vcs.exceptions import RepositoryError | |
676 | if not isinstance(repo, BaseRepository): |
|
676 | if not isinstance(repo, BaseRepository): | |
677 | raise Exception('You must pass a Repository ' |
|
677 | raise Exception('You must pass a Repository ' | |
678 | 'object as first argument, got %s' % type(repo)) |
|
678 | 'object as first argument, got %s' % type(repo)) | |
679 |
|
679 | |||
680 | try: |
|
680 | try: | |
681 | commit = repo.get_commit( |
|
681 | commit = repo.get_commit( | |
682 | commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load, |
|
682 | commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load, | |
683 | maybe_unreachable=maybe_unreachable, reference_obj=reference_obj) |
|
683 | maybe_unreachable=maybe_unreachable, reference_obj=reference_obj) | |
684 | except (RepositoryError, LookupError): |
|
684 | except (RepositoryError, LookupError): | |
685 | commit = EmptyCommit() |
|
685 | commit = EmptyCommit() | |
686 | return commit |
|
686 | return commit | |
687 |
|
687 | |||
688 |
|
688 | |||
689 | def datetime_to_time(dt): |
|
689 | def datetime_to_time(dt): | |
690 | if dt: |
|
690 | if dt: | |
691 | return time.mktime(dt.timetuple()) |
|
691 | return time.mktime(dt.timetuple()) | |
692 |
|
692 | |||
693 |
|
693 | |||
694 | def time_to_datetime(tm): |
|
694 | def time_to_datetime(tm): | |
695 | if tm: |
|
695 | if tm: | |
696 | if isinstance(tm, compat.string_types): |
|
696 | if isinstance(tm, compat.string_types): | |
697 | try: |
|
697 | try: | |
698 | tm = float(tm) |
|
698 | tm = float(tm) | |
699 | except ValueError: |
|
699 | except ValueError: | |
700 | return |
|
700 | return | |
701 | return datetime.datetime.fromtimestamp(tm) |
|
701 | return datetime.datetime.fromtimestamp(tm) | |
702 |
|
702 | |||
703 |
|
703 | |||
704 | def time_to_utcdatetime(tm): |
|
704 | def time_to_utcdatetime(tm): | |
705 | if tm: |
|
705 | if tm: | |
706 | if isinstance(tm, compat.string_types): |
|
706 | if isinstance(tm, compat.string_types): | |
707 | try: |
|
707 | try: | |
708 | tm = float(tm) |
|
708 | tm = float(tm) | |
709 | except ValueError: |
|
709 | except ValueError: | |
710 | return |
|
710 | return | |
711 | return datetime.datetime.utcfromtimestamp(tm) |
|
711 | return datetime.datetime.utcfromtimestamp(tm) | |
712 |
|
712 | |||
713 |
|
713 | |||
714 | MENTIONS_REGEX = re.compile( |
|
714 | MENTIONS_REGEX = re.compile( | |
715 | # ^@ or @ without any special chars in front |
|
715 | # ^@ or @ without any special chars in front | |
716 | r'(?:^@|[^a-zA-Z0-9\-\_\.]@)' |
|
716 | r'(?:^@|[^a-zA-Z0-9\-\_\.]@)' | |
717 | # main body starts with letter, then can be . - _ |
|
717 | # main body starts with letter, then can be . - _ | |
718 | r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)', |
|
718 | r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)', | |
719 | re.VERBOSE | re.MULTILINE) |
|
719 | re.VERBOSE | re.MULTILINE) | |
720 |
|
720 | |||
721 |
|
721 | |||
722 | def extract_mentioned_users(s): |
|
722 | def extract_mentioned_users(s): | |
723 | """ |
|
723 | """ | |
724 | Returns unique usernames that have an @mention in the given string s |
|
724 | Returns unique usernames that have an @mention in the given string s | |
725 |
|
725 | |||
726 | :param s: string to get mentions from |
|
726 | :param s: string to get mentions from | |
727 | """ |
|
727 | """ | |
728 | usrs = set() |
|
728 | usrs = set() | |
729 | for username in MENTIONS_REGEX.findall(s): |
|
729 | for username in MENTIONS_REGEX.findall(s): | |
730 | usrs.add(username) |
|
730 | usrs.add(username) | |
731 |
|
731 | |||
732 | return sorted(list(usrs), key=lambda k: k.lower()) |
|
732 | return sorted(list(usrs), key=lambda k: k.lower()) | |
733 |
|
733 | |||
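Example with made-up usernames (illustrative sketch)::

    text = 'ping @marcin and @lisa_j: @marcin please review, cc not-a-mention@example.com'
    assert extract_mentioned_users(text) == ['lisa_j', 'marcin']
    # duplicates collapse, results sort case-insensitively, and the '@' inside
    # the e-mail address has no separator in front of it, so it is not a mention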
734 |
|
734 | |||
735 | class AttributeDictBase(dict): |
|
735 | class AttributeDictBase(dict): | |
736 | def __getstate__(self): |
|
736 | def __getstate__(self): | |
737 | odict = self.__dict__ # get attribute dictionary |
|
737 | odict = self.__dict__ # get attribute dictionary | |
738 | return odict |
|
738 | return odict | |
739 |
|
739 | |||
740 | def __setstate__(self, dict): |
|
740 | def __setstate__(self, dict): | |
741 | self.__dict__ = dict |
|
741 | self.__dict__ = dict | |
742 |
|
742 | |||
743 | __setattr__ = dict.__setitem__ |
|
743 | __setattr__ = dict.__setitem__ | |
744 | __delattr__ = dict.__delitem__ |
|
744 | __delattr__ = dict.__delitem__ | |
745 |
|
745 | |||
746 |
|
746 | |||
747 | class StrictAttributeDict(AttributeDictBase): |
|
747 | class StrictAttributeDict(AttributeDictBase): | |
748 | """ |
|
748 | """ | |
749 | Strict version of AttributeDict which raises an AttributeError when the |
|
749 | Strict version of AttributeDict which raises an AttributeError when the | |
750 | requested attribute is not set |
|
750 | requested attribute is not set | |
751 | """ |
|
751 | """ | |
752 | def __getattr__(self, attr): |
|
752 | def __getattr__(self, attr): | |
753 | try: |
|
753 | try: | |
754 | return self[attr] |
|
754 | return self[attr] | |
755 | except KeyError: |
|
755 | except KeyError: | |
756 | raise AttributeError('%s object has no attribute %s' % ( |
|
756 | raise AttributeError('%s object has no attribute %s' % ( | |
757 | self.__class__, attr)) |
|
757 | self.__class__, attr)) | |
758 |
|
758 | |||
759 |
|
759 | |||
760 | class AttributeDict(AttributeDictBase): |
|
760 | class AttributeDict(AttributeDictBase): | |
761 | def __getattr__(self, attr): |
|
761 | def __getattr__(self, attr): | |
762 | return self.get(attr, None) |
|
762 | return self.get(attr, None) | |
763 |
|
763 | |||
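A quick sketch of the difference between the two flavours (illustrative only)::

    d = AttributeDict(repo_name='test-repo')
    assert d.repo_name == 'test-repo'     # attribute access reads dict keys
    assert d.missing is None              # unknown keys silently give None
    d.owner = 'admin'                     # attribute writes become dict items
    assert d['owner'] == 'admin'

    s = StrictAttributeDict(repo_name='test-repo')
    try:
        s.missing
    except AttributeError:
        pass                              # the strict flavour raises instead of returning None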
764 |
|
764 | |||
765 |
|
765 | |||
766 | class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict): |
|
766 | class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict): | |
767 | def __init__(self, default_factory=None, *args, **kwargs): |
|
767 | def __init__(self, default_factory=None, *args, **kwargs): | |
768 | # in python3 you can omit the args to super |
|
768 | # in python3 you can omit the args to super | |
769 | super(OrderedDefaultDict, self).__init__(*args, **kwargs) |
|
769 | super(OrderedDefaultDict, self).__init__(*args, **kwargs) | |
770 | self.default_factory = default_factory |
|
770 | self.default_factory = default_factory | |
771 |
|
771 | |||
772 |
|
772 | |||
773 | def fix_PATH(os_=None): |
|
773 | def fix_PATH(os_=None): | |
774 | """ |
|
774 | """ | |
775 | Get the currently active python path, and append it to the PATH variable to fix |
|
775 | Get the currently active python path, and append it to the PATH variable to fix | |
776 | issues with subprocess calls and different python versions |
|
776 | issues with subprocess calls and different python versions | |
777 | """ |
|
777 | """ | |
778 | if os_ is None: |
|
778 | if os_ is None: | |
779 | import os |
|
779 | import os | |
780 | else: |
|
780 | else: | |
781 | os = os_ |
|
781 | os = os_ | |
782 |
|
782 | |||
783 | cur_path = os.path.split(sys.executable)[0] |
|
783 | cur_path = os.path.split(sys.executable)[0] | |
784 | if not os.environ['PATH'].startswith(cur_path): |
|
784 | if not os.environ['PATH'].startswith(cur_path): | |
785 | os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH']) |
|
785 | os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH']) | |
786 |
|
786 | |||
787 |
|
787 | |||
788 | def obfuscate_url_pw(engine): |
|
788 | def obfuscate_url_pw(engine): | |
789 | _url = engine or '' |
|
789 | _url = engine or '' | |
790 | try: |
|
790 | try: | |
791 | _url = sqlalchemy.engine.url.make_url(engine) |
|
791 | _url = sqlalchemy.engine.url.make_url(engine) | |
792 | if _url.password: |
|
792 | if _url.password: | |
793 | _url.password = 'XXXXX' |
|
793 | _url.password = 'XXXXX' | |
794 | except Exception: |
|
794 | except Exception: | |
795 | pass |
|
795 | pass | |
796 | return unicode(_url) |
|
796 | return unicode(_url) | |
797 |
|
797 | |||
798 |
|
798 | |||
799 | def get_server_url(environ): |
|
799 | def get_server_url(environ): | |
800 | req = webob.Request(environ) |
|
800 | req = webob.Request(environ) | |
801 | return req.host_url + req.script_name |
|
801 | return req.host_url + req.script_name | |
802 |
|
802 | |||
803 |
|
803 | |||
804 | def unique_id(hexlen=32): |
|
804 | def unique_id(hexlen=32): | |
805 | alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz" |
|
805 | alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz" | |
806 | return suuid(truncate_to=hexlen, alphabet=alphabet) |
|
806 | return suuid(truncate_to=hexlen, alphabet=alphabet) | |
807 |
|
807 | |||
808 |
|
808 | |||
809 | def suuid(url=None, truncate_to=22, alphabet=None): |
|
809 | def suuid(url=None, truncate_to=22, alphabet=None): | |
810 | """ |
|
810 | """ | |
811 | Generate and return a short URL safe UUID. |
|
811 | Generate and return a short URL safe UUID. | |
812 |
|
812 | |||
813 | If the url parameter is provided, set the namespace to the provided |
|
813 | If the url parameter is provided, set the namespace to the provided | |
814 | URL and generate a UUID. |
|
814 | URL and generate a UUID. | |
815 |
|
815 | |||
816 | :param url: the url to get the uuid for |
|
816 | :param url: the url to get the uuid for | |
817 | :param truncate_to: truncate the basic 22 char UUID to a shorter version |
|
817 | :param truncate_to: truncate the basic 22 char UUID to a shorter version | |
818 |
|
818 | |||
819 | The IDs won't be universally unique any longer, but the probability of |
|
819 | The IDs won't be universally unique any longer, but the probability of | |
820 | a collision will still be very low. |
|
820 | a collision will still be very low. | |
821 | """ |
|
821 | """ | |
822 | # Define our alphabet. |
|
822 | # Define our alphabet. | |
823 | _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ" |
|
823 | _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ" | |
824 |
|
824 | |||
825 | # If no URL is given, generate a random UUID. |
|
825 | # If no URL is given, generate a random UUID. | |
826 | if url is None: |
|
826 | if url is None: | |
827 | unique_id = uuid.uuid4().int |
|
827 | unique_id = uuid.uuid4().int | |
828 | else: |
|
828 | else: | |
829 | unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int |
|
829 | unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int | |
830 |
|
830 | |||
831 | alphabet_length = len(_ALPHABET) |
|
831 | alphabet_length = len(_ALPHABET) | |
832 | output = [] |
|
832 | output = [] | |
833 | while unique_id > 0: |
|
833 | while unique_id > 0: | |
834 | digit = unique_id % alphabet_length |
|
834 | digit = unique_id % alphabet_length | |
835 | output.append(_ALPHABET[digit]) |
|
835 | output.append(_ALPHABET[digit]) | |
836 | unique_id = int(unique_id / alphabet_length) |
|
836 | unique_id = int(unique_id / alphabet_length) | |
837 | return "".join(output)[:truncate_to] |
|
837 | return "".join(output)[:truncate_to] | |
838 |
|
838 | |||
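Example (illustrative sketch; values are random, so only shape properties are checked)::

    token = unique_id(hexlen=16)
    assert len(token) <= 16                       # truncated to the requested length
    assert all(c in '23456789ABCDEFGHJKLMNPQRSTUVWXYZ'
                    'abcdefghjklmnpqrstuvwxyz' for c in token)

    # a url pins the namespace (uuid3), so the same url always gives the same id
    assert suuid(url='http://example.com') == suuid(url='http://example.com')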
839 |
|
839 | |||
840 | def get_current_rhodecode_user(request=None): |
|
840 | def get_current_rhodecode_user(request=None): | |
841 | """ |
|
841 | """ | |
842 | Gets rhodecode user from request |
|
842 | Gets rhodecode user from request | |
843 | """ |
|
843 | """ | |
844 | pyramid_request = request or pyramid.threadlocal.get_current_request() |
|
844 | pyramid_request = request or pyramid.threadlocal.get_current_request() | |
845 |
|
845 | |||
846 | # web case |
|
846 | # web case | |
847 | if pyramid_request and hasattr(pyramid_request, 'user'): |
|
847 | if pyramid_request and hasattr(pyramid_request, 'user'): | |
848 | return pyramid_request.user |
|
848 | return pyramid_request.user | |
849 |
|
849 | |||
850 | # api case |
|
850 | # api case | |
851 | if pyramid_request and hasattr(pyramid_request, 'rpc_user'): |
|
851 | if pyramid_request and hasattr(pyramid_request, 'rpc_user'): | |
852 | return pyramid_request.rpc_user |
|
852 | return pyramid_request.rpc_user | |
853 |
|
853 | |||
854 | return None |
|
854 | return None | |
855 |
|
855 | |||
856 |
|
856 | |||
857 | def action_logger_generic(action, namespace=''): |
|
857 | def action_logger_generic(action, namespace=''): | |
858 | """ |
|
858 | """ | |
859 | A generic logger for actions useful to the system overview, tries to find |
|
859 | A generic logger for actions useful to the system overview, tries to find | |
860 | an acting user for the context of the call otherwise reports unknown user |
|
860 | an acting user for the context of the call otherwise reports unknown user | |
861 |
|
861 | |||
862 | :param action: logging message eg 'comment 5 deleted' |
|
862 | :param action: logging message eg 'comment 5 deleted' | |
863 | :type action: string |
|
863 | :type action: string | |
864 |
|
864 | |||
865 | :param namespace: namespace of the logging message eg. 'repo.comments' |
|
865 | :param namespace: namespace of the logging message eg. 'repo.comments' | |
866 | :type namespace: string |
|
866 | :type namespace: string | |
867 |
|
867 | |||
868 | """ |
|
868 | """ | |
869 |
|
869 | |||
870 | logger_name = 'rhodecode.actions' |
|
870 | logger_name = 'rhodecode.actions' | |
871 |
|
871 | |||
872 | if namespace: |
|
872 | if namespace: | |
873 | logger_name += '.' + namespace |
|
873 | logger_name += '.' + namespace | |
874 |
|
874 | |||
875 | log = logging.getLogger(logger_name) |
|
875 | log = logging.getLogger(logger_name) | |
876 |
|
876 | |||
877 | # get a user if we can |
|
877 | # get a user if we can | |
878 | user = get_current_rhodecode_user() |
|
878 | user = get_current_rhodecode_user() | |
879 |
|
879 | |||
880 | logfunc = log.info |
|
880 | logfunc = log.info | |
881 |
|
881 | |||
882 | if not user: |
|
882 | if not user: | |
883 | user = '<unknown user>' |
|
883 | user = '<unknown user>' | |
884 | logfunc = log.warning |
|
884 | logfunc = log.warning | |
885 |
|
885 | |||
886 | logfunc('Logging action by {}: {}'.format(user, action)) |
|
886 | logfunc('Logging action by {}: {}'.format(user, action)) | |
887 |
|
887 | |||
888 |
|
888 | |||
889 | def escape_split(text, sep=',', maxsplit=-1): |
|
889 | def escape_split(text, sep=',', maxsplit=-1): | |
890 | r""" |
|
890 | r""" | |
891 | Allows for escaping of the separator: e.g. arg='foo\, bar' |
|
891 | Allows for escaping of the separator: e.g. arg='foo\, bar' | |
892 |
|
892 | |||
893 | It should be noted that, given the way bash et al. do command line parsing, those |
|
893 | It should be noted that, given the way bash et al. do command line parsing, those | |
894 | single quotes are required. |
|
894 | single quotes are required. | |
895 | """ |
|
895 | """ | |
896 | escaped_sep = r'\%s' % sep |
|
896 | escaped_sep = r'\%s' % sep | |
897 |
|
897 | |||
898 | if escaped_sep not in text: |
|
898 | if escaped_sep not in text: | |
899 | return text.split(sep, maxsplit) |
|
899 | return text.split(sep, maxsplit) | |
900 |
|
900 | |||
901 | before, _mid, after = text.partition(escaped_sep) |
|
901 | before, _mid, after = text.partition(escaped_sep) | |
902 | startlist = before.split(sep, maxsplit) # a regular split is fine here |
|
902 | startlist = before.split(sep, maxsplit) # a regular split is fine here | |
903 | unfinished = startlist[-1] |
|
903 | unfinished = startlist[-1] | |
904 | startlist = startlist[:-1] |
|
904 | startlist = startlist[:-1] | |
905 |
|
905 | |||
906 | # recurse because there may be more escaped separators |
|
906 | # recurse because there may be more escaped separators | |
907 | endlist = escape_split(after, sep, maxsplit) |
|
907 | endlist = escape_split(after, sep, maxsplit) | |
908 |
|
908 | |||
909 | # finish building the escaped value. we use endlist[0] because the first |
|
909 | # finish building the escaped value. we use endlist[0] because the first | |
910 | # part of the string sent in recursion is the rest of the escaped value. |
|
910 | # part of the string sent in recursion is the rest of the escaped value. | |
911 | unfinished += sep + endlist[0] |
|
911 | unfinished += sep + endlist[0] | |
912 |
|
912 | |||
913 | return startlist + [unfinished] + endlist[1:] # put together all the parts |
|
913 | return startlist + [unfinished] + endlist[1:] # put together all the parts | |
914 |
|
914 | |||
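The recursion above keeps an escaped separator inside a single element; for example (illustrative sketch)::

    assert escape_split('foo, bar', sep=',') == ['foo', ' bar']               # plain split
    assert escape_split(r'foo\, bar, baz', sep=',') == ['foo, bar', ' baz']   # escaped ',' stays in one element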
915 |
|
915 | |||
916 | class OptionalAttr(object): |
|
916 | class OptionalAttr(object): | |
917 | """ |
|
917 | """ | |
918 | Special Optional Option that defines another attribute. Example:: |
|
918 | Special Optional Option that defines another attribute. Example:: | |
919 |
|
919 | |||
920 | def test(apiuser, userid=Optional(OAttr('apiuser'))): |
|
920 | def test(apiuser, userid=Optional(OAttr('apiuser'))): | |
921 | user = Optional.extract(userid) |
|
921 | user = Optional.extract(userid) | |
922 | # calls |
|
922 | # calls | |
923 |
|
923 | |||
924 | """ |
|
924 | """ | |
925 |
|
925 | |||
926 | def __init__(self, attr_name): |
|
926 | def __init__(self, attr_name): | |
927 | self.attr_name = attr_name |
|
927 | self.attr_name = attr_name | |
928 |
|
928 | |||
929 | def __repr__(self): |
|
929 | def __repr__(self): | |
930 | return '<OptionalAttr:%s>' % self.attr_name |
|
930 | return '<OptionalAttr:%s>' % self.attr_name | |
931 |
|
931 | |||
932 | def __call__(self): |
|
932 | def __call__(self): | |
933 | return self |
|
933 | return self | |
934 |
|
934 | |||
935 |
|
935 | |||
936 | # alias |
|
936 | # alias | |
937 | OAttr = OptionalAttr |
|
937 | OAttr = OptionalAttr | |
938 |
|
938 | |||
939 |
|
939 | |||
940 | class Optional(object): |
|
940 | class Optional(object): | |
941 | """ |
|
941 | """ | |
942 | Defines an optional parameter:: |
|
942 | Defines an optional parameter:: | |
943 |
|
943 | |||
944 | param = param.getval() if isinstance(param, Optional) else param |
|
944 | param = param.getval() if isinstance(param, Optional) else param | |
945 | param = param() if isinstance(param, Optional) else param |
|
945 | param = param() if isinstance(param, Optional) else param | |
946 |
|
946 | |||
947 | is equivalent to:: |
|
947 | is equivalent to:: | |
948 |
|
948 | |||
949 | param = Optional.extract(param) |
|
949 | param = Optional.extract(param) | |
950 |
|
950 | |||
951 | """ |
|
951 | """ | |
952 |
|
952 | |||
953 | def __init__(self, type_): |
|
953 | def __init__(self, type_): | |
954 | self.type_ = type_ |
|
954 | self.type_ = type_ | |
955 |
|
955 | |||
956 | def __repr__(self): |
|
956 | def __repr__(self): | |
957 | return '<Optional:%s>' % self.type_.__repr__() |
|
957 | return '<Optional:%s>' % self.type_.__repr__() | |
958 |
|
958 | |||
959 | def __call__(self): |
|
959 | def __call__(self): | |
960 | return self.getval() |
|
960 | return self.getval() | |
961 |
|
961 | |||
962 | def getval(self): |
|
962 | def getval(self): | |
963 | """ |
|
963 | """ | |
964 | returns value from this Optional instance |
|
964 | returns value from this Optional instance | |
965 | """ |
|
965 | """ | |
966 | if isinstance(self.type_, OAttr): |
|
966 | if isinstance(self.type_, OAttr): | |
967 | # use params name |
|
967 | # use params name | |
968 | return self.type_.attr_name |
|
968 | return self.type_.attr_name | |
969 | return self.type_ |
|
969 | return self.type_ | |
970 |
|
970 | |||
971 | @classmethod |
|
971 | @classmethod | |
972 | def extract(cls, val): |
|
972 | def extract(cls, val): | |
973 | """ |
|
973 | """ | |
974 | Extracts value from Optional() instance |
|
974 | Extracts value from Optional() instance | |
975 |
|
975 | |||
976 | :param val: |
|
976 | :param val: | |
977 | :return: original value if it's not Optional instance else |
|
977 | :return: original value if it's not Optional instance else | |
978 | value of instance |
|
978 | value of instance | |
979 | """ |
|
979 | """ | |
980 | if isinstance(val, cls): |
|
980 | if isinstance(val, cls): | |
981 | return val.getval() |
|
981 | return val.getval() | |
982 | return val |
|
982 | return val | |
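A small sketch of how the two helpers above are typically combined in an API method signature (the function name is illustrative, not an existing endpoint):

    def get_user(apiuser, userid=Optional(OAttr('apiuser'))):
        # Optional.extract() unwraps the default; for an OAttr default it
        # returns the referenced attribute name, i.e. the string 'apiuser',
        # which signals "fall back to the calling api user".
        userid = Optional.extract(userid)
        return userid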
983 |
|
983 | |||
984 |
|
984 | |||
985 | def glob2re(pat): |
|
985 | def glob2re(pat): | |
986 | """ |
|
986 | """ | |
987 | Translate a shell PATTERN to a regular expression. |
|
987 | Translate a shell PATTERN to a regular expression. | |
988 |
|
988 | |||
989 | There is no way to quote meta-characters. |
|
989 | There is no way to quote meta-characters. | |
990 | """ |
|
990 | """ | |
991 |
|
991 | |||
992 | i, n = 0, len(pat) |
|
992 | i, n = 0, len(pat) | |
993 | res = '' |
|
993 | res = '' | |
994 | while i < n: |
|
994 | while i < n: | |
995 | c = pat[i] |
|
995 | c = pat[i] | |
996 | i = i+1 |
|
996 | i = i+1 | |
997 | if c == '*': |
|
997 | if c == '*': | |
998 | #res = res + '.*' |
|
998 | #res = res + '.*' | |
999 | res = res + '[^/]*' |
|
999 | res = res + '[^/]*' | |
1000 | elif c == '?': |
|
1000 | elif c == '?': | |
1001 | #res = res + '.' |
|
1001 | #res = res + '.' | |
1002 | res = res + '[^/]' |
|
1002 | res = res + '[^/]' | |
1003 | elif c == '[': |
|
1003 | elif c == '[': | |
1004 | j = i |
|
1004 | j = i | |
1005 | if j < n and pat[j] == '!': |
|
1005 | if j < n and pat[j] == '!': | |
1006 | j = j+1 |
|
1006 | j = j+1 | |
1007 | if j < n and pat[j] == ']': |
|
1007 | if j < n and pat[j] == ']': | |
1008 | j = j+1 |
|
1008 | j = j+1 | |
1009 | while j < n and pat[j] != ']': |
|
1009 | while j < n and pat[j] != ']': | |
1010 | j = j+1 |
|
1010 | j = j+1 | |
1011 | if j >= n: |
|
1011 | if j >= n: | |
1012 | res = res + '\\[' |
|
1012 | res = res + '\\[' | |
1013 | else: |
|
1013 | else: | |
1014 | stuff = pat[i:j].replace('\\','\\\\') |
|
1014 | stuff = pat[i:j].replace('\\','\\\\') | |
1015 | i = j+1 |
|
1015 | i = j+1 | |
1016 | if stuff[0] == '!': |
|
1016 | if stuff[0] == '!': | |
1017 | stuff = '^' + stuff[1:] |
|
1017 | stuff = '^' + stuff[1:] | |
1018 | elif stuff[0] == '^': |
|
1018 | elif stuff[0] == '^': | |
1019 | stuff = '\\' + stuff |
|
1019 | stuff = '\\' + stuff | |
1020 | res = '%s[%s]' % (res, stuff) |
|
1020 | res = '%s[%s]' % (res, stuff) | |
1021 | else: |
|
1021 | else: | |
1022 | res = res + re.escape(c) |
|
1022 | res = res + re.escape(c) | |
1023 | return res + '\Z(?ms)' |
|
1023 | return res + '\Z(?ms)' | |
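For illustration, the translated pattern can be compiled and matched like this; '*' and '?' translate to [^/]* and [^/], so they do not cross path separators (the trailing (?ms) flag position is accepted under Python 2, which this module targets):

    import re

    pattern = re.compile(glob2re('docs/*.rst'))
    assert pattern.match('docs/index.rst')
    assert not pattern.match('docs/api/index.rst')   # '*' does not cross '/'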
1024 |
|
1024 | |||
1025 |
|
1025 | |||
1026 | def parse_byte_string(size_str): |
|
1026 | def parse_byte_string(size_str): | |
1027 | match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE) |
|
1027 | match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE) | |
1028 | if not match: |
|
1028 | if not match: | |
1029 | raise ValueError('Given size:%s is invalid, please make sure ' |
|
1029 | raise ValueError('Given size:%s is invalid, please make sure ' | |
1030 | 'to use format of <num>(MB|KB)' % size_str) |
|
1030 | 'to use format of <num>(MB|KB)' % size_str) | |
1031 |
|
1031 | |||
1032 | _parts = match.groups() |
|
1032 | _parts = match.groups() | |
1033 | num, type_ = _parts |
|
1033 | num, type_ = _parts | |
1034 | return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()] |
|
1034 | return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()] | |
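Example values, following the regex above (only MB and KB suffixes are accepted, case-insensitively):

    assert parse_byte_string('10MB') == 10 * 1024 * 1024
    assert parse_byte_string('512kb') == 512 * 1024
    # parse_byte_string('1GB') raises ValueError: the suffix is not supported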
1035 |
|
1035 | |||
1036 |
|
1036 | |||
1037 | class CachedProperty(object): |
|
1037 | class CachedProperty(object): | |
1038 | """ |
|
1038 | """ | |
1039 | Lazy attributes, with an option to invalidate the cache by calling a method |
|
1039 | Lazy attributes, with an option to invalidate the cache by calling a method | |
1040 |
|
1040 | |||
1041 | >>> class Foo(object): |
|
1041 | >>> class Foo(object): | |
1042 | ... |
|
1042 | ... | |
1043 | ... @CachedProperty |
|
1043 | ... @CachedProperty | |
1044 | ... def heavy_func(self): |
|
1044 | ... def heavy_func(self): | |
1045 | ... return 'super-calculation' |
|
1045 | ... return 'super-calculation' | |
1046 | ... |
|
1046 | ... | |
1047 | ... foo = Foo() |
|
1047 | ... foo = Foo() | |
1048 | ... foo.heavy_func # first computation |
|
1048 | ... foo.heavy_func # first computation | |
1049 | ... foo.heavy_func # fetch from cache |
|
1049 | ... foo.heavy_func # fetch from cache | |
1050 | ... foo._invalidate_prop_cache('heavy_func') |
|
1050 | ... foo._invalidate_prop_cache('heavy_func') | |
1051 |
|
1051 | |||
1052 | # at this point accessing foo.heavy_func will be re-computed |
|
1052 | # at this point accessing foo.heavy_func will be re-computed | |
1053 | """ |
|
1053 | """ | |
1054 |
|
1054 | |||
1055 | def __init__(self, func, func_name=None): |
|
1055 | def __init__(self, func, func_name=None): | |
1056 |
|
1056 | |||
1057 | if func_name is None: |
|
1057 | if func_name is None: | |
1058 | func_name = func.__name__ |
|
1058 | func_name = func.__name__ | |
1059 | self.data = (func, func_name) |
|
1059 | self.data = (func, func_name) | |
1060 | update_wrapper(self, func) |
|
1060 | update_wrapper(self, func) | |
1061 |
|
1061 | |||
1062 | def __get__(self, inst, class_): |
|
1062 | def __get__(self, inst, class_): | |
1063 | if inst is None: |
|
1063 | if inst is None: | |
1064 | return self |
|
1064 | return self | |
1065 |
|
1065 | |||
1066 | func, func_name = self.data |
|
1066 | func, func_name = self.data | |
1067 | value = func(inst) |
|
1067 | value = func(inst) | |
1068 | inst.__dict__[func_name] = value |
|
1068 | inst.__dict__[func_name] = value | |
1069 | if '_invalidate_prop_cache' not in inst.__dict__: |
|
1069 | if '_invalidate_prop_cache' not in inst.__dict__: | |
1070 | inst.__dict__['_invalidate_prop_cache'] = partial( |
|
1070 | inst.__dict__['_invalidate_prop_cache'] = partial( | |
1071 | self._invalidate_prop_cache, inst) |
|
1071 | self._invalidate_prop_cache, inst) | |
1072 | return value |
|
1072 | return value | |
1073 |
|
1073 | |||
1074 | def _invalidate_prop_cache(self, inst, name): |
|
1074 | def _invalidate_prop_cache(self, inst, name): | |
1075 | inst.__dict__.pop(name, None) |
|
1075 | inst.__dict__.pop(name, None) | |
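A short sketch of the descriptor in use; the first access stores the computed value directly in the instance __dict__, so later lookups bypass __get__ until the cached entry is invalidated:

    class Repo(object):

        @CachedProperty
        def commit_count(self):
            print('computing')
            return 42

    repo = Repo()
    repo.commit_count                    # prints 'computing', returns 42
    repo.commit_count                    # served from repo.__dict__, no recompute
    repo._invalidate_prop_cache('commit_count')
    repo.commit_count                    # recomputed on the next access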
1076 |
|
1076 | |||
1077 |
|
1077 | |||
1078 | def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True): |
|
1078 | def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True): | |
1079 | """ |
|
1079 | """ | |
1080 | Retry decorator with exponential backoff. |
|
1080 | Retry decorator with exponential backoff. | |
1081 |
|
1081 | |||
1082 | Parameters |
|
1082 | Parameters | |
1083 | ---------- |
|
1083 | ---------- | |
1084 | func : typing.Callable, optional |
|
1084 | func : typing.Callable, optional | |
1085 | Callable on which the decorator is applied, by default None |
|
1085 | Callable on which the decorator is applied, by default None | |
1086 | exception : Exception or tuple of Exceptions, optional |
|
1086 | exception : Exception or tuple of Exceptions, optional | |
1087 | Exception(s) that invoke retry, by default Exception |
|
1087 | Exception(s) that invoke retry, by default Exception | |
1088 | n_tries : int, optional |
|
1088 | n_tries : int, optional | |
1089 | Number of tries before giving up, by default 5 |
|
1089 | Number of tries before giving up, by default 5 | |
1090 | delay : int, optional |
|
1090 | delay : int, optional | |
1091 | Initial delay between retries in seconds, by default 5 |
|
1091 | Initial delay between retries in seconds, by default 5 | |
1092 | backoff : int, optional |
|
1092 | backoff : int, optional | |
1093 | Backoff multiplier e.g. value of 2 will double the delay, by default 1 |
|
1093 | Backoff multiplier e.g. value of 2 will double the delay, by default 1 | |
1094 | logger : bool, optional |
|
1094 | logger : bool, optional | |
1095 | Option to log or print, by default True |
|
1095 | Option to log or print, by default True | |
1096 |
|
1096 | |||
1097 | Returns |
|
1097 | Returns | |
1098 | ------- |
|
1098 | ------- | |
1099 | typing.Callable |
|
1099 | typing.Callable | |
1100 | Decorated callable that calls itself when exception(s) occur. |
|
1100 | Decorated callable that calls itself when exception(s) occur. | |
1101 |
|
1101 | |||
1102 | Examples |
|
1102 | Examples | |
1103 | -------- |
|
1103 | -------- | |
1104 | >>> import random |
|
1104 | >>> import random | |
1105 | >>> @retry(exception=Exception, n_tries=3) |
|
1105 | >>> @retry(exception=Exception, n_tries=3) | |
1106 | ... def test_random(text): |
|
1106 | ... def test_random(text): | |
1107 | ... x = random.random() |
|
1107 | ... x = random.random() | |
1108 | ... if x < 0.5: |
|
1108 | ... if x < 0.5: | |
1109 | ... raise Exception("Fail") |
|
1109 | ... raise Exception("Fail") | |
1110 | ... else: |
|
1110 | ... else: | |
1111 | ... print("Success: ", text) |
|
1111 | ... print("Success: ", text) | |
1112 | >>> test_random("It works!") |
|
1112 | >>> test_random("It works!") | |
1113 | """ |
|
1113 | """ | |
1114 |
|
1114 | |||
1115 | if func is None: |
|
1115 | if func is None: | |
1116 | return partial( |
|
1116 | return partial( | |
1117 | retry, |
|
1117 | retry, | |
1118 | exception=exception, |
|
1118 | exception=exception, | |
1119 | n_tries=n_tries, |
|
1119 | n_tries=n_tries, | |
1120 | delay=delay, |
|
1120 | delay=delay, | |
1121 | backoff=backoff, |
|
1121 | backoff=backoff, | |
1122 | logger=logger, |
|
1122 | logger=logger, | |
1123 | ) |
|
1123 | ) | |
1124 |
|
1124 | |||
1125 | @wraps(func) |
|
1125 | @wraps(func) | |
1126 | def wrapper(*args, **kwargs): |
|
1126 | def wrapper(*args, **kwargs): | |
1127 | _n_tries, n_delay = n_tries, delay |
|
1127 | _n_tries, n_delay = n_tries, delay | |
1128 | log = logging.getLogger('rhodecode.retry') |
|
1128 | log = logging.getLogger('rhodecode.retry') | |
1129 |
|
1129 | |||
1130 | while _n_tries > 1: |
|
1130 | while _n_tries > 1: | |
1131 | try: |
|
1131 | try: | |
1132 | return func(*args, **kwargs) |
|
1132 | return func(*args, **kwargs) | |
1133 | except exception as e: |
|
1133 | except exception as e: | |
1134 | e_details = repr(e) |
|
1134 | e_details = repr(e) | |
1135 | msg = "Exception on calling func {func}: {e}, " \ |
|
1135 | msg = "Exception on calling func {func}: {e}, " \ | |
1136 | "Retrying in {n_delay} seconds..."\ |
|
1136 | "Retrying in {n_delay} seconds..."\ | |
1137 | .format(func=func, e=e_details, n_delay=n_delay) |
|
1137 | .format(func=func, e=e_details, n_delay=n_delay) | |
1138 | if logger: |
|
1138 | if logger: | |
1139 | log.warning(msg) |
|
1139 | log.warning(msg) | |
1140 | else: |
|
1140 | else: | |
1141 | print(msg) |
|
1141 | print(msg) | |
1142 | time.sleep(n_delay) |
|
1142 | time.sleep(n_delay) | |
1143 | _n_tries -= 1 |
|
1143 | _n_tries -= 1 | |
1144 | n_delay *= backoff |
|
1144 | n_delay *= backoff | |
1145 |
|
1145 | |||
1146 | return func(*args, **kwargs) |
|
1146 | return func(*args, **kwargs) | |
1147 |
|
1147 | |||
1148 | return wrapper |
|
1148 | return wrapper | |
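Because of the `func is None` branch above, the decorator works both bare and with arguments; a sketch of the two forms:

    @retry
    def flaky_call():
        pass        # retried up to 5 times with a fixed 5 second delay

    @retry(exception=(IOError, OSError), n_tries=3, delay=1, backoff=2)
    def flaky_io_call():
        pass        # waits 1s, then 2s, before the final attempt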
|
1149 | ||||
|
1150 | ||||
|
1151 | def user_agent_normalizer(user_agent_raw): | |||
|
1152 | log = logging.getLogger('rhodecode.user_agent_normalizer') | |||
|
1153 | ua = (user_agent_raw or '').strip().lower() | |||
|
1154 | ||||
|
1155 | try: | |||
|
1156 | if 'mercurial/proto-1.0' in ua: | |||
|
1157 | ua = ua.replace('mercurial/proto-1.0', '') | |||
|
1158 | ua = ua.replace('(', '').replace(')', '').strip() | |||
|
1159 | ua = ua.replace('mercurial ', 'mercurial/') | |||
|
1160 | elif ua.startswith('git'): | |||
|
1161 | pass | |||
|
1162 | except Exception: | |||
|
1163 | log.exception('Failed to parse scm user-agent') | |||
|
1164 | ||||
|
1165 | return ua |
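Illustrative inputs and outputs for the normalizer above (the raw values are hypothetical but follow the formats the two branches handle):

    user_agent_normalizer('mercurial/proto-1.0 (Mercurial 5.1.1)')
    # -> 'mercurial/5.1.1'
    user_agent_normalizer('git/2.30.0')
    # -> 'git/2.30.0'  (git agents are only lower-cased and stripped)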
@@ -1,2380 +1,2381 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | pull request model for RhodeCode |
|
23 | pull request model for RhodeCode | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | import json |
|
27 | import json | |
28 | import logging |
|
28 | import logging | |
29 | import os |
|
29 | import os | |
30 |
|
30 | |||
31 | import datetime |
|
31 | import datetime | |
32 | import urllib |
|
32 | import urllib | |
33 | import collections |
|
33 | import collections | |
34 |
|
34 | |||
35 | from pyramid import compat |
|
35 | from pyramid import compat | |
36 | from pyramid.threadlocal import get_current_request |
|
36 | from pyramid.threadlocal import get_current_request | |
37 |
|
37 | |||
38 | from rhodecode.lib.vcs.nodes import FileNode |
|
38 | from rhodecode.lib.vcs.nodes import FileNode | |
39 | from rhodecode.translation import lazy_ugettext |
|
39 | from rhodecode.translation import lazy_ugettext | |
40 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
40 | from rhodecode.lib import helpers as h, hooks_utils, diffs | |
41 | from rhodecode.lib import audit_logger |
|
41 | from rhodecode.lib import audit_logger | |
42 | from rhodecode.lib.compat import OrderedDict |
|
42 | from rhodecode.lib.compat import OrderedDict | |
43 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
43 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon | |
44 | from rhodecode.lib.markup_renderer import ( |
|
44 | from rhodecode.lib.markup_renderer import ( | |
45 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
45 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) | |
46 | from rhodecode.lib.utils2 import ( |
|
46 | from rhodecode.lib.utils2 import ( | |
47 | safe_unicode, safe_str, md5_safe, AttributeDict, safe_int, |
|
47 | safe_unicode, safe_str, md5_safe, AttributeDict, safe_int, | |
48 | get_current_rhodecode_user) |
|
48 | get_current_rhodecode_user) | |
49 | from rhodecode.lib.vcs.backends.base import ( |
|
49 | from rhodecode.lib.vcs.backends.base import ( | |
50 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason, |
|
50 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason, | |
51 | TargetRefMissing, SourceRefMissing) |
|
51 | TargetRefMissing, SourceRefMissing) | |
52 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
52 | from rhodecode.lib.vcs.conf import settings as vcs_settings | |
53 | from rhodecode.lib.vcs.exceptions import ( |
|
53 | from rhodecode.lib.vcs.exceptions import ( | |
54 | CommitDoesNotExistError, EmptyRepositoryError) |
|
54 | CommitDoesNotExistError, EmptyRepositoryError) | |
55 | from rhodecode.model import BaseModel |
|
55 | from rhodecode.model import BaseModel | |
56 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
56 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
57 | from rhodecode.model.comment import CommentsModel |
|
57 | from rhodecode.model.comment import CommentsModel | |
58 | from rhodecode.model.db import ( |
|
58 | from rhodecode.model.db import ( | |
59 | aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
59 | aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, | |
60 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User) |
|
60 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User) | |
61 | from rhodecode.model.meta import Session |
|
61 | from rhodecode.model.meta import Session | |
62 | from rhodecode.model.notification import NotificationModel, \ |
|
62 | from rhodecode.model.notification import NotificationModel, \ | |
63 | EmailNotificationModel |
|
63 | EmailNotificationModel | |
64 | from rhodecode.model.scm import ScmModel |
|
64 | from rhodecode.model.scm import ScmModel | |
65 | from rhodecode.model.settings import VcsSettingsModel |
|
65 | from rhodecode.model.settings import VcsSettingsModel | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | log = logging.getLogger(__name__) |
|
68 | log = logging.getLogger(__name__) | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | # Data structure to hold the response data when updating commits during a pull |
|
71 | # Data structure to hold the response data when updating commits during a pull | |
72 | # request update. |
|
72 | # request update. | |
73 | class UpdateResponse(object): |
|
73 | class UpdateResponse(object): | |
74 |
|
74 | |||
75 | def __init__(self, executed, reason, new, old, common_ancestor_id, |
|
75 | def __init__(self, executed, reason, new, old, common_ancestor_id, | |
76 | commit_changes, source_changed, target_changed): |
|
76 | commit_changes, source_changed, target_changed): | |
77 |
|
77 | |||
78 | self.executed = executed |
|
78 | self.executed = executed | |
79 | self.reason = reason |
|
79 | self.reason = reason | |
80 | self.new = new |
|
80 | self.new = new | |
81 | self.old = old |
|
81 | self.old = old | |
82 | self.common_ancestor_id = common_ancestor_id |
|
82 | self.common_ancestor_id = common_ancestor_id | |
83 | self.changes = commit_changes |
|
83 | self.changes = commit_changes | |
84 | self.source_changed = source_changed |
|
84 | self.source_changed = source_changed | |
85 | self.target_changed = target_changed |
|
85 | self.target_changed = target_changed | |
86 |
|
86 | |||
87 |
|
87 | |||
88 | def get_diff_info( |
|
88 | def get_diff_info( | |
89 | source_repo, source_ref, target_repo, target_ref, get_authors=False, |
|
89 | source_repo, source_ref, target_repo, target_ref, get_authors=False, | |
90 | get_commit_authors=True): |
|
90 | get_commit_authors=True): | |
91 | """ |
|
91 | """ | |
92 | Calculates detailed diff information for use when previewing the creation of a pull request. |
|
92 | Calculates detailed diff information for use when previewing the creation of a pull request. | |
93 | This is also used by the default reviewers logic. |
|
93 | This is also used by the default reviewers logic. | |
94 | """ |
|
94 | """ | |
95 |
|
95 | |||
96 | source_scm = source_repo.scm_instance() |
|
96 | source_scm = source_repo.scm_instance() | |
97 | target_scm = target_repo.scm_instance() |
|
97 | target_scm = target_repo.scm_instance() | |
98 |
|
98 | |||
99 | ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm) |
|
99 | ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm) | |
100 | if not ancestor_id: |
|
100 | if not ancestor_id: | |
101 | raise ValueError( |
|
101 | raise ValueError( | |
102 | 'cannot calculate diff info without a common ancestor. ' |
|
102 | 'cannot calculate diff info without a common ancestor. ' | |
103 | 'Make sure both repositories are related, and have a common forking commit.') |
|
103 | 'Make sure both repositories are related, and have a common forking commit.') | |
104 |
|
104 | |||
105 | # the case here is that we want a simple diff without incoming commits, |
|
105 | # the case here is that we want a simple diff without incoming commits, | |
106 | # previewing what will be merged based only on commits in the source. |
|
106 | # previewing what will be merged based only on commits in the source. | |
107 | log.debug('Using ancestor %s as source_ref instead of %s', |
|
107 | log.debug('Using ancestor %s as source_ref instead of %s', | |
108 | ancestor_id, source_ref) |
|
108 | ancestor_id, source_ref) | |
109 |
|
109 | |||
110 | # source of changes now is the common ancestor |
|
110 | # source of changes now is the common ancestor | |
111 | source_commit = source_scm.get_commit(commit_id=ancestor_id) |
|
111 | source_commit = source_scm.get_commit(commit_id=ancestor_id) | |
112 | # target commit becomes the source ref as it is the last commit |
|
112 | # target commit becomes the source ref as it is the last commit | |
113 | # for diff generation this logic gives proper diff |
|
113 | # for diff generation this logic gives proper diff | |
114 | target_commit = source_scm.get_commit(commit_id=source_ref) |
|
114 | target_commit = source_scm.get_commit(commit_id=source_ref) | |
115 |
|
115 | |||
116 | vcs_diff = \ |
|
116 | vcs_diff = \ | |
117 | source_scm.get_diff(commit1=source_commit, commit2=target_commit, |
|
117 | source_scm.get_diff(commit1=source_commit, commit2=target_commit, | |
118 | ignore_whitespace=False, context=3) |
|
118 | ignore_whitespace=False, context=3) | |
119 |
|
119 | |||
120 | diff_processor = diffs.DiffProcessor( |
|
120 | diff_processor = diffs.DiffProcessor( | |
121 | vcs_diff, format='newdiff', diff_limit=None, |
|
121 | vcs_diff, format='newdiff', diff_limit=None, | |
122 | file_limit=None, show_full_diff=True) |
|
122 | file_limit=None, show_full_diff=True) | |
123 |
|
123 | |||
124 | _parsed = diff_processor.prepare() |
|
124 | _parsed = diff_processor.prepare() | |
125 |
|
125 | |||
126 | all_files = [] |
|
126 | all_files = [] | |
127 | all_files_changes = [] |
|
127 | all_files_changes = [] | |
128 | changed_lines = {} |
|
128 | changed_lines = {} | |
129 | stats = [0, 0] |
|
129 | stats = [0, 0] | |
130 | for f in _parsed: |
|
130 | for f in _parsed: | |
131 | all_files.append(f['filename']) |
|
131 | all_files.append(f['filename']) | |
132 | all_files_changes.append({ |
|
132 | all_files_changes.append({ | |
133 | 'filename': f['filename'], |
|
133 | 'filename': f['filename'], | |
134 | 'stats': f['stats'] |
|
134 | 'stats': f['stats'] | |
135 | }) |
|
135 | }) | |
136 | stats[0] += f['stats']['added'] |
|
136 | stats[0] += f['stats']['added'] | |
137 | stats[1] += f['stats']['deleted'] |
|
137 | stats[1] += f['stats']['deleted'] | |
138 |
|
138 | |||
139 | changed_lines[f['filename']] = [] |
|
139 | changed_lines[f['filename']] = [] | |
140 | if len(f['chunks']) < 2: |
|
140 | if len(f['chunks']) < 2: | |
141 | continue |
|
141 | continue | |
142 | # first line is "context" information |
|
142 | # first line is "context" information | |
143 | for chunks in f['chunks'][1:]: |
|
143 | for chunks in f['chunks'][1:]: | |
144 | for chunk in chunks['lines']: |
|
144 | for chunk in chunks['lines']: | |
145 | if chunk['action'] not in ('del', 'mod'): |
|
145 | if chunk['action'] not in ('del', 'mod'): | |
146 | continue |
|
146 | continue | |
147 | changed_lines[f['filename']].append(chunk['old_lineno']) |
|
147 | changed_lines[f['filename']].append(chunk['old_lineno']) | |
148 |
|
148 | |||
149 | commit_authors = [] |
|
149 | commit_authors = [] | |
150 | user_counts = {} |
|
150 | user_counts = {} | |
151 | email_counts = {} |
|
151 | email_counts = {} | |
152 | author_counts = {} |
|
152 | author_counts = {} | |
153 | _commit_cache = {} |
|
153 | _commit_cache = {} | |
154 |
|
154 | |||
155 | commits = [] |
|
155 | commits = [] | |
156 | if get_commit_authors: |
|
156 | if get_commit_authors: | |
157 | log.debug('Obtaining commit authors from set of commits') |
|
157 | log.debug('Obtaining commit authors from set of commits') | |
158 | _compare_data = target_scm.compare( |
|
158 | _compare_data = target_scm.compare( | |
159 | target_ref, source_ref, source_scm, merge=True, |
|
159 | target_ref, source_ref, source_scm, merge=True, | |
160 | pre_load=["author", "date", "message"] |
|
160 | pre_load=["author", "date", "message"] | |
161 | ) |
|
161 | ) | |
162 |
|
162 | |||
163 | for commit in _compare_data: |
|
163 | for commit in _compare_data: | |
164 | # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned |
|
164 | # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned | |
165 | # by this function, which is later passed through JSON serialization |
|
165 | # by this function, which is later passed through JSON serialization | |
166 | serialized_commit = dict( |
|
166 | serialized_commit = dict( | |
167 | author=commit.author, |
|
167 | author=commit.author, | |
168 | date=commit.date, |
|
168 | date=commit.date, | |
169 | message=commit.message, |
|
169 | message=commit.message, | |
170 | commit_id=commit.raw_id, |
|
170 | commit_id=commit.raw_id, | |
171 | raw_id=commit.raw_id |
|
171 | raw_id=commit.raw_id | |
172 | ) |
|
172 | ) | |
173 | commits.append(serialized_commit) |
|
173 | commits.append(serialized_commit) | |
174 | user = User.get_from_cs_author(serialized_commit['author']) |
|
174 | user = User.get_from_cs_author(serialized_commit['author']) | |
175 | if user and user not in commit_authors: |
|
175 | if user and user not in commit_authors: | |
176 | commit_authors.append(user) |
|
176 | commit_authors.append(user) | |
177 |
|
177 | |||
178 | # lines |
|
178 | # lines | |
179 | if get_authors: |
|
179 | if get_authors: | |
180 | log.debug('Calculating authors of changed files') |
|
180 | log.debug('Calculating authors of changed files') | |
181 | target_commit = source_repo.get_commit(ancestor_id) |
|
181 | target_commit = source_repo.get_commit(ancestor_id) | |
182 |
|
182 | |||
183 | for fname, lines in changed_lines.items(): |
|
183 | for fname, lines in changed_lines.items(): | |
184 |
|
184 | |||
185 | try: |
|
185 | try: | |
186 | node = target_commit.get_node(fname, pre_load=["is_binary"]) |
|
186 | node = target_commit.get_node(fname, pre_load=["is_binary"]) | |
187 | except Exception: |
|
187 | except Exception: | |
188 | log.exception("Failed to load node with path %s", fname) |
|
188 | log.exception("Failed to load node with path %s", fname) | |
189 | continue |
|
189 | continue | |
190 |
|
190 | |||
191 | if not isinstance(node, FileNode): |
|
191 | if not isinstance(node, FileNode): | |
192 | continue |
|
192 | continue | |
193 |
|
193 | |||
194 | # NOTE(marcink): for binary node we don't do annotation, just use last author |
|
194 | # NOTE(marcink): for binary node we don't do annotation, just use last author | |
195 | if node.is_binary: |
|
195 | if node.is_binary: | |
196 | author = node.last_commit.author |
|
196 | author = node.last_commit.author | |
197 | email = node.last_commit.author_email |
|
197 | email = node.last_commit.author_email | |
198 |
|
198 | |||
199 | user = User.get_from_cs_author(author) |
|
199 | user = User.get_from_cs_author(author) | |
200 | if user: |
|
200 | if user: | |
201 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
201 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 | |
202 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
202 | author_counts[author] = author_counts.get(author, 0) + 1 | |
203 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
203 | email_counts[email] = email_counts.get(email, 0) + 1 | |
204 |
|
204 | |||
205 | continue |
|
205 | continue | |
206 |
|
206 | |||
207 | for annotation in node.annotate: |
|
207 | for annotation in node.annotate: | |
208 | line_no, commit_id, get_commit_func, line_text = annotation |
|
208 | line_no, commit_id, get_commit_func, line_text = annotation | |
209 | if line_no in lines: |
|
209 | if line_no in lines: | |
210 | if commit_id not in _commit_cache: |
|
210 | if commit_id not in _commit_cache: | |
211 | _commit_cache[commit_id] = get_commit_func() |
|
211 | _commit_cache[commit_id] = get_commit_func() | |
212 | commit = _commit_cache[commit_id] |
|
212 | commit = _commit_cache[commit_id] | |
213 | author = commit.author |
|
213 | author = commit.author | |
214 | email = commit.author_email |
|
214 | email = commit.author_email | |
215 | user = User.get_from_cs_author(author) |
|
215 | user = User.get_from_cs_author(author) | |
216 | if user: |
|
216 | if user: | |
217 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
217 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 | |
218 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
218 | author_counts[author] = author_counts.get(author, 0) + 1 | |
219 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
219 | email_counts[email] = email_counts.get(email, 0) + 1 | |
220 |
|
220 | |||
221 | log.debug('Default reviewers processing finished') |
|
221 | log.debug('Default reviewers processing finished') | |
222 |
|
222 | |||
223 | return { |
|
223 | return { | |
224 | 'commits': commits, |
|
224 | 'commits': commits, | |
225 | 'files': all_files_changes, |
|
225 | 'files': all_files_changes, | |
226 | 'stats': stats, |
|
226 | 'stats': stats, | |
227 | 'ancestor': ancestor_id, |
|
227 | 'ancestor': ancestor_id, | |
228 | # original authors of modified files |
|
228 | # original authors of modified files | |
229 | 'original_authors': { |
|
229 | 'original_authors': { | |
230 | 'users': user_counts, |
|
230 | 'users': user_counts, | |
231 | 'authors': author_counts, |
|
231 | 'authors': author_counts, | |
232 | 'emails': email_counts, |
|
232 | 'emails': email_counts, | |
233 | }, |
|
233 | }, | |
234 | 'commit_authors': commit_authors |
|
234 | 'commit_authors': commit_authors | |
235 | } |
|
235 | } | |
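A usage sketch of the returned structure (the repositories and refs are assumed to be resolved elsewhere; this is an illustration, not a documented contract):

    info = get_diff_info(
        source_repo, source_ref, target_repo, target_ref,
        get_authors=True, get_commit_authors=True)

    info['stats']                        # [lines_added, lines_deleted]
    info['ancestor']                     # common ancestor commit id
    info['files']                        # [{'filename': ..., 'stats': ...}, ...]
    info['original_authors']['users']    # user_id -> changed-line count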
236 |
|
236 | |||
237 |
|
237 | |||
238 | class PullRequestModel(BaseModel): |
|
238 | class PullRequestModel(BaseModel): | |
239 |
|
239 | |||
240 | cls = PullRequest |
|
240 | cls = PullRequest | |
241 |
|
241 | |||
242 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT |
|
242 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT | |
243 |
|
243 | |||
244 | UPDATE_STATUS_MESSAGES = { |
|
244 | UPDATE_STATUS_MESSAGES = { | |
245 | UpdateFailureReason.NONE: lazy_ugettext( |
|
245 | UpdateFailureReason.NONE: lazy_ugettext( | |
246 | 'Pull request update successful.'), |
|
246 | 'Pull request update successful.'), | |
247 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
247 | UpdateFailureReason.UNKNOWN: lazy_ugettext( | |
248 | 'Pull request update failed because of an unknown error.'), |
|
248 | 'Pull request update failed because of an unknown error.'), | |
249 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
249 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( | |
250 | 'No update needed because the source and target have not changed.'), |
|
250 | 'No update needed because the source and target have not changed.'), | |
251 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
251 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( | |
252 | 'Pull request cannot be updated because the reference type is ' |
|
252 | 'Pull request cannot be updated because the reference type is ' | |
253 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
253 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), | |
254 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
254 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( | |
255 | 'This pull request cannot be updated because the target ' |
|
255 | 'This pull request cannot be updated because the target ' | |
256 | 'reference is missing.'), |
|
256 | 'reference is missing.'), | |
257 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
257 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( | |
258 | 'This pull request cannot be updated because the source ' |
|
258 | 'This pull request cannot be updated because the source ' | |
259 | 'reference is missing.'), |
|
259 | 'reference is missing.'), | |
260 | } |
|
260 | } | |
261 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] |
|
261 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] | |
262 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] |
|
262 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] | |
263 |
|
263 | |||
264 | def __get_pull_request(self, pull_request): |
|
264 | def __get_pull_request(self, pull_request): | |
265 | return self._get_instance(( |
|
265 | return self._get_instance(( | |
266 | PullRequest, PullRequestVersion), pull_request) |
|
266 | PullRequest, PullRequestVersion), pull_request) | |
267 |
|
267 | |||
268 | def _check_perms(self, perms, pull_request, user, api=False): |
|
268 | def _check_perms(self, perms, pull_request, user, api=False): | |
269 | if not api: |
|
269 | if not api: | |
270 | return h.HasRepoPermissionAny(*perms)( |
|
270 | return h.HasRepoPermissionAny(*perms)( | |
271 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
271 | user=user, repo_name=pull_request.target_repo.repo_name) | |
272 | else: |
|
272 | else: | |
273 | return h.HasRepoPermissionAnyApi(*perms)( |
|
273 | return h.HasRepoPermissionAnyApi(*perms)( | |
274 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
274 | user=user, repo_name=pull_request.target_repo.repo_name) | |
275 |
|
275 | |||
276 | def check_user_read(self, pull_request, user, api=False): |
|
276 | def check_user_read(self, pull_request, user, api=False): | |
277 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
277 | _perms = ('repository.admin', 'repository.write', 'repository.read',) | |
278 | return self._check_perms(_perms, pull_request, user, api) |
|
278 | return self._check_perms(_perms, pull_request, user, api) | |
279 |
|
279 | |||
280 | def check_user_merge(self, pull_request, user, api=False): |
|
280 | def check_user_merge(self, pull_request, user, api=False): | |
281 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
281 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) | |
282 | return self._check_perms(_perms, pull_request, user, api) |
|
282 | return self._check_perms(_perms, pull_request, user, api) | |
283 |
|
283 | |||
284 | def check_user_update(self, pull_request, user, api=False): |
|
284 | def check_user_update(self, pull_request, user, api=False): | |
285 | owner = user.user_id == pull_request.user_id |
|
285 | owner = user.user_id == pull_request.user_id | |
286 | return self.check_user_merge(pull_request, user, api) or owner |
|
286 | return self.check_user_merge(pull_request, user, api) or owner | |
287 |
|
287 | |||
288 | def check_user_delete(self, pull_request, user): |
|
288 | def check_user_delete(self, pull_request, user): | |
289 | owner = user.user_id == pull_request.user_id |
|
289 | owner = user.user_id == pull_request.user_id | |
290 | _perms = ('repository.admin',) |
|
290 | _perms = ('repository.admin',) | |
291 | return self._check_perms(_perms, pull_request, user) or owner |
|
291 | return self._check_perms(_perms, pull_request, user) or owner | |
292 |
|
292 | |||
293 | def is_user_reviewer(self, pull_request, user): |
|
293 | def is_user_reviewer(self, pull_request, user): | |
294 | return user.user_id in [ |
|
294 | return user.user_id in [ | |
295 | x.user_id for x in |
|
295 | x.user_id for x in | |
296 | pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER) |
|
296 | pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER) | |
297 | if x.user |
|
297 | if x.user | |
298 | ] |
|
298 | ] | |
299 |
|
299 | |||
300 | def check_user_change_status(self, pull_request, user, api=False): |
|
300 | def check_user_change_status(self, pull_request, user, api=False): | |
301 | return self.check_user_update(pull_request, user, api) \ |
|
301 | return self.check_user_update(pull_request, user, api) \ | |
302 | or self.is_user_reviewer(pull_request, user) |
|
302 | or self.is_user_reviewer(pull_request, user) | |
303 |
|
303 | |||
304 | def check_user_comment(self, pull_request, user): |
|
304 | def check_user_comment(self, pull_request, user): | |
305 | owner = user.user_id == pull_request.user_id |
|
305 | owner = user.user_id == pull_request.user_id | |
306 | return self.check_user_read(pull_request, user) or owner |
|
306 | return self.check_user_read(pull_request, user) or owner | |
307 |
|
307 | |||
308 | def get(self, pull_request): |
|
308 | def get(self, pull_request): | |
309 | return self.__get_pull_request(pull_request) |
|
309 | return self.__get_pull_request(pull_request) | |
310 |
|
310 | |||
311 | def _prepare_get_all_query(self, repo_name, search_q=None, source=False, |
|
311 | def _prepare_get_all_query(self, repo_name, search_q=None, source=False, | |
312 | statuses=None, opened_by=None, order_by=None, |
|
312 | statuses=None, opened_by=None, order_by=None, | |
313 | order_dir='desc', only_created=False): |
|
313 | order_dir='desc', only_created=False): | |
314 | repo = None |
|
314 | repo = None | |
315 | if repo_name: |
|
315 | if repo_name: | |
316 | repo = self._get_repo(repo_name) |
|
316 | repo = self._get_repo(repo_name) | |
317 |
|
317 | |||
318 | q = PullRequest.query() |
|
318 | q = PullRequest.query() | |
319 |
|
319 | |||
320 | if search_q: |
|
320 | if search_q: | |
321 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
321 | like_expression = u'%{}%'.format(safe_unicode(search_q)) | |
322 | q = q.join(User, User.user_id == PullRequest.user_id) |
|
322 | q = q.join(User, User.user_id == PullRequest.user_id) | |
323 | q = q.filter(or_( |
|
323 | q = q.filter(or_( | |
324 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
324 | cast(PullRequest.pull_request_id, String).ilike(like_expression), | |
325 | User.username.ilike(like_expression), |
|
325 | User.username.ilike(like_expression), | |
326 | PullRequest.title.ilike(like_expression), |
|
326 | PullRequest.title.ilike(like_expression), | |
327 | PullRequest.description.ilike(like_expression), |
|
327 | PullRequest.description.ilike(like_expression), | |
328 | )) |
|
328 | )) | |
329 |
|
329 | |||
330 | # source or target |
|
330 | # source or target | |
331 | if repo and source: |
|
331 | if repo and source: | |
332 | q = q.filter(PullRequest.source_repo == repo) |
|
332 | q = q.filter(PullRequest.source_repo == repo) | |
333 | elif repo: |
|
333 | elif repo: | |
334 | q = q.filter(PullRequest.target_repo == repo) |
|
334 | q = q.filter(PullRequest.target_repo == repo) | |
335 |
|
335 | |||
336 | # closed,opened |
|
336 | # closed,opened | |
337 | if statuses: |
|
337 | if statuses: | |
338 | q = q.filter(PullRequest.status.in_(statuses)) |
|
338 | q = q.filter(PullRequest.status.in_(statuses)) | |
339 |
|
339 | |||
340 | # opened by filter |
|
340 | # opened by filter | |
341 | if opened_by: |
|
341 | if opened_by: | |
342 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
342 | q = q.filter(PullRequest.user_id.in_(opened_by)) | |
343 |
|
343 | |||
344 | # only get those that are in "created" state |
|
344 | # only get those that are in "created" state | |
345 | if only_created: |
|
345 | if only_created: | |
346 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) |
|
346 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) | |
347 |
|
347 | |||
348 | order_map = { |
|
348 | order_map = { | |
349 | 'name_raw': PullRequest.pull_request_id, |
|
349 | 'name_raw': PullRequest.pull_request_id, | |
350 | 'id': PullRequest.pull_request_id, |
|
350 | 'id': PullRequest.pull_request_id, | |
351 | 'title': PullRequest.title, |
|
351 | 'title': PullRequest.title, | |
352 | 'updated_on_raw': PullRequest.updated_on, |
|
352 | 'updated_on_raw': PullRequest.updated_on, | |
353 | 'target_repo': PullRequest.target_repo_id |
|
353 | 'target_repo': PullRequest.target_repo_id | |
354 | } |
|
354 | } | |
355 | if order_by and order_by in order_map: |
|
355 | if order_by and order_by in order_map: | |
356 | if order_dir == 'asc': |
|
356 | if order_dir == 'asc': | |
357 | q = q.order_by(order_map[order_by].asc()) |
|
357 | q = q.order_by(order_map[order_by].asc()) | |
358 | else: |
|
358 | else: | |
359 | q = q.order_by(order_map[order_by].desc()) |
|
359 | q = q.order_by(order_map[order_by].desc()) | |
360 |
|
360 | |||
361 | return q |
|
361 | return q | |
362 |
|
362 | |||
363 | def count_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
363 | def count_all(self, repo_name, search_q=None, source=False, statuses=None, | |
364 | opened_by=None): |
|
364 | opened_by=None): | |
365 | """ |
|
365 | """ | |
366 | Count the number of pull requests for a specific repository. |
|
366 | Count the number of pull requests for a specific repository. | |
367 |
|
367 | |||
368 | :param repo_name: target or source repo |
|
368 | :param repo_name: target or source repo | |
369 | :param search_q: filter by text |
|
369 | :param search_q: filter by text | |
370 | :param source: boolean flag to specify if repo_name refers to source |
|
370 | :param source: boolean flag to specify if repo_name refers to source | |
371 | :param statuses: list of pull request statuses |
|
371 | :param statuses: list of pull request statuses | |
372 | :param opened_by: author user of the pull request |
|
372 | :param opened_by: author user of the pull request | |
373 | :returns: int number of pull requests |
|
373 | :returns: int number of pull requests | |
374 | """ |
|
374 | """ | |
375 | q = self._prepare_get_all_query( |
|
375 | q = self._prepare_get_all_query( | |
376 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
376 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
377 | opened_by=opened_by) |
|
377 | opened_by=opened_by) | |
378 |
|
378 | |||
379 | return q.count() |
|
379 | return q.count() | |
380 |
|
380 | |||
381 | def get_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
381 | def get_all(self, repo_name, search_q=None, source=False, statuses=None, | |
382 | opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): |
|
382 | opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): | |
383 | """ |
|
383 | """ | |
384 | Get all pull requests for a specific repository. |
|
384 | Get all pull requests for a specific repository. | |
385 |
|
385 | |||
386 | :param repo_name: target or source repo |
|
386 | :param repo_name: target or source repo | |
387 | :param search_q: filter by text |
|
387 | :param search_q: filter by text | |
388 | :param source: boolean flag to specify if repo_name refers to source |
|
388 | :param source: boolean flag to specify if repo_name refers to source | |
389 | :param statuses: list of pull request statuses |
|
389 | :param statuses: list of pull request statuses | |
390 | :param opened_by: author user of the pull request |
|
390 | :param opened_by: author user of the pull request | |
391 | :param offset: pagination offset |
|
391 | :param offset: pagination offset | |
392 | :param length: length of returned list |
|
392 | :param length: length of returned list | |
393 | :param order_by: order of the returned list |
|
393 | :param order_by: order of the returned list | |
394 | :param order_dir: 'asc' or 'desc' ordering direction |
|
394 | :param order_dir: 'asc' or 'desc' ordering direction | |
395 | :returns: list of pull requests |
|
395 | :returns: list of pull requests | |
396 | """ |
|
396 | """ | |
397 | q = self._prepare_get_all_query( |
|
397 | q = self._prepare_get_all_query( | |
398 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
398 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
399 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
399 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) | |
400 |
|
400 | |||
401 | if length: |
|
401 | if length: | |
402 | pull_requests = q.limit(length).offset(offset).all() |
|
402 | pull_requests = q.limit(length).offset(offset).all() | |
403 | else: |
|
403 | else: | |
404 | pull_requests = q.all() |
|
404 | pull_requests = q.all() | |
405 |
|
405 | |||
406 | return pull_requests |
|
406 | return pull_requests | |
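A usage sketch for the two query helpers above (assumes a configured database session; the repository name and search text are hypothetical):

    model = PullRequestModel()
    total = model.count_all('hg-repo', search_q='fix')
    page = model.get_all(
        'hg-repo', search_q='fix', offset=0, length=20,
        order_by='updated_on_raw', order_dir='desc')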
407 |
|
407 | |||
408 | def count_awaiting_review(self, repo_name, search_q=None, statuses=None): |
|
408 | def count_awaiting_review(self, repo_name, search_q=None, statuses=None): | |
409 | """ |
|
409 | """ | |
410 | Count the number of pull requests for a specific repository that are |
|
410 | Count the number of pull requests for a specific repository that are | |
411 | awaiting review. |
|
411 | awaiting review. | |
412 |
|
412 | |||
413 | :param repo_name: target or source repo |
|
413 | :param repo_name: target or source repo | |
414 | :param search_q: filter by text |
|
414 | :param search_q: filter by text | |
415 | :param statuses: list of pull request statuses |
|
415 | :param statuses: list of pull request statuses | |
416 | :returns: int number of pull requests |
|
416 | :returns: int number of pull requests | |
417 | """ |
|
417 | """ | |
418 | pull_requests = self.get_awaiting_review( |
|
418 | pull_requests = self.get_awaiting_review( | |
419 | repo_name, search_q=search_q, statuses=statuses) |
|
419 | repo_name, search_q=search_q, statuses=statuses) | |
420 |
|
420 | |||
421 | return len(pull_requests) |
|
421 | return len(pull_requests) | |
422 |
|
422 | |||
423 | def get_awaiting_review(self, repo_name, search_q=None, statuses=None, |
|
423 | def get_awaiting_review(self, repo_name, search_q=None, statuses=None, | |
424 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
424 | offset=0, length=None, order_by=None, order_dir='desc'): | |
425 | """ |
|
425 | """ | |
426 | Get all pull requests for a specific repository that are awaiting |
|
426 | Get all pull requests for a specific repository that are awaiting | |
427 | review. |
|
427 | review. | |
428 |
|
428 | |||
429 | :param repo_name: target or source repo |
|
429 | :param repo_name: target or source repo | |
430 | :param search_q: filter by text |
|
430 | :param search_q: filter by text | |
431 | :param statuses: list of pull request statuses |
|
431 | :param statuses: list of pull request statuses | |
432 | :param offset: pagination offset |
|
432 | :param offset: pagination offset | |
433 | :param length: length of returned list |
|
433 | :param length: length of returned list | |
434 | :param order_by: order of the returned list |
|
434 | :param order_by: order of the returned list | |
435 | :param order_dir: 'asc' or 'desc' ordering direction |
|
435 | :param order_dir: 'asc' or 'desc' ordering direction | |
436 | :returns: list of pull requests |
|
436 | :returns: list of pull requests | |
437 | """ |
|
437 | """ | |
438 | pull_requests = self.get_all( |
|
438 | pull_requests = self.get_all( | |
439 | repo_name, search_q=search_q, statuses=statuses, |
|
439 | repo_name, search_q=search_q, statuses=statuses, | |
440 | order_by=order_by, order_dir=order_dir) |
|
440 | order_by=order_by, order_dir=order_dir) | |
441 |
|
441 | |||
442 | _filtered_pull_requests = [] |
|
442 | _filtered_pull_requests = [] | |
443 | for pr in pull_requests: |
|
443 | for pr in pull_requests: | |
444 | status = pr.calculated_review_status() |
|
444 | status = pr.calculated_review_status() | |
445 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
445 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, | |
446 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
446 | ChangesetStatus.STATUS_UNDER_REVIEW]: | |
447 | _filtered_pull_requests.append(pr) |
|
447 | _filtered_pull_requests.append(pr) | |
448 | if length: |
|
448 | if length: | |
449 | return _filtered_pull_requests[offset:offset+length] |
|
449 | return _filtered_pull_requests[offset:offset+length] | |
450 | else: |
|
450 | else: | |
451 | return _filtered_pull_requests |
|
451 | return _filtered_pull_requests | |
452 |
|
452 | |||
453 | def _prepare_awaiting_my_review_review_query( |
|
453 | def _prepare_awaiting_my_review_review_query( | |
454 | self, repo_name, user_id, search_q=None, statuses=None, |
|
454 | self, repo_name, user_id, search_q=None, statuses=None, | |
455 | order_by=None, order_dir='desc'): |
|
455 | order_by=None, order_dir='desc'): | |
456 |
|
456 | |||
457 | for_review_statuses = [ |
|
457 | for_review_statuses = [ | |
458 | ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED |
|
458 | ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED | |
459 | ] |
|
459 | ] | |
460 |
|
460 | |||
461 | pull_request_alias = aliased(PullRequest) |
|
461 | pull_request_alias = aliased(PullRequest) | |
462 | status_alias = aliased(ChangesetStatus) |
|
462 | status_alias = aliased(ChangesetStatus) | |
463 | reviewers_alias = aliased(PullRequestReviewers) |
|
463 | reviewers_alias = aliased(PullRequestReviewers) | |
464 | repo_alias = aliased(Repository) |
|
464 | repo_alias = aliased(Repository) | |
465 |
|
465 | |||
466 | last_ver_subq = Session()\ |
|
466 | last_ver_subq = Session()\ | |
467 | .query(func.min(ChangesetStatus.version)) \ |
|
467 | .query(func.min(ChangesetStatus.version)) \ | |
468 | .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\ |
|
468 | .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\ | |
469 | .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \ |
|
469 | .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \ | |
470 | .subquery() |
|
470 | .subquery() | |
471 |
|
471 | |||
472 | q = Session().query(pull_request_alias) \ |
|
472 | q = Session().query(pull_request_alias) \ | |
473 | .options(lazyload(pull_request_alias.author)) \ |
|
473 | .options(lazyload(pull_request_alias.author)) \ | |
474 | .join(reviewers_alias, |
|
474 | .join(reviewers_alias, | |
475 | reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \ |
|
475 | reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \ | |
476 | .join(repo_alias, |
|
476 | .join(repo_alias, | |
477 | repo_alias.repo_id == pull_request_alias.target_repo_id) \ |
|
477 | repo_alias.repo_id == pull_request_alias.target_repo_id) \ | |
478 | .outerjoin(status_alias, |
|
478 | .outerjoin(status_alias, | |
479 | and_(status_alias.user_id == reviewers_alias.user_id, |
|
479 | and_(status_alias.user_id == reviewers_alias.user_id, | |
480 | status_alias.pull_request_id == reviewers_alias.pull_request_id)) \ |
|
480 | status_alias.pull_request_id == reviewers_alias.pull_request_id)) \ | |
481 | .filter(or_(status_alias.version == null(), |
|
481 | .filter(or_(status_alias.version == null(), | |
482 | status_alias.version == last_ver_subq)) \ |
|
482 | status_alias.version == last_ver_subq)) \ | |
483 | .filter(reviewers_alias.user_id == user_id) \ |
|
483 | .filter(reviewers_alias.user_id == user_id) \ | |
484 | .filter(repo_alias.repo_name == repo_name) \ |
|
484 | .filter(repo_alias.repo_name == repo_name) \ | |
485 | .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \ |
|
485 | .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \ | |
486 | .group_by(pull_request_alias) |
|
486 | .group_by(pull_request_alias) | |
487 |
|
487 | |||
488 | # closed,opened |
|
488 | # closed,opened | |
489 | if statuses: |
|
489 | if statuses: | |
490 | q = q.filter(pull_request_alias.status.in_(statuses)) |
|
490 | q = q.filter(pull_request_alias.status.in_(statuses)) | |
491 |
|
491 | |||
492 | if search_q: |
|
492 | if search_q: | |
493 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
493 | like_expression = u'%{}%'.format(safe_unicode(search_q)) | |
494 | q = q.join(User, User.user_id == pull_request_alias.user_id) |
|
494 | q = q.join(User, User.user_id == pull_request_alias.user_id) | |
495 | q = q.filter(or_( |
|
495 | q = q.filter(or_( | |
496 | cast(pull_request_alias.pull_request_id, String).ilike(like_expression), |
|
496 | cast(pull_request_alias.pull_request_id, String).ilike(like_expression), | |
497 | User.username.ilike(like_expression), |
|
497 | User.username.ilike(like_expression), | |
498 | pull_request_alias.title.ilike(like_expression), |
|
498 | pull_request_alias.title.ilike(like_expression), | |
499 | pull_request_alias.description.ilike(like_expression), |
|
499 | pull_request_alias.description.ilike(like_expression), | |
500 | )) |
|
500 | )) | |
501 |
|
501 | |||
502 | order_map = { |
|
502 | order_map = { | |
503 | 'name_raw': pull_request_alias.pull_request_id, |
|
503 | 'name_raw': pull_request_alias.pull_request_id, | |
504 | 'title': pull_request_alias.title, |
|
504 | 'title': pull_request_alias.title, | |
505 | 'updated_on_raw': pull_request_alias.updated_on, |
|
505 | 'updated_on_raw': pull_request_alias.updated_on, | |
506 | 'target_repo': pull_request_alias.target_repo_id |
|
506 | 'target_repo': pull_request_alias.target_repo_id | |
507 | } |
|
507 | } | |
508 | if order_by and order_by in order_map: |
|
508 | if order_by and order_by in order_map: | |
509 | if order_dir == 'asc': |
|
509 | if order_dir == 'asc': | |
510 | q = q.order_by(order_map[order_by].asc()) |
|
510 | q = q.order_by(order_map[order_by].asc()) | |
511 | else: |
|
511 | else: | |
512 | q = q.order_by(order_map[order_by].desc()) |
|
512 | q = q.order_by(order_map[order_by].desc()) | |
513 |
|
513 | |||
514 | return q |
|
514 | return q | |
515 |
|
515 | |||
516 | def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None): |
|
516 | def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None): | |
517 | """ |
|
517 | """ | |
518 | Count the number of pull requests for a specific repository that are |
|
518 | Count the number of pull requests for a specific repository that are | |
519 | awaiting review from a specific user. |
|
519 | awaiting review from a specific user. | |
520 |
|
520 | |||
521 | :param repo_name: target or source repo |
|
521 | :param repo_name: target or source repo | |
522 | :param user_id: reviewer user of the pull request |
|
522 | :param user_id: reviewer user of the pull request | |
523 | :param search_q: filter by text |
|
523 | :param search_q: filter by text | |
524 | :param statuses: list of pull request statuses |
|
524 | :param statuses: list of pull request statuses | |
525 | :returns: int number of pull requests |
|
525 | :returns: int number of pull requests | |
526 | """ |
|
526 | """ | |
527 | q = self._prepare_awaiting_my_review_review_query( |
|
527 | q = self._prepare_awaiting_my_review_review_query( | |
528 | repo_name, user_id, search_q=search_q, statuses=statuses) |
|
528 | repo_name, user_id, search_q=search_q, statuses=statuses) | |
529 | return q.count() |
|
529 | return q.count() | |
530 |
|
530 | |||
531 | def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None, |
|
531 | def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None, | |
532 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
532 | offset=0, length=None, order_by=None, order_dir='desc'): | |
533 | """ |
|
533 | """ | |
534 | Get all pull requests for a specific repository that are awaiting |
|
534 | Get all pull requests for a specific repository that are awaiting | |
535 | review from a specific user. |
|
535 | review from a specific user. | |
536 |
|
536 | |||
537 | :param repo_name: target or source repo |
|
537 | :param repo_name: target or source repo | |
538 | :param user_id: reviewer user of the pull request |
|
538 | :param user_id: reviewer user of the pull request | |
539 | :param search_q: filter by text |
|
539 | :param search_q: filter by text | |
540 | :param statuses: list of pull request statuses |
|
540 | :param statuses: list of pull request statuses | |
541 | :param offset: pagination offset |
|
541 | :param offset: pagination offset | |
542 | :param length: length of returned list |
|
542 | :param length: length of returned list | |
543 | :param order_by: order of the returned list |
|
543 | :param order_by: order of the returned list | |
544 | :param order_dir: 'asc' or 'desc' ordering direction |
|
544 | :param order_dir: 'asc' or 'desc' ordering direction | |
545 | :returns: list of pull requests |
|
545 | :returns: list of pull requests | |
546 | """ |
|
546 | """ | |
547 |
|
547 | |||
548 | q = self._prepare_awaiting_my_review_review_query( |
|
548 | q = self._prepare_awaiting_my_review_review_query( | |
549 | repo_name, user_id, search_q=search_q, statuses=statuses, |
|
549 | repo_name, user_id, search_q=search_q, statuses=statuses, | |
550 | order_by=order_by, order_dir=order_dir) |
|
550 | order_by=order_by, order_dir=order_dir) | |
551 |
|
551 | |||
552 | if length: |
|
552 | if length: | |
553 | pull_requests = q.limit(length).offset(offset).all() |
|
553 | pull_requests = q.limit(length).offset(offset).all() | |
554 | else: |
|
554 | else: | |
555 | pull_requests = q.all() |
|
555 | pull_requests = q.all() | |
556 |
|
556 | |||
557 | return pull_requests |
|
557 | return pull_requests | |
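A short usage sketch for the count/get pair above: count first, then page through the matches. The enclosing model class is not named in this hunk, so the sketch assumes RhodeCode's PullRequestModel (import path assumed as well); the repo name, user id and search text are placeholders, and 'updated_on_raw' is one of the keys accepted by the order_map above.

    from rhodecode.model.pull_request import PullRequestModel  # assumed import path

    model = PullRequestModel()
    total = model.count_awaiting_my_review('some-repo', user_id=2, search_q='fix')

    page_size = 50
    for offset in range(0, total, page_size):
        # newest activity first; offset/length drive the OFFSET/LIMIT applied above
        batch = model.get_awaiting_my_review(
            'some-repo', user_id=2, search_q='fix',
            offset=offset, length=page_size,
            order_by='updated_on_raw', order_dir='desc')
        for pr in batch:
            log.debug('PR #%s awaiting review: %s', pr.pull_request_id, pr.title)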
558 |
|
558 | |||
559 | def _prepare_im_participating_query(self, user_id=None, statuses=None, query='', |
|
559 | def _prepare_im_participating_query(self, user_id=None, statuses=None, query='', | |
560 | order_by=None, order_dir='desc'): |
|
560 | order_by=None, order_dir='desc'): | |
561 | """ |
|
561 | """ | |
562 | return a query of pull-requests where the user is the creator, or is added as a reviewer |
|
562 | return a query of pull-requests where the user is the creator, or is added as a reviewer | |
563 | """ |
|
563 | """ | |
564 | q = PullRequest.query() |
|
564 | q = PullRequest.query() | |
565 | if user_id: |
|
565 | if user_id: | |
566 | reviewers_subquery = Session().query( |
|
566 | reviewers_subquery = Session().query( | |
567 | PullRequestReviewers.pull_request_id).filter( |
|
567 | PullRequestReviewers.pull_request_id).filter( | |
568 | PullRequestReviewers.user_id == user_id).subquery() |
|
568 | PullRequestReviewers.user_id == user_id).subquery() | |
569 | user_filter = or_( |
|
569 | user_filter = or_( | |
570 | PullRequest.user_id == user_id, |
|
570 | PullRequest.user_id == user_id, | |
571 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
571 | PullRequest.pull_request_id.in_(reviewers_subquery) | |
572 | ) |
|
572 | ) | |
573 | q = PullRequest.query().filter(user_filter) |
|
573 | q = PullRequest.query().filter(user_filter) | |
574 |
|
574 | |||
575 | # closed,opened |
|
575 | # closed,opened | |
576 | if statuses: |
|
576 | if statuses: | |
577 | q = q.filter(PullRequest.status.in_(statuses)) |
|
577 | q = q.filter(PullRequest.status.in_(statuses)) | |
578 |
|
578 | |||
579 | if query: |
|
579 | if query: | |
580 | like_expression = u'%{}%'.format(safe_unicode(query)) |
|
580 | like_expression = u'%{}%'.format(safe_unicode(query)) | |
581 | q = q.join(User, User.user_id == PullRequest.user_id) |
|
581 | q = q.join(User, User.user_id == PullRequest.user_id) | |
582 | q = q.filter(or_( |
|
582 | q = q.filter(or_( | |
583 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
583 | cast(PullRequest.pull_request_id, String).ilike(like_expression), | |
584 | User.username.ilike(like_expression), |
|
584 | User.username.ilike(like_expression), | |
585 | PullRequest.title.ilike(like_expression), |
|
585 | PullRequest.title.ilike(like_expression), | |
586 | PullRequest.description.ilike(like_expression), |
|
586 | PullRequest.description.ilike(like_expression), | |
587 | )) |
|
587 | )) | |
588 |
|
588 | |||
589 | order_map = { |
|
589 | order_map = { | |
590 | 'name_raw': PullRequest.pull_request_id, |
|
590 | 'name_raw': PullRequest.pull_request_id, | |
591 | 'title': PullRequest.title, |
|
591 | 'title': PullRequest.title, | |
592 | 'updated_on_raw': PullRequest.updated_on, |
|
592 | 'updated_on_raw': PullRequest.updated_on, | |
593 | 'target_repo': PullRequest.target_repo_id |
|
593 | 'target_repo': PullRequest.target_repo_id | |
594 | } |
|
594 | } | |
595 | if order_by and order_by in order_map: |
|
595 | if order_by and order_by in order_map: | |
596 | if order_dir == 'asc': |
|
596 | if order_dir == 'asc': | |
597 | q = q.order_by(order_map[order_by].asc()) |
|
597 | q = q.order_by(order_map[order_by].asc()) | |
598 | else: |
|
598 | else: | |
599 | q = q.order_by(order_map[order_by].desc()) |
|
599 | q = q.order_by(order_map[order_by].desc()) | |
600 |
|
600 | |||
601 | return q |
|
601 | return q | |
602 |
|
602 | |||
603 | def count_im_participating_in(self, user_id=None, statuses=None, query=''): |
|
603 | def count_im_participating_in(self, user_id=None, statuses=None, query=''): | |
604 | q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query) |
|
604 | q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query) | |
605 | return q.count() |
|
605 | return q.count() | |
606 |
|
606 | |||
607 | def get_im_participating_in( |
|
607 | def get_im_participating_in( | |
608 | self, user_id=None, statuses=None, query='', offset=0, |
|
608 | self, user_id=None, statuses=None, query='', offset=0, | |
609 | length=None, order_by=None, order_dir='desc'): |
|
609 | length=None, order_by=None, order_dir='desc'): | |
610 | """ |
|
610 | """ | |
611 | Get all pull requests that I'm participating in as a reviewer, or that I have opened |
|
611 | Get all pull requests that I'm participating in as a reviewer, or that I have opened | |
612 | """ |
|
612 | """ | |
613 |
|
613 | |||
614 | q = self._prepare_im_participating_query( |
|
614 | q = self._prepare_im_participating_query( | |
615 | user_id, statuses=statuses, query=query, order_by=order_by, |
|
615 | user_id, statuses=statuses, query=query, order_by=order_by, | |
616 | order_dir=order_dir) |
|
616 | order_dir=order_dir) | |
617 |
|
617 | |||
618 | if length: |
|
618 | if length: | |
619 | pull_requests = q.limit(length).offset(offset).all() |
|
619 | pull_requests = q.limit(length).offset(offset).all() | |
620 | else: |
|
620 | else: | |
621 | pull_requests = q.all() |
|
621 | pull_requests = q.all() | |
622 |
|
622 | |||
623 | return pull_requests |
|
623 | return pull_requests | |
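The "participating in" pair above works the same way, only the filter is creator-or-reviewer instead of awaiting-review. A minimal sketch under the same assumptions as the previous one (the status strings are placeholders; see the closed/opened note in the query builder):

    model = PullRequestModel()  # assumed enclosing class, as above
    open_total = model.count_im_participating_in(
        user_id=2, statuses=['open'], query='bugfix')
    first_page = model.get_im_participating_in(
        user_id=2, statuses=['open'], query='bugfix',
        offset=0, length=20, order_by='name_raw', order_dir='asc')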
624 |
|
624 | |||
625 | def _prepare_participating_in_for_review_query( |
|
625 | def _prepare_participating_in_for_review_query( | |
626 | self, user_id, statuses=None, query='', order_by=None, order_dir='desc'): |
|
626 | self, user_id, statuses=None, query='', order_by=None, order_dir='desc'): | |
627 |
|
627 | |||
628 | for_review_statuses = [ |
|
628 | for_review_statuses = [ | |
629 | ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED |
|
629 | ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED | |
630 | ] |
|
630 | ] | |
631 |
|
631 | |||
632 | pull_request_alias = aliased(PullRequest) |
|
632 | pull_request_alias = aliased(PullRequest) | |
633 | status_alias = aliased(ChangesetStatus) |
|
633 | status_alias = aliased(ChangesetStatus) | |
634 | reviewers_alias = aliased(PullRequestReviewers) |
|
634 | reviewers_alias = aliased(PullRequestReviewers) | |
635 |
|
635 | |||
636 | last_ver_subq = Session()\ |
|
636 | last_ver_subq = Session()\ | |
637 | .query(func.min(ChangesetStatus.version)) \ |
|
637 | .query(func.min(ChangesetStatus.version)) \ | |
638 | .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\ |
|
638 | .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\ | |
639 | .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \ |
|
639 | .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \ | |
640 | .subquery() |
|
640 | .subquery() | |
641 |
|
641 | |||
642 | q = Session().query(pull_request_alias) \ |
|
642 | q = Session().query(pull_request_alias) \ | |
643 | .options(lazyload(pull_request_alias.author)) \ |
|
643 | .options(lazyload(pull_request_alias.author)) \ | |
644 | .join(reviewers_alias, |
|
644 | .join(reviewers_alias, | |
645 | reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \ |
|
645 | reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \ | |
646 | .outerjoin(status_alias, |
|
646 | .outerjoin(status_alias, | |
647 | and_(status_alias.user_id == reviewers_alias.user_id, |
|
647 | and_(status_alias.user_id == reviewers_alias.user_id, | |
648 | status_alias.pull_request_id == reviewers_alias.pull_request_id)) \ |
|
648 | status_alias.pull_request_id == reviewers_alias.pull_request_id)) \ | |
649 | .filter(or_(status_alias.version == null(), |
|
649 | .filter(or_(status_alias.version == null(), | |
650 | status_alias.version == last_ver_subq)) \ |
|
650 | status_alias.version == last_ver_subq)) \ | |
651 | .filter(reviewers_alias.user_id == user_id) \ |
|
651 | .filter(reviewers_alias.user_id == user_id) \ | |
652 | .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \ |
|
652 | .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \ | |
653 | .group_by(pull_request_alias) |
|
653 | .group_by(pull_request_alias) | |
654 |
|
654 | |||
655 | # closed,opened |
|
655 | # closed,opened | |
656 | if statuses: |
|
656 | if statuses: | |
657 | q = q.filter(pull_request_alias.status.in_(statuses)) |
|
657 | q = q.filter(pull_request_alias.status.in_(statuses)) | |
658 |
|
658 | |||
659 | if query: |
|
659 | if query: | |
660 | like_expression = u'%{}%'.format(safe_unicode(query)) |
|
660 | like_expression = u'%{}%'.format(safe_unicode(query)) | |
661 | q = q.join(User, User.user_id == pull_request_alias.user_id) |
|
661 | q = q.join(User, User.user_id == pull_request_alias.user_id) | |
662 | q = q.filter(or_( |
|
662 | q = q.filter(or_( | |
663 | cast(pull_request_alias.pull_request_id, String).ilike(like_expression), |
|
663 | cast(pull_request_alias.pull_request_id, String).ilike(like_expression), | |
664 | User.username.ilike(like_expression), |
|
664 | User.username.ilike(like_expression), | |
665 | pull_request_alias.title.ilike(like_expression), |
|
665 | pull_request_alias.title.ilike(like_expression), | |
666 | pull_request_alias.description.ilike(like_expression), |
|
666 | pull_request_alias.description.ilike(like_expression), | |
667 | )) |
|
667 | )) | |
668 |
|
668 | |||
669 | order_map = { |
|
669 | order_map = { | |
670 | 'name_raw': pull_request_alias.pull_request_id, |
|
670 | 'name_raw': pull_request_alias.pull_request_id, | |
671 | 'title': pull_request_alias.title, |
|
671 | 'title': pull_request_alias.title, | |
672 | 'updated_on_raw': pull_request_alias.updated_on, |
|
672 | 'updated_on_raw': pull_request_alias.updated_on, | |
673 | 'target_repo': pull_request_alias.target_repo_id |
|
673 | 'target_repo': pull_request_alias.target_repo_id | |
674 | } |
|
674 | } | |
675 | if order_by and order_by in order_map: |
|
675 | if order_by and order_by in order_map: | |
676 | if order_dir == 'asc': |
|
676 | if order_dir == 'asc': | |
677 | q = q.order_by(order_map[order_by].asc()) |
|
677 | q = q.order_by(order_map[order_by].asc()) | |
678 | else: |
|
678 | else: | |
679 | q = q.order_by(order_map[order_by].desc()) |
|
679 | q = q.order_by(order_map[order_by].desc()) | |
680 |
|
680 | |||
681 | return q |
|
681 | return q | |
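The query built above is the subtle one: each reviewer row is outer-joined to that reviewer's changeset statuses, only the status row whose version matches the correlated min(version) subquery is kept (or no status row at all), and the pull request survives the filter when that effective status is still not_reviewed / under_review. A plain-Python sketch of the same selection rule over in-memory data, purely to make the intent concrete (the dict shapes are illustrative, not the ORM objects):

    def needs_my_review(prs, my_user_id):
        # prs: iterable of dicts like
        #   {'id': 1, 'reviewer_ids': [2, 3],
        #    'statuses': {reviewer_id: [(version, status), ...]}}
        pending = {'not_reviewed', 'under_review'}
        result = []
        for pr in prs:
            if my_user_id not in pr['reviewer_ids']:
                continue
            my_votes = pr['statuses'].get(my_user_id, [])
            if not my_votes:
                # no status recorded at all -> still awaiting my review
                result.append(pr)
                continue
            # the effective vote is the one with the smallest version number,
            # mirroring the func.min(ChangesetStatus.version) subquery above
            _version, status = min(my_votes, key=lambda vote: vote[0])
            if status in pending:
                result.append(pr)
        return result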
682 |
|
682 | |||
683 | def count_im_participating_in_for_review(self, user_id, statuses=None, query=''): |
|
683 | def count_im_participating_in_for_review(self, user_id, statuses=None, query=''): | |
684 | q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query) |
|
684 | q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query) | |
685 | return q.count() |
|
685 | return q.count() | |
686 |
|
686 | |||
687 | def get_im_participating_in_for_review( |
|
687 | def get_im_participating_in_for_review( | |
688 | self, user_id, statuses=None, query='', offset=0, |
|
688 | self, user_id, statuses=None, query='', offset=0, | |
689 | length=None, order_by=None, order_dir='desc'): |
|
689 | length=None, order_by=None, order_dir='desc'): | |
690 | """ |
|
690 | """ | |
691 | Get all pull requests that need the user's approval or rejection |
|
691 | Get all pull requests that need the user's approval or rejection | |
692 | """ |
|
692 | """ | |
693 |
|
693 | |||
694 | q = self._prepare_participating_in_for_review_query( |
|
694 | q = self._prepare_participating_in_for_review_query( | |
695 | user_id, statuses=statuses, query=query, order_by=order_by, |
|
695 | user_id, statuses=statuses, query=query, order_by=order_by, | |
696 | order_dir=order_dir) |
|
696 | order_dir=order_dir) | |
697 |
|
697 | |||
698 | if length: |
|
698 | if length: | |
699 | pull_requests = q.limit(length).offset(offset).all() |
|
699 | pull_requests = q.limit(length).offset(offset).all() | |
700 | else: |
|
700 | else: | |
701 | pull_requests = q.all() |
|
701 | pull_requests = q.all() | |
702 |
|
702 | |||
703 | return pull_requests |
|
703 | return pull_requests | |
704 |
|
704 | |||
705 | def get_versions(self, pull_request): |
|
705 | def get_versions(self, pull_request): | |
706 | """ |
|
706 | """ | |
707 | returns versions of the pull request sorted by version ID ascending |
|
707 | returns versions of the pull request sorted by version ID ascending | |
708 | """ |
|
708 | """ | |
709 | return PullRequestVersion.query()\ |
|
709 | return PullRequestVersion.query()\ | |
710 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
710 | .filter(PullRequestVersion.pull_request == pull_request)\ | |
711 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
711 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ | |
712 | .all() |
|
712 | .all() | |
713 |
|
713 | |||
714 | def get_pr_version(self, pull_request_id, version=None): |
|
714 | def get_pr_version(self, pull_request_id, version=None): | |
715 | at_version = None |
|
715 | at_version = None | |
716 |
|
716 | |||
717 | if version and version == 'latest': |
|
717 | if version and version == 'latest': | |
718 | pull_request_ver = PullRequest.get(pull_request_id) |
|
718 | pull_request_ver = PullRequest.get(pull_request_id) | |
719 | pull_request_obj = pull_request_ver |
|
719 | pull_request_obj = pull_request_ver | |
720 | _org_pull_request_obj = pull_request_obj |
|
720 | _org_pull_request_obj = pull_request_obj | |
721 | at_version = 'latest' |
|
721 | at_version = 'latest' | |
722 | elif version: |
|
722 | elif version: | |
723 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
723 | pull_request_ver = PullRequestVersion.get_or_404(version) | |
724 | pull_request_obj = pull_request_ver |
|
724 | pull_request_obj = pull_request_ver | |
725 | _org_pull_request_obj = pull_request_ver.pull_request |
|
725 | _org_pull_request_obj = pull_request_ver.pull_request | |
726 | at_version = pull_request_ver.pull_request_version_id |
|
726 | at_version = pull_request_ver.pull_request_version_id | |
727 | else: |
|
727 | else: | |
728 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
728 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( | |
729 | pull_request_id) |
|
729 | pull_request_id) | |
730 |
|
730 | |||
731 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
731 | pull_request_display_obj = PullRequest.get_pr_display_object( | |
732 | pull_request_obj, _org_pull_request_obj) |
|
732 | pull_request_obj, _org_pull_request_obj) | |
733 |
|
733 | |||
734 | return _org_pull_request_obj, pull_request_obj, \ |
|
734 | return _org_pull_request_obj, pull_request_obj, \ | |
735 | pull_request_display_obj, at_version |
|
735 | pull_request_display_obj, at_version | |
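get_pr_version above always returns the same 4-tuple regardless of which branch ran; a sketch of the three call patterns (ids and version numbers are placeholders, model is the assumed PullRequestModel instance):

    # version='latest': the live pull request, at_version set to 'latest'
    org_pr, pr, pr_display, at_version = model.get_pr_version(1, version='latest')

    # a stored version id: pr_ver is the PullRequestVersion, org_pr its pull request
    org_pr, pr_ver, pr_display, at_version = model.get_pr_version(1, version=7)

    # no version: the pull request itself, at_version stays None
    org_pr, pr, pr_display, at_version = model.get_pr_version(1)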
736 |
|
736 | |||
737 | def pr_commits_versions(self, versions): |
|
737 | def pr_commits_versions(self, versions): | |
738 | """ |
|
738 | """ | |
739 | Maps the pull-request commits onto all known PR versions. This way we can tell, |
|
739 | Maps the pull-request commits onto all known PR versions. This way we can tell, | |
740 | for each commit, which PR versions it was part of. |
|
740 | for each commit, which PR versions it was part of. | |
741 | """ |
|
741 | """ | |
742 | commit_versions = collections.defaultdict(list) |
|
742 | commit_versions = collections.defaultdict(list) | |
743 | num_versions = [x.pull_request_version_id for x in versions] |
|
743 | num_versions = [x.pull_request_version_id for x in versions] | |
744 | for ver in versions: |
|
744 | for ver in versions: | |
745 | for commit_id in ver.revisions: |
|
745 | for commit_id in ver.revisions: | |
746 | ver_idx = ChangesetComment.get_index_from_version( |
|
746 | ver_idx = ChangesetComment.get_index_from_version( | |
747 | ver.pull_request_version_id, num_versions=num_versions) |
|
747 | ver.pull_request_version_id, num_versions=num_versions) | |
748 | commit_versions[commit_id].append(ver_idx) |
|
748 | commit_versions[commit_id].append(ver_idx) | |
749 | return commit_versions |
|
749 | return commit_versions | |
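A small worked example of the mapping built above: every commit id is keyed to the list of version indexes (as computed by ChangesetComment.get_index_from_version) of each PR version whose revisions contain it. Placeholder values:

    versions = model.get_versions(pull_request)      # ordered by version id, see above
    commit_versions = model.pr_commits_versions(versions)

    # illustrative shape only -- the concrete index values depend on
    # ChangesetComment.get_index_from_version:
    #   {'aaaa1111...': [1, 2], 'bbbb2222...': [2]}
    for commit_id, ver_indexes in commit_versions.items():
        log.debug('commit %s appears in PR versions %s', commit_id, ver_indexes)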
750 |
|
750 | |||
751 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
751 | def create(self, created_by, source_repo, source_ref, target_repo, | |
752 | target_ref, revisions, reviewers, observers, title, description=None, |
|
752 | target_ref, revisions, reviewers, observers, title, description=None, | |
753 | common_ancestor_id=None, |
|
753 | common_ancestor_id=None, | |
754 | description_renderer=None, |
|
754 | description_renderer=None, | |
755 | reviewer_data=None, translator=None, auth_user=None): |
|
755 | reviewer_data=None, translator=None, auth_user=None): | |
756 | translator = translator or get_current_request().translate |
|
756 | translator = translator or get_current_request().translate | |
757 |
|
757 | |||
758 | created_by_user = self._get_user(created_by) |
|
758 | created_by_user = self._get_user(created_by) | |
759 | auth_user = auth_user or created_by_user.AuthUser() |
|
759 | auth_user = auth_user or created_by_user.AuthUser() | |
760 | source_repo = self._get_repo(source_repo) |
|
760 | source_repo = self._get_repo(source_repo) | |
761 | target_repo = self._get_repo(target_repo) |
|
761 | target_repo = self._get_repo(target_repo) | |
762 |
|
762 | |||
763 | pull_request = PullRequest() |
|
763 | pull_request = PullRequest() | |
764 | pull_request.source_repo = source_repo |
|
764 | pull_request.source_repo = source_repo | |
765 | pull_request.source_ref = source_ref |
|
765 | pull_request.source_ref = source_ref | |
766 | pull_request.target_repo = target_repo |
|
766 | pull_request.target_repo = target_repo | |
767 | pull_request.target_ref = target_ref |
|
767 | pull_request.target_ref = target_ref | |
768 | pull_request.revisions = revisions |
|
768 | pull_request.revisions = revisions | |
769 | pull_request.title = title |
|
769 | pull_request.title = title | |
770 | pull_request.description = description |
|
770 | pull_request.description = description | |
771 | pull_request.description_renderer = description_renderer |
|
771 | pull_request.description_renderer = description_renderer | |
772 | pull_request.author = created_by_user |
|
772 | pull_request.author = created_by_user | |
773 | pull_request.reviewer_data = reviewer_data |
|
773 | pull_request.reviewer_data = reviewer_data | |
774 | pull_request.pull_request_state = pull_request.STATE_CREATING |
|
774 | pull_request.pull_request_state = pull_request.STATE_CREATING | |
775 | pull_request.common_ancestor_id = common_ancestor_id |
|
775 | pull_request.common_ancestor_id = common_ancestor_id | |
776 |
|
776 | |||
777 | Session().add(pull_request) |
|
777 | Session().add(pull_request) | |
778 | Session().flush() |
|
778 | Session().flush() | |
779 |
|
779 | |||
780 | reviewer_ids = set() |
|
780 | reviewer_ids = set() | |
781 | # members / reviewers |
|
781 | # members / reviewers | |
782 | for reviewer_object in reviewers: |
|
782 | for reviewer_object in reviewers: | |
783 | user_id, reasons, mandatory, role, rules = reviewer_object |
|
783 | user_id, reasons, mandatory, role, rules = reviewer_object | |
784 | user = self._get_user(user_id) |
|
784 | user = self._get_user(user_id) | |
785 |
|
785 | |||
786 | # skip duplicates |
|
786 | # skip duplicates | |
787 | if user.user_id in reviewer_ids: |
|
787 | if user.user_id in reviewer_ids: | |
788 | continue |
|
788 | continue | |
789 |
|
789 | |||
790 | reviewer_ids.add(user.user_id) |
|
790 | reviewer_ids.add(user.user_id) | |
791 |
|
791 | |||
792 | reviewer = PullRequestReviewers() |
|
792 | reviewer = PullRequestReviewers() | |
793 | reviewer.user = user |
|
793 | reviewer.user = user | |
794 | reviewer.pull_request = pull_request |
|
794 | reviewer.pull_request = pull_request | |
795 | reviewer.reasons = reasons |
|
795 | reviewer.reasons = reasons | |
796 | reviewer.mandatory = mandatory |
|
796 | reviewer.mandatory = mandatory | |
797 | reviewer.role = role |
|
797 | reviewer.role = role | |
798 |
|
798 | |||
799 | # NOTE(marcink): pick only first rule for now |
|
799 | # NOTE(marcink): pick only first rule for now | |
800 | rule_id = list(rules)[0] if rules else None |
|
800 | rule_id = list(rules)[0] if rules else None | |
801 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
801 | rule = RepoReviewRule.get(rule_id) if rule_id else None | |
802 | if rule: |
|
802 | if rule: | |
803 | review_group = rule.user_group_vote_rule(user_id) |
|
803 | review_group = rule.user_group_vote_rule(user_id) | |
804 | # we check if this particular reviewer is a member of a voting group |
|
804 | # we check if this particular reviewer is a member of a voting group | |
805 | if review_group: |
|
805 | if review_group: | |
806 | # NOTE(marcink): |
|
806 | # NOTE(marcink): | |
807 | # the user can be a member of more than one group, but we pick the first one, |
|
807 | # the user can be a member of more than one group, but we pick the first one, | |
808 | # same as the default reviewers algorithm does |
|
808 | # same as the default reviewers algorithm does | |
809 | review_group = review_group[0] |
|
809 | review_group = review_group[0] | |
810 |
|
810 | |||
811 | rule_data = { |
|
811 | rule_data = { | |
812 | 'rule_name': |
|
812 | 'rule_name': | |
813 | rule.review_rule_name, |
|
813 | rule.review_rule_name, | |
814 | 'rule_user_group_entry_id': |
|
814 | 'rule_user_group_entry_id': | |
815 | review_group.repo_review_rule_users_group_id, |
|
815 | review_group.repo_review_rule_users_group_id, | |
816 | 'rule_user_group_name': |
|
816 | 'rule_user_group_name': | |
817 | review_group.users_group.users_group_name, |
|
817 | review_group.users_group.users_group_name, | |
818 | 'rule_user_group_members': |
|
818 | 'rule_user_group_members': | |
819 | [x.user.username for x in review_group.users_group.members], |
|
819 | [x.user.username for x in review_group.users_group.members], | |
820 | 'rule_user_group_members_id': |
|
820 | 'rule_user_group_members_id': | |
821 | [x.user.user_id for x in review_group.users_group.members], |
|
821 | [x.user.user_id for x in review_group.users_group.members], | |
822 | } |
|
822 | } | |
823 | # e.g {'vote_rule': -1, 'mandatory': True} |
|
823 | # e.g {'vote_rule': -1, 'mandatory': True} | |
824 | rule_data.update(review_group.rule_data()) |
|
824 | rule_data.update(review_group.rule_data()) | |
825 |
|
825 | |||
826 | reviewer.rule_data = rule_data |
|
826 | reviewer.rule_data = rule_data | |
827 |
|
827 | |||
828 | Session().add(reviewer) |
|
828 | Session().add(reviewer) | |
829 | Session().flush() |
|
829 | Session().flush() | |
830 |
|
830 | |||
831 | for observer_object in observers: |
|
831 | for observer_object in observers: | |
832 | user_id, reasons, mandatory, role, rules = observer_object |
|
832 | user_id, reasons, mandatory, role, rules = observer_object | |
833 | user = self._get_user(user_id) |
|
833 | user = self._get_user(user_id) | |
834 |
|
834 | |||
835 | # skip duplicates from reviewers |
|
835 | # skip duplicates from reviewers | |
836 | if user.user_id in reviewer_ids: |
|
836 | if user.user_id in reviewer_ids: | |
837 | continue |
|
837 | continue | |
838 |
|
838 | |||
839 | #reviewer_ids.add(user.user_id) |
|
839 | #reviewer_ids.add(user.user_id) | |
840 |
|
840 | |||
841 | observer = PullRequestReviewers() |
|
841 | observer = PullRequestReviewers() | |
842 | observer.user = user |
|
842 | observer.user = user | |
843 | observer.pull_request = pull_request |
|
843 | observer.pull_request = pull_request | |
844 | observer.reasons = reasons |
|
844 | observer.reasons = reasons | |
845 | observer.mandatory = mandatory |
|
845 | observer.mandatory = mandatory | |
846 | observer.role = role |
|
846 | observer.role = role | |
847 |
|
847 | |||
848 | # NOTE(marcink): pick only first rule for now |
|
848 | # NOTE(marcink): pick only first rule for now | |
849 | rule_id = list(rules)[0] if rules else None |
|
849 | rule_id = list(rules)[0] if rules else None | |
850 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
850 | rule = RepoReviewRule.get(rule_id) if rule_id else None | |
851 | if rule: |
|
851 | if rule: | |
852 | # TODO(marcink): do we need this for observers ?? |
|
852 | # TODO(marcink): do we need this for observers ?? | |
853 | pass |
|
853 | pass | |
854 |
|
854 | |||
855 | Session().add(observer) |
|
855 | Session().add(observer) | |
856 | Session().flush() |
|
856 | Session().flush() | |
857 |
|
857 | |||
858 | # Set approval status to "Under Review" for all commits which are |
|
858 | # Set approval status to "Under Review" for all commits which are | |
859 | # part of this pull request. |
|
859 | # part of this pull request. | |
860 | ChangesetStatusModel().set_status( |
|
860 | ChangesetStatusModel().set_status( | |
861 | repo=target_repo, |
|
861 | repo=target_repo, | |
862 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
862 | status=ChangesetStatus.STATUS_UNDER_REVIEW, | |
863 | user=created_by_user, |
|
863 | user=created_by_user, | |
864 | pull_request=pull_request |
|
864 | pull_request=pull_request | |
865 | ) |
|
865 | ) | |
866 | # we commit early at this point. This is because the queries above |
|
866 | # we commit early at this point. This is because the queries above | |
867 | # take row locks, so we need to commit and finish the transaction |
|
867 | # take row locks, so we need to commit and finish the transaction | |
868 | # before the validate call below, which for large repos could take |
|
868 | # before the validate call below, which for large repos could take | |
869 | # long and would otherwise hold those row locks for a long time |
|
869 | # long and would otherwise hold those row locks for a long time | |
870 | Session().commit() |
|
870 | Session().commit() | |
871 |
|
871 | |||
872 | # prepare workspace, and run initial merge simulation. Set state during that |
|
872 | # prepare workspace, and run initial merge simulation. Set state during that | |
873 | # operation |
|
873 | # operation | |
874 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
874 | pull_request = PullRequest.get(pull_request.pull_request_id) | |
875 |
|
875 | |||
876 | # set state to 'merging' for the merge simulation; once finished, move it to |
|
876 | # set state to 'merging' for the merge simulation; once finished, move it to | |
877 | # 'created' to mark that the simulation worked fine |
|
877 | # 'created' to mark that the simulation worked fine | |
878 | with pull_request.set_state(PullRequest.STATE_MERGING, |
|
878 | with pull_request.set_state(PullRequest.STATE_MERGING, | |
879 | final_state=PullRequest.STATE_CREATED) as state_obj: |
|
879 | final_state=PullRequest.STATE_CREATED) as state_obj: | |
880 | MergeCheck.validate( |
|
880 | MergeCheck.validate( | |
881 | pull_request, auth_user=auth_user, translator=translator) |
|
881 | pull_request, auth_user=auth_user, translator=translator) | |
882 |
|
882 | |||
883 | self.notify_reviewers(pull_request, reviewer_ids, created_by_user) |
|
883 | self.notify_reviewers(pull_request, reviewer_ids, created_by_user) | |
884 | self.trigger_pull_request_hook(pull_request, created_by_user, 'create') |
|
884 | self.trigger_pull_request_hook(pull_request, created_by_user, 'create') | |
885 |
|
885 | |||
886 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
886 | creation_data = pull_request.get_api_data(with_merge_state=False) | |
887 | self._log_audit_action( |
|
887 | self._log_audit_action( | |
888 | 'repo.pull_request.create', {'data': creation_data}, |
|
888 | 'repo.pull_request.create', {'data': creation_data}, | |
889 | auth_user, pull_request) |
|
889 | auth_user, pull_request) | |
890 |
|
890 | |||
891 | return pull_request |
|
891 | return pull_request | |
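The reviewers and observers arguments accepted by create() above are iterables of 5-tuples, unpacked as (user_id, reasons, mandatory, role, rules). A hedged sketch of a minimal call; the ref strings follow the type:name:commit_id layout used elsewhere in this module, and every concrete value (user ids, repo names, commit hashes, the role string) is a placeholder:

    reviewers = [
        # (user_id, reasons, mandatory, role, rules)
        (3, ['added manually'], True, 'reviewer', []),
        (4, ['matched a review rule'], False, 'reviewer', []),
    ]
    observers = []  # same tuple shape as reviewers

    pull_request = model.create(
        created_by=2,                       # resolved via self._get_user()
        source_repo='some-fork', source_ref='branch:feature-x:aaaa1111',
        target_repo='some-repo', target_ref='branch:master:bbbb2222',
        revisions=['aaaa1111'],
        reviewers=reviewers, observers=observers,
        title='Add feature X', description='See the commits for details')

Note that create() commits the session early and runs a merge simulation before it returns, so the object handed back is already persisted.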
892 |
|
892 | |||
893 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): |
|
893 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): | |
894 | pull_request = self.__get_pull_request(pull_request) |
|
894 | pull_request = self.__get_pull_request(pull_request) | |
895 | target_scm = pull_request.target_repo.scm_instance() |
|
895 | target_scm = pull_request.target_repo.scm_instance() | |
896 | if action == 'create': |
|
896 | if action == 'create': | |
897 | trigger_hook = hooks_utils.trigger_create_pull_request_hook |
|
897 | trigger_hook = hooks_utils.trigger_create_pull_request_hook | |
898 | elif action == 'merge': |
|
898 | elif action == 'merge': | |
899 | trigger_hook = hooks_utils.trigger_merge_pull_request_hook |
|
899 | trigger_hook = hooks_utils.trigger_merge_pull_request_hook | |
900 | elif action == 'close': |
|
900 | elif action == 'close': | |
901 | trigger_hook = hooks_utils.trigger_close_pull_request_hook |
|
901 | trigger_hook = hooks_utils.trigger_close_pull_request_hook | |
902 | elif action == 'review_status_change': |
|
902 | elif action == 'review_status_change': | |
903 | trigger_hook = hooks_utils.trigger_review_pull_request_hook |
|
903 | trigger_hook = hooks_utils.trigger_review_pull_request_hook | |
904 | elif action == 'update': |
|
904 | elif action == 'update': | |
905 | trigger_hook = hooks_utils.trigger_update_pull_request_hook |
|
905 | trigger_hook = hooks_utils.trigger_update_pull_request_hook | |
906 | elif action == 'comment': |
|
906 | elif action == 'comment': | |
907 | trigger_hook = hooks_utils.trigger_comment_pull_request_hook |
|
907 | trigger_hook = hooks_utils.trigger_comment_pull_request_hook | |
908 | elif action == 'comment_edit': |
|
908 | elif action == 'comment_edit': | |
909 | trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook |
|
909 | trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook | |
910 | else: |
|
910 | else: | |
911 | return |
|
911 | return | |
912 |
|
912 | |||
913 | log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s', |
|
913 | log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s', | |
914 | pull_request, action, trigger_hook) |
|
914 | pull_request, action, trigger_hook) | |
915 | trigger_hook( |
|
915 | trigger_hook( | |
916 | username=user.username, |
|
916 | username=user.username, | |
917 | repo_name=pull_request.target_repo.repo_name, |
|
917 | repo_name=pull_request.target_repo.repo_name, | |
918 | repo_type=target_scm.alias, |
|
918 | repo_type=target_scm.alias, | |
919 | pull_request=pull_request, |
|
919 | pull_request=pull_request, | |
920 | data=data) |
|
920 | data=data) | |
921 |
|
921 | |||
922 | def _get_commit_ids(self, pull_request): |
|
922 | def _get_commit_ids(self, pull_request): | |
923 | """ |
|
923 | """ | |
924 | Return the commit ids of the merged pull request. |
|
924 | Return the commit ids of the merged pull request. | |
925 |
|
925 | |||
926 | This method does not yet deal correctly with the lack of autoupdates, |
|
926 | This method does not yet deal correctly with the lack of autoupdates, | |
927 | nor with implicit target updates. |
|
927 | nor with implicit target updates. | |
928 | For example: if a commit in the source repo is already in the target, it |
|
928 | For example: if a commit in the source repo is already in the target, it | |
929 | will be reported anyway. |
|
929 | will be reported anyway. | |
930 | """ |
|
930 | """ | |
931 | merge_rev = pull_request.merge_rev |
|
931 | merge_rev = pull_request.merge_rev | |
932 | if merge_rev is None: |
|
932 | if merge_rev is None: | |
933 | raise ValueError('This pull request was not merged yet') |
|
933 | raise ValueError('This pull request was not merged yet') | |
934 |
|
934 | |||
935 | commit_ids = list(pull_request.revisions) |
|
935 | commit_ids = list(pull_request.revisions) | |
936 | if merge_rev not in commit_ids: |
|
936 | if merge_rev not in commit_ids: | |
937 | commit_ids.append(merge_rev) |
|
937 | commit_ids.append(merge_rev) | |
938 |
|
938 | |||
939 | return commit_ids |
|
939 | return commit_ids | |
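To make the docstring's caveat concrete, the result is just the recorded revisions plus the merge commit, appended only when it is not already among them, and the call raises for an unmerged pull request. Illustrative values:

    # revisions ['aaaa', 'bbbb'], merge_rev 'cccc'  ->  ['aaaa', 'bbbb', 'cccc']
    # revisions ['aaaa', 'bbbb'], merge_rev 'bbbb'  ->  ['aaaa', 'bbbb']
    # merge_rev is None  ->  ValueError('This pull request was not merged yet')
    commit_ids = model._get_commit_ids(merged_pull_request)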
940 |
|
940 | |||
941 | def merge_repo(self, pull_request, user, extras): |
|
941 | def merge_repo(self, pull_request, user, extras): | |
|
942 | repo_type = pull_request.source_repo.repo_type | |||
942 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
943 | log.debug("Merging pull request %s", pull_request.pull_request_id) | |
943 | extras['user_agent'] = 'internal-merge' |
|
944 | extras['user_agent'] = '{}/internal-merge'.format(repo_type) | |
944 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
945 | merge_state = self._merge_pull_request(pull_request, user, extras) | |
945 | if merge_state.executed: |
|
946 | if merge_state.executed: | |
946 | log.debug("Merge was successful, updating the pull request comments.") |
|
947 | log.debug("Merge was successful, updating the pull request comments.") | |
947 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
948 | self._comment_and_close_pr(pull_request, user, merge_state) | |
948 |
|
949 | |||
949 | self._log_audit_action( |
|
950 | self._log_audit_action( | |
950 | 'repo.pull_request.merge', |
|
951 | 'repo.pull_request.merge', | |
951 | {'merge_state': merge_state.__dict__}, |
|
952 | {'merge_state': merge_state.__dict__}, | |
952 | user, pull_request) |
|
953 | user, pull_request) | |
953 |
|
954 | |||
954 | else: |
|
955 | else: | |
955 | log.warn("Merge failed, not updating the pull request.") |
|
956 | log.warn("Merge failed, not updating the pull request.") | |
956 | return merge_state |
|
957 | return merge_state | |
957 |
|
958 | |||
958 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
959 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): | |
959 | target_vcs = pull_request.target_repo.scm_instance() |
|
960 | target_vcs = pull_request.target_repo.scm_instance() | |
960 | source_vcs = pull_request.source_repo.scm_instance() |
|
961 | source_vcs = pull_request.source_repo.scm_instance() | |
961 |
|
962 | |||
962 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( |
|
963 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( | |
963 | pr_id=pull_request.pull_request_id, |
|
964 | pr_id=pull_request.pull_request_id, | |
964 | pr_title=pull_request.title, |
|
965 | pr_title=pull_request.title, | |
965 | pr_desc=pull_request.description, |
|
966 | pr_desc=pull_request.description, | |
966 | source_repo=source_vcs.name, |
|
967 | source_repo=source_vcs.name, | |
967 | source_ref_name=pull_request.source_ref_parts.name, |
|
968 | source_ref_name=pull_request.source_ref_parts.name, | |
968 | target_repo=target_vcs.name, |
|
969 | target_repo=target_vcs.name, | |
969 | target_ref_name=pull_request.target_ref_parts.name, |
|
970 | target_ref_name=pull_request.target_ref_parts.name, | |
970 | ) |
|
971 | ) | |
971 |
|
972 | |||
972 | workspace_id = self._workspace_id(pull_request) |
|
973 | workspace_id = self._workspace_id(pull_request) | |
973 | repo_id = pull_request.target_repo.repo_id |
|
974 | repo_id = pull_request.target_repo.repo_id | |
974 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
975 | use_rebase = self._use_rebase_for_merging(pull_request) | |
975 | close_branch = self._close_branch_before_merging(pull_request) |
|
976 | close_branch = self._close_branch_before_merging(pull_request) | |
976 | user_name = self._user_name_for_merging(pull_request, user) |
|
977 | user_name = self._user_name_for_merging(pull_request, user) | |
977 |
|
978 | |||
978 | target_ref = self._refresh_reference( |
|
979 | target_ref = self._refresh_reference( | |
979 | pull_request.target_ref_parts, target_vcs) |
|
980 | pull_request.target_ref_parts, target_vcs) | |
980 |
|
981 | |||
981 | callback_daemon, extras = prepare_callback_daemon( |
|
982 | callback_daemon, extras = prepare_callback_daemon( | |
982 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
983 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, | |
983 | host=vcs_settings.HOOKS_HOST, |
|
984 | host=vcs_settings.HOOKS_HOST, | |
984 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
985 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) | |
985 |
|
986 | |||
986 | with callback_daemon: |
|
987 | with callback_daemon: | |
987 | # TODO: johbo: Implement a clean way to run a config_override |
|
988 | # TODO: johbo: Implement a clean way to run a config_override | |
988 | # for a single call. |
|
989 | # for a single call. | |
989 | target_vcs.config.set( |
|
990 | target_vcs.config.set( | |
990 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
991 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) | |
991 |
|
992 | |||
992 | merge_state = target_vcs.merge( |
|
993 | merge_state = target_vcs.merge( | |
993 | repo_id, workspace_id, target_ref, source_vcs, |
|
994 | repo_id, workspace_id, target_ref, source_vcs, | |
994 | pull_request.source_ref_parts, |
|
995 | pull_request.source_ref_parts, | |
995 | user_name=user_name, user_email=user.email, |
|
996 | user_name=user_name, user_email=user.email, | |
996 | message=message, use_rebase=use_rebase, |
|
997 | message=message, use_rebase=use_rebase, | |
997 | close_branch=close_branch) |
|
998 | close_branch=close_branch) | |
998 | return merge_state |
|
999 | return merge_state | |
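The merge message assembled above is a plain str.format template; the keywords passed in (pr_id, pr_title, pr_desc, source_repo, source_ref_name, target_repo, target_ref_name) are the only fields a custom merge_msg can reference, and the default template itself lives in vcs_settings.MERGE_MESSAGE_TMPL (not shown here). A hedged example of supplying a custom template; fields the template does not mention, such as pr_desc, are simply ignored by .format():

    custom_msg = (
        u'Merge pull request !{pr_id} "{pr_title}"\n\n'
        u'from {source_repo} ({source_ref_name}) '
        u'into {target_repo} ({target_ref_name})')

    merge_state = model._merge_pull_request(
        pull_request, user, extras, merge_msg=custom_msg)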
999 |
|
1000 | |||
1000 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
1001 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): | |
1001 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
1002 | pull_request.merge_rev = merge_state.merge_ref.commit_id | |
1002 | pull_request.updated_on = datetime.datetime.now() |
|
1003 | pull_request.updated_on = datetime.datetime.now() | |
1003 | close_msg = close_msg or 'Pull request merged and closed' |
|
1004 | close_msg = close_msg or 'Pull request merged and closed' | |
1004 |
|
1005 | |||
1005 | CommentsModel().create( |
|
1006 | CommentsModel().create( | |
1006 | text=safe_unicode(close_msg), |
|
1007 | text=safe_unicode(close_msg), | |
1007 | repo=pull_request.target_repo.repo_id, |
|
1008 | repo=pull_request.target_repo.repo_id, | |
1008 | user=user.user_id, |
|
1009 | user=user.user_id, | |
1009 | pull_request=pull_request.pull_request_id, |
|
1010 | pull_request=pull_request.pull_request_id, | |
1010 | f_path=None, |
|
1011 | f_path=None, | |
1011 | line_no=None, |
|
1012 | line_no=None, | |
1012 | closing_pr=True |
|
1013 | closing_pr=True | |
1013 | ) |
|
1014 | ) | |
1014 |
|
1015 | |||
1015 | Session().add(pull_request) |
|
1016 | Session().add(pull_request) | |
1016 | Session().flush() |
|
1017 | Session().flush() | |
1017 | # TODO: paris: replace invalidation with less radical solution |
|
1018 | # TODO: paris: replace invalidation with less radical solution | |
1018 | ScmModel().mark_for_invalidation( |
|
1019 | ScmModel().mark_for_invalidation( | |
1019 | pull_request.target_repo.repo_name) |
|
1020 | pull_request.target_repo.repo_name) | |
1020 | self.trigger_pull_request_hook(pull_request, user, 'merge') |
|
1021 | self.trigger_pull_request_hook(pull_request, user, 'merge') | |
1021 |
|
1022 | |||
1022 | def has_valid_update_type(self, pull_request): |
|
1023 | def has_valid_update_type(self, pull_request): | |
1023 | source_ref_type = pull_request.source_ref_parts.type |
|
1024 | source_ref_type = pull_request.source_ref_parts.type | |
1024 | return source_ref_type in self.REF_TYPES |
|
1025 | return source_ref_type in self.REF_TYPES | |
1025 |
|
1026 | |||
1026 | def get_flow_commits(self, pull_request): |
|
1027 | def get_flow_commits(self, pull_request): | |
1027 |
|
1028 | |||
1028 | # source repo |
|
1029 | # source repo | |
1029 | source_ref_name = pull_request.source_ref_parts.name |
|
1030 | source_ref_name = pull_request.source_ref_parts.name | |
1030 | source_ref_type = pull_request.source_ref_parts.type |
|
1031 | source_ref_type = pull_request.source_ref_parts.type | |
1031 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1032 | source_ref_id = pull_request.source_ref_parts.commit_id | |
1032 | source_repo = pull_request.source_repo.scm_instance() |
|
1033 | source_repo = pull_request.source_repo.scm_instance() | |
1033 |
|
1034 | |||
1034 | try: |
|
1035 | try: | |
1035 | if source_ref_type in self.REF_TYPES: |
|
1036 | if source_ref_type in self.REF_TYPES: | |
1036 | source_commit = source_repo.get_commit( |
|
1037 | source_commit = source_repo.get_commit( | |
1037 | source_ref_name, reference_obj=pull_request.source_ref_parts) |
|
1038 | source_ref_name, reference_obj=pull_request.source_ref_parts) | |
1038 | else: |
|
1039 | else: | |
1039 | source_commit = source_repo.get_commit(source_ref_id) |
|
1040 | source_commit = source_repo.get_commit(source_ref_id) | |
1040 | except CommitDoesNotExistError: |
|
1041 | except CommitDoesNotExistError: | |
1041 | raise SourceRefMissing() |
|
1042 | raise SourceRefMissing() | |
1042 |
|
1043 | |||
1043 | # target repo |
|
1044 | # target repo | |
1044 | target_ref_name = pull_request.target_ref_parts.name |
|
1045 | target_ref_name = pull_request.target_ref_parts.name | |
1045 | target_ref_type = pull_request.target_ref_parts.type |
|
1046 | target_ref_type = pull_request.target_ref_parts.type | |
1046 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1047 | target_ref_id = pull_request.target_ref_parts.commit_id | |
1047 | target_repo = pull_request.target_repo.scm_instance() |
|
1048 | target_repo = pull_request.target_repo.scm_instance() | |
1048 |
|
1049 | |||
1049 | try: |
|
1050 | try: | |
1050 | if target_ref_type in self.REF_TYPES: |
|
1051 | if target_ref_type in self.REF_TYPES: | |
1051 | target_commit = target_repo.get_commit( |
|
1052 | target_commit = target_repo.get_commit( | |
1052 | target_ref_name, reference_obj=pull_request.target_ref_parts) |
|
1053 | target_ref_name, reference_obj=pull_request.target_ref_parts) | |
1053 | else: |
|
1054 | else: | |
1054 | target_commit = target_repo.get_commit(target_ref_id) |
|
1055 | target_commit = target_repo.get_commit(target_ref_id) | |
1055 | except CommitDoesNotExistError: |
|
1056 | except CommitDoesNotExistError: | |
1056 | raise TargetRefMissing() |
|
1057 | raise TargetRefMissing() | |
1057 |
|
1058 | |||
1058 | return source_commit, target_commit |
|
1059 | return source_commit, target_commit | |
1059 |
|
1060 | |||
1060 | def update_commits(self, pull_request, updating_user): |
|
1061 | def update_commits(self, pull_request, updating_user): | |
1061 | """ |
|
1062 | """ | |
1062 | Get the updated list of commits for the pull request |
|
1063 | Get the updated list of commits for the pull request | |
1063 | and return the new pull request version and the list |
|
1064 | and return the new pull request version and the list | |
1064 | of commits processed by this update action |
|
1065 | of commits processed by this update action | |
1065 |
|
1066 | |||
1066 | updating_user is the user_object who triggered the update |
|
1067 | updating_user is the user_object who triggered the update | |
1067 | """ |
|
1068 | """ | |
1068 | pull_request = self.__get_pull_request(pull_request) |
|
1069 | pull_request = self.__get_pull_request(pull_request) | |
1069 | source_ref_type = pull_request.source_ref_parts.type |
|
1070 | source_ref_type = pull_request.source_ref_parts.type | |
1070 | source_ref_name = pull_request.source_ref_parts.name |
|
1071 | source_ref_name = pull_request.source_ref_parts.name | |
1071 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1072 | source_ref_id = pull_request.source_ref_parts.commit_id | |
1072 |
|
1073 | |||
1073 | target_ref_type = pull_request.target_ref_parts.type |
|
1074 | target_ref_type = pull_request.target_ref_parts.type | |
1074 | target_ref_name = pull_request.target_ref_parts.name |
|
1075 | target_ref_name = pull_request.target_ref_parts.name | |
1075 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1076 | target_ref_id = pull_request.target_ref_parts.commit_id | |
1076 |
|
1077 | |||
1077 | if not self.has_valid_update_type(pull_request): |
|
1078 | if not self.has_valid_update_type(pull_request): | |
1078 | log.debug("Skipping update of pull request %s due to ref type: %s", |
|
1079 | log.debug("Skipping update of pull request %s due to ref type: %s", | |
1079 | pull_request, source_ref_type) |
|
1080 | pull_request, source_ref_type) | |
1080 | return UpdateResponse( |
|
1081 | return UpdateResponse( | |
1081 | executed=False, |
|
1082 | executed=False, | |
1082 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
1083 | reason=UpdateFailureReason.WRONG_REF_TYPE, | |
1083 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1084 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
1084 | source_changed=False, target_changed=False) |
|
1085 | source_changed=False, target_changed=False) | |
1085 |
|
1086 | |||
1086 | try: |
|
1087 | try: | |
1087 | source_commit, target_commit = self.get_flow_commits(pull_request) |
|
1088 | source_commit, target_commit = self.get_flow_commits(pull_request) | |
1088 | except SourceRefMissing: |
|
1089 | except SourceRefMissing: | |
1089 | return UpdateResponse( |
|
1090 | return UpdateResponse( | |
1090 | executed=False, |
|
1091 | executed=False, | |
1091 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
1092 | reason=UpdateFailureReason.MISSING_SOURCE_REF, | |
1092 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1093 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
1093 | source_changed=False, target_changed=False) |
|
1094 | source_changed=False, target_changed=False) | |
1094 | except TargetRefMissing: |
|
1095 | except TargetRefMissing: | |
1095 | return UpdateResponse( |
|
1096 | return UpdateResponse( | |
1096 | executed=False, |
|
1097 | executed=False, | |
1097 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
1098 | reason=UpdateFailureReason.MISSING_TARGET_REF, | |
1098 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1099 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
1099 | source_changed=False, target_changed=False) |
|
1100 | source_changed=False, target_changed=False) | |
1100 |
|
1101 | |||
1101 | source_changed = source_ref_id != source_commit.raw_id |
|
1102 | source_changed = source_ref_id != source_commit.raw_id | |
1102 | target_changed = target_ref_id != target_commit.raw_id |
|
1103 | target_changed = target_ref_id != target_commit.raw_id | |
1103 |
|
1104 | |||
1104 | if not (source_changed or target_changed): |
|
1105 | if not (source_changed or target_changed): | |
1105 | log.debug("Nothing changed in pull request %s", pull_request) |
|
1106 | log.debug("Nothing changed in pull request %s", pull_request) | |
1106 | return UpdateResponse( |
|
1107 | return UpdateResponse( | |
1107 | executed=False, |
|
1108 | executed=False, | |
1108 | reason=UpdateFailureReason.NO_CHANGE, |
|
1109 | reason=UpdateFailureReason.NO_CHANGE, | |
1109 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1110 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
1110 | source_changed=target_changed, target_changed=source_changed) |
|
1111 | source_changed=target_changed, target_changed=source_changed) | |
1111 |
|
1112 | |||
1112 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
1113 | change_in_found = 'target repo' if target_changed else 'source repo' | |
1113 | log.debug('Updating pull request because of change in %s detected', |
|
1114 | log.debug('Updating pull request because of change in %s detected', | |
1114 | change_in_found) |
|
1115 | change_in_found) | |
1115 |
|
1116 | |||
1116 | # Finally there is a need for an update; in case of a source change |
|
1117 | # Finally there is a need for an update; in case of a source change | |
1117 | # we create a new version, otherwise just an in-place update |
|
1118 | # we create a new version, otherwise just an in-place update | |
1118 | if source_changed: |
|
1119 | if source_changed: | |
1119 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
1120 | pull_request_version = self._create_version_from_snapshot(pull_request) | |
1120 | self._link_comments_to_version(pull_request_version) |
|
1121 | self._link_comments_to_version(pull_request_version) | |
1121 | else: |
|
1122 | else: | |
1122 | try: |
|
1123 | try: | |
1123 | ver = pull_request.versions[-1] |
|
1124 | ver = pull_request.versions[-1] | |
1124 | except IndexError: |
|
1125 | except IndexError: | |
1125 | ver = None |
|
1126 | ver = None | |
1126 |
|
1127 | |||
1127 | pull_request.pull_request_version_id = \ |
|
1128 | pull_request.pull_request_version_id = \ | |
1128 | ver.pull_request_version_id if ver else None |
|
1129 | ver.pull_request_version_id if ver else None | |
1129 | pull_request_version = pull_request |
|
1130 | pull_request_version = pull_request | |
1130 |
|
1131 | |||
1131 | source_repo = pull_request.source_repo.scm_instance() |
|
1132 | source_repo = pull_request.source_repo.scm_instance() | |
1132 | target_repo = pull_request.target_repo.scm_instance() |
|
1133 | target_repo = pull_request.target_repo.scm_instance() | |
1133 |
|
1134 | |||
1134 | # re-compute commit ids |
|
1135 | # re-compute commit ids | |
1135 | old_commit_ids = pull_request.revisions |
|
1136 | old_commit_ids = pull_request.revisions | |
1136 | pre_load = ["author", "date", "message", "branch"] |
|
1137 | pre_load = ["author", "date", "message", "branch"] | |
1137 | commit_ranges = target_repo.compare( |
|
1138 | commit_ranges = target_repo.compare( | |
1138 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
1139 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, | |
1139 | pre_load=pre_load) |
|
1140 | pre_load=pre_load) | |
1140 |
|
1141 | |||
1141 | target_ref = target_commit.raw_id |
|
1142 | target_ref = target_commit.raw_id | |
1142 | source_ref = source_commit.raw_id |
|
1143 | source_ref = source_commit.raw_id | |
1143 | ancestor_commit_id = target_repo.get_common_ancestor( |
|
1144 | ancestor_commit_id = target_repo.get_common_ancestor( | |
1144 | target_ref, source_ref, source_repo) |
|
1145 | target_ref, source_ref, source_repo) | |
1145 |
|
1146 | |||
1146 | if not ancestor_commit_id: |
|
1147 | if not ancestor_commit_id: | |
1147 | raise ValueError( |
|
1148 | raise ValueError( | |
1148 | 'cannot calculate diff info without a common ancestor. ' |
|
1149 | 'cannot calculate diff info without a common ancestor. ' | |
1149 | 'Make sure both repositories are related, and have a common forking commit.') |
|
1150 | 'Make sure both repositories are related, and have a common forking commit.') | |
1150 |
|
1151 | |||
1151 | pull_request.common_ancestor_id = ancestor_commit_id |
|
1152 | pull_request.common_ancestor_id = ancestor_commit_id | |
1152 |
|
1153 | |||
1153 | pull_request.source_ref = '%s:%s:%s' % ( |
|
1154 | pull_request.source_ref = '%s:%s:%s' % ( | |
1154 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
1155 | source_ref_type, source_ref_name, source_commit.raw_id) | |
1155 | pull_request.target_ref = '%s:%s:%s' % ( |
|
1156 | pull_request.target_ref = '%s:%s:%s' % ( | |
1156 | target_ref_type, target_ref_name, ancestor_commit_id) |
|
1157 | target_ref_type, target_ref_name, ancestor_commit_id) | |
1157 |
|
1158 | |||
1158 | pull_request.revisions = [ |
|
1159 | pull_request.revisions = [ | |
1159 | commit.raw_id for commit in reversed(commit_ranges)] |
|
1160 | commit.raw_id for commit in reversed(commit_ranges)] | |
1160 | pull_request.updated_on = datetime.datetime.now() |
|
1161 | pull_request.updated_on = datetime.datetime.now() | |
1161 | Session().add(pull_request) |
|
1162 | Session().add(pull_request) | |
1162 | new_commit_ids = pull_request.revisions |
|
1163 | new_commit_ids = pull_request.revisions | |
1163 |
|
        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # initial commit
        Session().commit()

        if pr_has_changes:
            # send update email to users
            try:
                self.notify_users(pull_request=pull_request, updating_user=updating_user,
                                  ancestor_commit_id=ancestor_commit_id,
                                  commit_changes=commit_changes,
                                  file_changes=file_changes)
                Session().commit()
            except Exception:
                log.exception('Failed to send email notification to users')
                Session().rollback()

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)

        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)

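    # NOTE: a pull request "version" is a frozen snapshot of the pull request
    # row taken at update time; later updates compare their diffs and commit
    # ids against the most recent version created here.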
    def _create_version_from_snapshot(self, pull_request):
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.last_merge_metadata = pull_request.last_merge_metadata
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.common_ancestor_id = pull_request.common_ancestor_id
        version.pull_request = pull_request
        Session().add(version)
        Session().flush()

        return version

    def _generate_update_diffs(self, pull_request, pull_request_version):

        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())
        hide_whitespace_changes = False
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data

    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)

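    # The change sets below are plain list comparisons of commit ids:
    # "added" appear only in the new ids, "removed" only in the old ids,
    # "common" in both, and "total" simply mirrors the new ids.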
    def _calculate_commit_id_changes(self, old_ids, new_ids):
        added = [x for x in new_ids if x not in old_ids]
        common = [x for x in new_ids if x in old_ids]
        removed = [x for x in old_ids if x not in new_ids]
        total = new_ids
        return ChangeTuple(added, common, removed, total)

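    # File changes are detected by hashing (md5) the raw per-file diff of the
    # old and the new version and comparing those hashes filename by filename.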
    def _calculate_file_changes(self, old_diff_data, new_diff_data):

        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in the old diff, figure out from the
                # parsed diff operations whether it was an ADD or a REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove the file from old, since we have seen it already
                del old_files[new_filename]

        # removed files are those present in the old diff but not in the new
        # one; since we delete every old entry that also appears in the new
        # diff, whatever is left over must be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)

    def _render_update_message(self, ancestor_commit_id, changes, file_changes):
        """
        Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it always looks the same regardless of which default renderer
        the system is using.

        :param ancestor_commit_id: ancestor raw_id
        :param changes: changes named tuple
        :param file_changes: file changes named tuple

        """
        new_status = ChangesetStatus.get_status_lbl(
            ChangesetStatus.STATUS_UNDER_REVIEW)

        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        params = {
            'under_review_label': new_status,
            'added_commits': changes.added,
            'removed_commits': changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'ancestor_commit_id': ancestor_commit_id
        }
        renderer = RstTemplateRenderer()
        return renderer.render('pull_request_update.mako', **params)

    def edit(self, pull_request, title, description, description_renderer, user):
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        pull_request.description_renderer = description_renderer
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)

    def update_reviewers(self, pull_request, reviewer_data, user):
        """
        Update the reviewers in the pull request

        :param pull_request: the pr to update
        :param reviewer_data: list of tuples
            [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
        :param user: current user who triggers this action
        """

        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')

        reviewers = {}
        for user_id, reasons, mandatory, role, rules in reviewer_data:
            if isinstance(user_id, (int, compat.string_types)):
                user_id = self._get_user(user_id).user_id
            reviewers[user_id] = {
                'reasons': reasons, 'mandatory': mandatory, 'role': role}

        reviewers_ids = set(reviewers.keys())
        current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
            pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)

        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        added_audit_reviewers = []
        removed_audit_reviewers = []

        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers()
            reviewer.user = _usr
            reviewer.pull_request = pull_request
            reviewer.reasons = reviewers[uid]['reasons']
            # NOTE(marcink): mandatory shouldn't be changed now
            # reviewer.mandatory = reviewers[uid]['reasons']
            # NOTE(marcink): role should be hardcoded, so we won't edit it.
            reviewer.role = PullRequestReviewers.ROLE_REVIEWER
            Session().add(reviewer)
            added_audit_reviewers.append(reviewer.get_dict())

        for uid in ids_to_remove:
            changed = True
            # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
            # This is an edge case that handles a previous state of having the
            # same reviewer twice; this CAN happen due to the lack of DB checks
            reviewers = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
                        PullRequestReviewers.pull_request == pull_request)\
                .all()

            for obj in reviewers:
                removed_audit_reviewers.append(obj.get_dict())
                Session().delete(obj)

        if changed:
            Session().expire_all()
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        # finally store audit logs
        for user_data in added_audit_reviewers:
            self._log_audit_action(
                'repo.pull_request.reviewer.add', {'data': user_data},
                user, pull_request)
        for user_data in removed_audit_reviewers:
            self._log_audit_action(
                'repo.pull_request.reviewer.delete', {'old_data': user_data},
                user, pull_request)

        self.notify_reviewers(pull_request, ids_to_add, user)
        return ids_to_add, ids_to_remove

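    # Observers are stored in the same PullRequestReviewers table as
    # reviewers, distinguished only by the ROLE_OBSERVER role, so the flow
    # below mirrors update_reviewers().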
    def update_observers(self, pull_request, observer_data, user):
        """
        Update the observers in the pull request

        :param pull_request: the pr to update
        :param observer_data: list of tuples
            [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
        :param user: current user who triggers this action
        """
        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')

        observers = {}
        for user_id, reasons, mandatory, role, rules in observer_data:
            if isinstance(user_id, (int, compat.string_types)):
                user_id = self._get_user(user_id).user_id
            observers[user_id] = {
                'reasons': reasons, 'mandatory': mandatory, 'role': role}

        observers_ids = set(observers.keys())
        current_observers = PullRequestReviewers.get_pull_request_reviewers(
            pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)

        current_observers_ids = set([x.user.user_id for x in current_observers])

        ids_to_add = observers_ids.difference(current_observers_ids)
        ids_to_remove = current_observers_ids.difference(observers_ids)

        log.debug("Adding %s observers", ids_to_add)
        log.debug("Removing %s observers", ids_to_remove)
        changed = False
        added_audit_observers = []
        removed_audit_observers = []

        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            observer = PullRequestReviewers()
            observer.user = _usr
            observer.pull_request = pull_request
            observer.reasons = observers[uid]['reasons']
            # NOTE(marcink): mandatory shouldn't be changed now
            # observer.mandatory = observer[uid]['reasons']

            # NOTE(marcink): role should be hardcoded, so we won't edit it.
            observer.role = PullRequestReviewers.ROLE_OBSERVER
            Session().add(observer)
            added_audit_observers.append(observer.get_dict())

        for uid in ids_to_remove:
            changed = True
            # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
            # This is an edge case that handles a previous state of having the
            # same reviewer twice; this CAN happen due to the lack of DB checks
            observers = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
                        PullRequestReviewers.pull_request == pull_request)\
                .all()

            for obj in observers:
                removed_audit_observers.append(obj.get_dict())
                Session().delete(obj)

        if changed:
            Session().expire_all()
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        # finally store audit logs
        for user_data in added_audit_observers:
            self._log_audit_action(
                'repo.pull_request.observer.add', {'data': user_data},
                user, pull_request)
        for user_data in removed_audit_observers:
            self._log_audit_action(
                'repo.pull_request.observer.delete', {'old_data': user_data},
                user, pull_request)

        self.notify_observers(pull_request, ids_to_add, user)
        return ids_to_add, ids_to_remove

    def get_url(self, pull_request, request=None, permalink=False):
        if not request:
            request = get_current_request()

        if permalink:
            return request.route_url(
                'pull_requests_global',
                pull_request_id=pull_request.pull_request_id,)
        else:
            return request.route_url(
                'pullrequest_show',
                repo_name=safe_str(pull_request.target_repo.repo_name),
                pull_request_id=pull_request.pull_request_id,)

    def get_shadow_clone_url(self, pull_request, request=None):
        """
        Returns qualified url pointing to the shadow repository. If this pull
        request is closed there is no shadow repository and ``None`` will be
        returned.
        """
        if pull_request.is_closed():
            return None
        else:
            pr_url = urllib.unquote(self.get_url(pull_request, request=request))
            return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))

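    # Shared helper that builds the notification/email payload for both
    # reviewers and observers; the role is only passed through to the email
    # template as ``user_role``.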
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            'thread_ids': [pr_url],
            'user_role': role
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )

    def notify_reviewers(self, pull_request, reviewers_ids, user):
        return self._notify_reviewers(pull_request, reviewers_ids,
                                      PullRequestReviewers.ROLE_REVIEWER, user)

    def notify_observers(self, pull_request, observers_ids, user):
        return self._notify_reviewers(pull_request, observers_ids,
                                      PullRequestReviewers.ROLE_OBSERVER, user)

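    # Sends the "pull request updated" notification to every user attached to
    # the pull request, excluding the person who performed the update.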
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):

        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'thread_ids': [pr_url],
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )

    def delete(self, pull_request, user=None):
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)

    def close_pull_request(self, pull_request, user):
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)

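    # Closing with a comment: record a status-change comment (approved only
    # when the calculated review status agrees, otherwise rejected), fire the
    # relevant hooks, and finally close the pull request itself.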
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of the pull request again, and based on
        # that calculation trigger a status change. This matters in cases when
        # a non-reviewer admin closes the pr: their vote doesn't change the
        # status, while a reviewer's vote might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status

    def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
        _ = translator or get_current_request().translate

        if not self._is_merge_enabled(pull_request):
            return None, False, _('Server-side pull request merging is disabled.')

        if pull_request.is_closed():
            return None, False, _('This pull request is closed.')

        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return None, merge_possible, msg

        try:
            merge_response = self._try_merge(
                pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", merge_response)
            return merge_response, merge_response.possible, merge_response.merge_status_message
        except NotImplementedError:
            return None, False, _('Pull request merging is not supported.')

1840 | def _check_repo_requirements(self, target, source, translator): |
|
1841 | def _check_repo_requirements(self, target, source, translator): | |
1841 | """ |
|
1842 | """ | |
1842 | Check if `target` and `source` have compatible requirements. |
|
1843 | Check if `target` and `source` have compatible requirements. | |
1843 |
|
1844 | |||
1844 | Currently this is just checking for largefiles. |
|
1845 | Currently this is just checking for largefiles. | |
1845 | """ |
|
1846 | """ | |
1846 | _ = translator |
|
1847 | _ = translator | |
1847 | target_has_largefiles = self._has_largefiles(target) |
|
1848 | target_has_largefiles = self._has_largefiles(target) | |
1848 | source_has_largefiles = self._has_largefiles(source) |
|
1849 | source_has_largefiles = self._has_largefiles(source) | |
1849 | merge_possible = True |
|
1850 | merge_possible = True | |
1850 | message = u'' |
|
1851 | message = u'' | |
1851 |
|
1852 | |||
1852 | if target_has_largefiles != source_has_largefiles: |
|
1853 | if target_has_largefiles != source_has_largefiles: | |
1853 | merge_possible = False |
|
1854 | merge_possible = False | |
1854 | if source_has_largefiles: |
|
1855 | if source_has_largefiles: | |
1855 | message = _( |
|
1856 | message = _( | |
1856 | 'Target repository large files support is disabled.') |
|
1857 | 'Target repository large files support is disabled.') | |
1857 | else: |
|
1858 | else: | |
1858 | message = _( |
|
1859 | message = _( | |
1859 | 'Source repository large files support is disabled.') |
|
1860 | 'Source repository large files support is disabled.') | |
1860 |
|
1861 | |||
1861 | return merge_possible, message |
|
1862 | return merge_possible, message | |
1862 |
|
1863 | |||
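In other words, `_check_repo_requirements()` only blocks the merge when exactly one side of the pull request has largefiles enabled. A plain-Python restatement of that rule, as a hedged sketch (the real check goes through `VcsSettingsModel`, as shown above):

    # Hedged sketch of the compatibility rule above: a merge is refused only
    # when the largefiles setting differs between target and source.
    def largefiles_compatible(target_has_largefiles, source_has_largefiles):
        return target_has_largefiles == source_has_largefiles

    assert largefiles_compatible(True, True)
    assert not largefiles_compatible(True, False)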
1863 | def _has_largefiles(self, repo): |
|
1864 | def _has_largefiles(self, repo): | |
1864 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1865 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( | |
1865 | 'extensions', 'largefiles') |
|
1866 | 'extensions', 'largefiles') | |
1866 | return largefiles_ui and largefiles_ui[0].active |
|
1867 | return largefiles_ui and largefiles_ui[0].active | |
1867 |
|
1868 | |||
1868 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): |
|
1869 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): | |
1869 | """ |
|
1870 | """ | |
1870 | Try to merge the pull request and return the merge status. |
|
1871 | Try to merge the pull request and return the merge status. | |
1871 | """ |
|
1872 | """ | |
1872 | log.debug( |
|
1873 | log.debug( | |
1873 | "Trying out if the pull request %s can be merged. Force_refresh=%s", |
|
1874 | "Trying out if the pull request %s can be merged. Force_refresh=%s", | |
1874 | pull_request.pull_request_id, force_shadow_repo_refresh) |
|
1875 | pull_request.pull_request_id, force_shadow_repo_refresh) | |
1875 | target_vcs = pull_request.target_repo.scm_instance() |
|
1876 | target_vcs = pull_request.target_repo.scm_instance() | |
1876 | # Refresh the target reference. |
|
1877 | # Refresh the target reference. | |
1877 | try: |
|
1878 | try: | |
1878 | target_ref = self._refresh_reference( |
|
1879 | target_ref = self._refresh_reference( | |
1879 | pull_request.target_ref_parts, target_vcs) |
|
1880 | pull_request.target_ref_parts, target_vcs) | |
1880 | except CommitDoesNotExistError: |
|
1881 | except CommitDoesNotExistError: | |
1881 | merge_state = MergeResponse( |
|
1882 | merge_state = MergeResponse( | |
1882 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
1883 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, | |
1883 | metadata={'target_ref': pull_request.target_ref_parts}) |
|
1884 | metadata={'target_ref': pull_request.target_ref_parts}) | |
1884 | return merge_state |
|
1885 | return merge_state | |
1885 |
|
1886 | |||
1886 | target_locked = pull_request.target_repo.locked |
|
1887 | target_locked = pull_request.target_repo.locked | |
1887 | if target_locked and target_locked[0]: |
|
1888 | if target_locked and target_locked[0]: | |
1888 | locked_by = 'user:{}'.format(target_locked[0]) |
|
1889 | locked_by = 'user:{}'.format(target_locked[0]) | |
1889 | log.debug("The target repository is locked by %s.", locked_by) |
|
1890 | log.debug("The target repository is locked by %s.", locked_by) | |
1890 | merge_state = MergeResponse( |
|
1891 | merge_state = MergeResponse( | |
1891 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, |
|
1892 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, | |
1892 | metadata={'locked_by': locked_by}) |
|
1893 | metadata={'locked_by': locked_by}) | |
1893 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( |
|
1894 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( | |
1894 | pull_request, target_ref): |
|
1895 | pull_request, target_ref): | |
1895 | log.debug("Refreshing the merge status of the repository.") |
|
1896 | log.debug("Refreshing the merge status of the repository.") | |
1896 | merge_state = self._refresh_merge_state( |
|
1897 | merge_state = self._refresh_merge_state( | |
1897 | pull_request, target_vcs, target_ref) |
|
1898 | pull_request, target_vcs, target_ref) | |
1898 | else: |
|
1899 | else: | |
1899 | possible = pull_request.last_merge_status == MergeFailureReason.NONE |
|
1900 | possible = pull_request.last_merge_status == MergeFailureReason.NONE | |
1900 | metadata = { |
|
1901 | metadata = { | |
1901 | 'unresolved_files': '', |
|
1902 | 'unresolved_files': '', | |
1902 | 'target_ref': pull_request.target_ref_parts, |
|
1903 | 'target_ref': pull_request.target_ref_parts, | |
1903 | 'source_ref': pull_request.source_ref_parts, |
|
1904 | 'source_ref': pull_request.source_ref_parts, | |
1904 | } |
|
1905 | } | |
1905 | if pull_request.last_merge_metadata: |
|
1906 | if pull_request.last_merge_metadata: | |
1906 | metadata.update(pull_request.last_merge_metadata_parsed) |
|
1907 | metadata.update(pull_request.last_merge_metadata_parsed) | |
1907 |
|
1908 | |||
1908 | if not possible and target_ref.type == 'branch': |
|
1909 | if not possible and target_ref.type == 'branch': | |
1909 | # NOTE(marcink): case for mercurial multiple heads on branch |
|
1910 | # NOTE(marcink): case for mercurial multiple heads on branch | |
1910 | heads = target_vcs._heads(target_ref.name) |
|
1911 | heads = target_vcs._heads(target_ref.name) | |
1911 | if len(heads) != 1: |
|
1912 | if len(heads) != 1: | |
1912 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) |
|
1913 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) | |
1913 | metadata.update({ |
|
1914 | metadata.update({ | |
1914 | 'heads': heads |
|
1915 | 'heads': heads | |
1915 | }) |
|
1916 | }) | |
1916 |
|
1917 | |||
1917 | merge_state = MergeResponse( |
|
1918 | merge_state = MergeResponse( | |
1918 | possible, False, None, pull_request.last_merge_status, metadata=metadata) |
|
1919 | possible, False, None, pull_request.last_merge_status, metadata=metadata) | |
1919 |
|
1920 | |||
1920 | return merge_state |
|
1921 | return merge_state | |
1921 |
|
1922 | |||
1922 | def _refresh_reference(self, reference, vcs_repository): |
|
1923 | def _refresh_reference(self, reference, vcs_repository): | |
1923 | if reference.type in self.UPDATABLE_REF_TYPES: |
|
1924 | if reference.type in self.UPDATABLE_REF_TYPES: | |
1924 | name_or_id = reference.name |
|
1925 | name_or_id = reference.name | |
1925 | else: |
|
1926 | else: | |
1926 | name_or_id = reference.commit_id |
|
1927 | name_or_id = reference.commit_id | |
1927 |
|
1928 | |||
1928 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1929 | refreshed_commit = vcs_repository.get_commit(name_or_id) | |
1929 | refreshed_reference = Reference( |
|
1930 | refreshed_reference = Reference( | |
1930 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1931 | reference.type, reference.name, refreshed_commit.raw_id) | |
1931 | return refreshed_reference |
|
1932 | return refreshed_reference | |
1932 |
|
1933 | |||
1933 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1934 | def _needs_merge_state_refresh(self, pull_request, target_reference): | |
1934 | return not( |
|
1935 | return not( | |
1935 | pull_request.revisions and |
|
1936 | pull_request.revisions and | |
1936 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1937 | pull_request.revisions[0] == pull_request._last_merge_source_rev and | |
1937 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1938 | target_reference.commit_id == pull_request._last_merge_target_rev) | |
1938 |
|
1939 | |||
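Read together with `_try_merge()` above, the cached merge state is reused only when neither end of the pull request has moved since the last dry run; otherwise, or when `force_shadow_repo_refresh` is set, a fresh dry-run merge is performed. A hedged, standalone restatement of that staleness test (function and parameter names are illustrative):

    # Hedged sketch of the refresh decision: refresh when forced, or when the
    # newest source revision / target commit no longer match the values stored
    # from the previous dry-run merge.
    def should_refresh_merge_state(force_refresh, revisions,
                                   last_source_rev, last_target_rev,
                                   target_commit_id):
        if force_refresh:
            return True
        up_to_date = bool(
            revisions
            and revisions[0] == last_source_rev
            and target_commit_id == last_target_rev)
        return not up_to_date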
1939 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1940 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): | |
1940 | workspace_id = self._workspace_id(pull_request) |
|
1941 | workspace_id = self._workspace_id(pull_request) | |
1941 | source_vcs = pull_request.source_repo.scm_instance() |
|
1942 | source_vcs = pull_request.source_repo.scm_instance() | |
1942 | repo_id = pull_request.target_repo.repo_id |
|
1943 | repo_id = pull_request.target_repo.repo_id | |
1943 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1944 | use_rebase = self._use_rebase_for_merging(pull_request) | |
1944 | close_branch = self._close_branch_before_merging(pull_request) |
|
1945 | close_branch = self._close_branch_before_merging(pull_request) | |
1945 | merge_state = target_vcs.merge( |
|
1946 | merge_state = target_vcs.merge( | |
1946 | repo_id, workspace_id, |
|
1947 | repo_id, workspace_id, | |
1947 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1948 | target_reference, source_vcs, pull_request.source_ref_parts, | |
1948 | dry_run=True, use_rebase=use_rebase, |
|
1949 | dry_run=True, use_rebase=use_rebase, | |
1949 | close_branch=close_branch) |
|
1950 | close_branch=close_branch) | |
1950 |
|
1951 | |||
1951 | # Do not store the response if there was an unknown error. |
|
1952 | # Do not store the response if there was an unknown error. | |
1952 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1953 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: | |
1953 | pull_request._last_merge_source_rev = \ |
|
1954 | pull_request._last_merge_source_rev = \ | |
1954 | pull_request.source_ref_parts.commit_id |
|
1955 | pull_request.source_ref_parts.commit_id | |
1955 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1956 | pull_request._last_merge_target_rev = target_reference.commit_id | |
1956 | pull_request.last_merge_status = merge_state.failure_reason |
|
1957 | pull_request.last_merge_status = merge_state.failure_reason | |
1957 | pull_request.last_merge_metadata = merge_state.metadata |
|
1958 | pull_request.last_merge_metadata = merge_state.metadata | |
1958 |
|
1959 | |||
1959 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1960 | pull_request.shadow_merge_ref = merge_state.merge_ref | |
1960 | Session().add(pull_request) |
|
1961 | Session().add(pull_request) | |
1961 | Session().commit() |
|
1962 | Session().commit() | |
1962 |
|
1963 | |||
1963 | return merge_state |
|
1964 | return merge_state | |
1964 |
|
1965 | |||
1965 | def _workspace_id(self, pull_request): |
|
1966 | def _workspace_id(self, pull_request): | |
1966 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1967 | workspace_id = 'pr-%s' % pull_request.pull_request_id | |
1967 | return workspace_id |
|
1968 | return workspace_id | |
1968 |
|
1969 | |||
1969 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1970 | def generate_repo_data(self, repo, commit_id=None, branch=None, | |
1970 | bookmark=None, translator=None): |
|
1971 | bookmark=None, translator=None): | |
1971 | from rhodecode.model.repo import RepoModel |
|
1972 | from rhodecode.model.repo import RepoModel | |
1972 |
|
1973 | |||
1973 | all_refs, selected_ref = \ |
|
1974 | all_refs, selected_ref = \ | |
1974 | self._get_repo_pullrequest_sources( |
|
1975 | self._get_repo_pullrequest_sources( | |
1975 | repo.scm_instance(), commit_id=commit_id, |
|
1976 | repo.scm_instance(), commit_id=commit_id, | |
1976 | branch=branch, bookmark=bookmark, translator=translator) |
|
1977 | branch=branch, bookmark=bookmark, translator=translator) | |
1977 |
|
1978 | |||
1978 | refs_select2 = [] |
|
1979 | refs_select2 = [] | |
1979 | for element in all_refs: |
|
1980 | for element in all_refs: | |
1980 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1981 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] | |
1981 | refs_select2.append({'text': element[1], 'children': children}) |
|
1982 | refs_select2.append({'text': element[1], 'children': children}) | |
1982 |
|
1983 | |||
1983 | return { |
|
1984 | return { | |
1984 | 'user': { |
|
1985 | 'user': { | |
1985 | 'user_id': repo.user.user_id, |
|
1986 | 'user_id': repo.user.user_id, | |
1986 | 'username': repo.user.username, |
|
1987 | 'username': repo.user.username, | |
1987 | 'firstname': repo.user.first_name, |
|
1988 | 'firstname': repo.user.first_name, | |
1988 | 'lastname': repo.user.last_name, |
|
1989 | 'lastname': repo.user.last_name, | |
1989 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1990 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), | |
1990 | }, |
|
1991 | }, | |
1991 | 'name': repo.repo_name, |
|
1992 | 'name': repo.repo_name, | |
1992 | 'link': RepoModel().get_url(repo), |
|
1993 | 'link': RepoModel().get_url(repo), | |
1993 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1994 | 'description': h.chop_at_smart(repo.description_safe, '\n'), | |
1994 | 'refs': { |
|
1995 | 'refs': { | |
1995 | 'all_refs': all_refs, |
|
1996 | 'all_refs': all_refs, | |
1996 | 'selected_ref': selected_ref, |
|
1997 | 'selected_ref': selected_ref, | |
1997 | 'select2_refs': refs_select2 |
|
1998 | 'select2_refs': refs_select2 | |
1998 | } |
|
1999 | } | |
1999 | } |
|
2000 | } | |
2000 |
|
2001 | |||
2001 | def generate_pullrequest_title(self, source, source_ref, target): |
|
2002 | def generate_pullrequest_title(self, source, source_ref, target): | |
2002 | return u'{source}#{at_ref} to {target}'.format( |
|
2003 | return u'{source}#{at_ref} to {target}'.format( | |
2003 | source=source, |
|
2004 | source=source, | |
2004 | at_ref=source_ref, |
|
2005 | at_ref=source_ref, | |
2005 | target=target, |
|
2006 | target=target, | |
2006 | ) |
|
2007 | ) | |
2007 |
|
2008 | |||
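`generate_pullrequest_title()` above is a plain string interpolation of `{source}#{at_ref} to {target}`. A hedged usage sketch, with made-up repository and branch names:

    # Hedged usage sketch; 'acme/backend' and 'feature-login' are illustrative.
    title = PullRequestModel().generate_pullrequest_title(
        source='acme/backend', source_ref='feature-login', target='acme/backend')
    assert title == u'acme/backend#feature-login to acme/backend'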
2008 | def _cleanup_merge_workspace(self, pull_request): |
|
2009 | def _cleanup_merge_workspace(self, pull_request): | |
2009 | # Merging related cleanup |
|
2010 | # Merging related cleanup | |
2010 | repo_id = pull_request.target_repo.repo_id |
|
2011 | repo_id = pull_request.target_repo.repo_id | |
2011 | target_scm = pull_request.target_repo.scm_instance() |
|
2012 | target_scm = pull_request.target_repo.scm_instance() | |
2012 | workspace_id = self._workspace_id(pull_request) |
|
2013 | workspace_id = self._workspace_id(pull_request) | |
2013 |
|
2014 | |||
2014 | try: |
|
2015 | try: | |
2015 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) |
|
2016 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) | |
2016 | except NotImplementedError: |
|
2017 | except NotImplementedError: | |
2017 | pass |
|
2018 | pass | |
2018 |
|
2019 | |||
2019 | def _get_repo_pullrequest_sources( |
|
2020 | def _get_repo_pullrequest_sources( | |
2020 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
2021 | self, repo, commit_id=None, branch=None, bookmark=None, | |
2021 | translator=None): |
|
2022 | translator=None): | |
2022 | """ |
|
2023 | """ | |
2023 | Return a structure with repo's interesting commits, suitable for |
|
2024 | Return a structure with repo's interesting commits, suitable for | |
2024 | the selectors in pullrequest controller |
|
2025 | the selectors in pullrequest controller | |
2025 |
|
2026 | |||
2026 | :param commit_id: a commit that must be in the list somehow |
|
2027 | :param commit_id: a commit that must be in the list somehow | |
2027 | and selected by default |
|
2028 | and selected by default | |
2028 | :param branch: a branch that must be in the list and selected |
|
2029 | :param branch: a branch that must be in the list and selected | |
2029 | by default - even if closed |
|
2030 | by default - even if closed | |
2030 | :param bookmark: a bookmark that must be in the list and selected |
|
2031 | :param bookmark: a bookmark that must be in the list and selected | |
2031 | """ |
|
2032 | """ | |
2032 | _ = translator or get_current_request().translate |
|
2033 | _ = translator or get_current_request().translate | |
2033 |
|
2034 | |||
2034 | commit_id = safe_str(commit_id) if commit_id else None |
|
2035 | commit_id = safe_str(commit_id) if commit_id else None | |
2035 | branch = safe_unicode(branch) if branch else None |
|
2036 | branch = safe_unicode(branch) if branch else None | |
2036 | bookmark = safe_unicode(bookmark) if bookmark else None |
|
2037 | bookmark = safe_unicode(bookmark) if bookmark else None | |
2037 |
|
2038 | |||
2038 | selected = None |
|
2039 | selected = None | |
2039 |
|
2040 | |||
2040 | # order matters: first source that has commit_id in it will be selected |
|
2041 | # order matters: first source that has commit_id in it will be selected | |
2041 | sources = [] |
|
2042 | sources = [] | |
2042 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
2043 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) | |
2043 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
2044 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) | |
2044 |
|
2045 | |||
2045 | if commit_id: |
|
2046 | if commit_id: | |
2046 | ref_commit = (h.short_id(commit_id), commit_id) |
|
2047 | ref_commit = (h.short_id(commit_id), commit_id) | |
2047 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
2048 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) | |
2048 |
|
2049 | |||
2049 | sources.append( |
|
2050 | sources.append( | |
2050 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
2051 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), | |
2051 | ) |
|
2052 | ) | |
2052 |
|
2053 | |||
2053 | groups = [] |
|
2054 | groups = [] | |
2054 |
|
2055 | |||
2055 | for group_key, ref_list, group_name, match in sources: |
|
2056 | for group_key, ref_list, group_name, match in sources: | |
2056 | group_refs = [] |
|
2057 | group_refs = [] | |
2057 | for ref_name, ref_id in ref_list: |
|
2058 | for ref_name, ref_id in ref_list: | |
2058 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) |
|
2059 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) | |
2059 | group_refs.append((ref_key, ref_name)) |
|
2060 | group_refs.append((ref_key, ref_name)) | |
2060 |
|
2061 | |||
2061 | if not selected: |
|
2062 | if not selected: | |
2062 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
2063 | if set([commit_id, match]) & set([ref_id, ref_name]): | |
2063 | selected = ref_key |
|
2064 | selected = ref_key | |
2064 |
|
2065 | |||
2065 | if group_refs: |
|
2066 | if group_refs: | |
2066 | groups.append((group_refs, group_name)) |
|
2067 | groups.append((group_refs, group_name)) | |
2067 |
|
2068 | |||
2068 | if not selected: |
|
2069 | if not selected: | |
2069 | ref = commit_id or branch or bookmark |
|
2070 | ref = commit_id or branch or bookmark | |
2070 | if ref: |
|
2071 | if ref: | |
2071 | raise CommitDoesNotExistError( |
|
2072 | raise CommitDoesNotExistError( | |
2072 | u'No commit refs could be found matching: {}'.format(ref)) |
|
2073 | u'No commit refs could be found matching: {}'.format(ref)) | |
2073 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
2074 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: | |
2074 | selected = u'branch:{}:{}'.format( |
|
2075 | selected = u'branch:{}:{}'.format( | |
2075 | safe_unicode(repo.DEFAULT_BRANCH_NAME), |
|
2076 | safe_unicode(repo.DEFAULT_BRANCH_NAME), | |
2076 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) |
|
2077 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) | |
2077 | ) |
|
2078 | ) | |
2078 | elif repo.commit_ids: |
|
2079 | elif repo.commit_ids: | |
2079 | # make the user select in this case |
|
2080 | # make the user select in this case | |
2080 | selected = None |
|
2081 | selected = None | |
2081 | else: |
|
2082 | else: | |
2082 | raise EmptyRepositoryError() |
|
2083 | raise EmptyRepositoryError() | |
2083 | return groups, selected |
|
2084 | return groups, selected | |
2084 |
|
2085 | |||
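The structure returned by `_get_repo_pullrequest_sources()` keys every ref as `<type>:<name>:<commit_id>` and groups the refs for the pull-request selectors; `selected` is one such key, or `None` when the user has to pick manually. A hedged sketch of the shape, with illustrative names and commit ids:

    # Hedged sketch of the (groups, selected) return value; commit ids and
    # ref names below are illustrative only.
    groups = [
        ([(u'book:stable:cafebabe1234', u'stable')], u'Bookmarks'),
        ([(u'branch:default:deadbeefcafe', u'default')], u'Branches'),
    ]
    selected = u'branch:default:deadbeefcafe'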
2085 | def get_diff(self, source_repo, source_ref_id, target_ref_id, |
|
2086 | def get_diff(self, source_repo, source_ref_id, target_ref_id, | |
2086 | hide_whitespace_changes, diff_context): |
|
2087 | hide_whitespace_changes, diff_context): | |
2087 |
|
2088 | |||
2088 | return self._get_diff_from_pr_or_version( |
|
2089 | return self._get_diff_from_pr_or_version( | |
2089 | source_repo, source_ref_id, target_ref_id, |
|
2090 | source_repo, source_ref_id, target_ref_id, | |
2090 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
2091 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) | |
2091 |
|
2092 | |||
2092 | def _get_diff_from_pr_or_version( |
|
2093 | def _get_diff_from_pr_or_version( | |
2093 | self, source_repo, source_ref_id, target_ref_id, |
|
2094 | self, source_repo, source_ref_id, target_ref_id, | |
2094 | hide_whitespace_changes, diff_context): |
|
2095 | hide_whitespace_changes, diff_context): | |
2095 |
|
2096 | |||
2096 | target_commit = source_repo.get_commit( |
|
2097 | target_commit = source_repo.get_commit( | |
2097 | commit_id=safe_str(target_ref_id)) |
|
2098 | commit_id=safe_str(target_ref_id)) | |
2098 | source_commit = source_repo.get_commit( |
|
2099 | source_commit = source_repo.get_commit( | |
2099 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
2100 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) | |
2100 | if isinstance(source_repo, Repository): |
|
2101 | if isinstance(source_repo, Repository): | |
2101 | vcs_repo = source_repo.scm_instance() |
|
2102 | vcs_repo = source_repo.scm_instance() | |
2102 | else: |
|
2103 | else: | |
2103 | vcs_repo = source_repo |
|
2104 | vcs_repo = source_repo | |
2104 |
|
2105 | |||
2105 | # TODO: johbo: In the context of an update, we cannot reach |
|
2106 | # TODO: johbo: In the context of an update, we cannot reach | |
2106 | # the old commit anymore with our normal mechanisms. It needs |
|
2107 | # the old commit anymore with our normal mechanisms. It needs | |
2107 | # some sort of special support in the vcs layer to avoid this |
|
2108 | # some sort of special support in the vcs layer to avoid this | |
2108 | # workaround. |
|
2109 | # workaround. | |
2109 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
2110 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and | |
2110 | vcs_repo.alias == 'git'): |
|
2111 | vcs_repo.alias == 'git'): | |
2111 | source_commit.raw_id = safe_str(source_ref_id) |
|
2112 | source_commit.raw_id = safe_str(source_ref_id) | |
2112 |
|
2113 | |||
2113 | log.debug('calculating diff between ' |
|
2114 | log.debug('calculating diff between ' | |
2114 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
2115 | 'source_ref:%s and target_ref:%s for repo `%s`', | |
2115 | target_ref_id, source_ref_id, |
|
2116 | target_ref_id, source_ref_id, | |
2116 | safe_unicode(vcs_repo.path)) |
|
2117 | safe_unicode(vcs_repo.path)) | |
2117 |
|
2118 | |||
2118 | vcs_diff = vcs_repo.get_diff( |
|
2119 | vcs_diff = vcs_repo.get_diff( | |
2119 | commit1=target_commit, commit2=source_commit, |
|
2120 | commit1=target_commit, commit2=source_commit, | |
2120 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
2121 | ignore_whitespace=hide_whitespace_changes, context=diff_context) | |
2121 | return vcs_diff |
|
2122 | return vcs_diff | |
2122 |
|
2123 | |||
2123 | def _is_merge_enabled(self, pull_request): |
|
2124 | def _is_merge_enabled(self, pull_request): | |
2124 | return self._get_general_setting( |
|
2125 | return self._get_general_setting( | |
2125 | pull_request, 'rhodecode_pr_merge_enabled') |
|
2126 | pull_request, 'rhodecode_pr_merge_enabled') | |
2126 |
|
2127 | |||
2127 | def _use_rebase_for_merging(self, pull_request): |
|
2128 | def _use_rebase_for_merging(self, pull_request): | |
2128 | repo_type = pull_request.target_repo.repo_type |
|
2129 | repo_type = pull_request.target_repo.repo_type | |
2129 | if repo_type == 'hg': |
|
2130 | if repo_type == 'hg': | |
2130 | return self._get_general_setting( |
|
2131 | return self._get_general_setting( | |
2131 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
2132 | pull_request, 'rhodecode_hg_use_rebase_for_merging') | |
2132 | elif repo_type == 'git': |
|
2133 | elif repo_type == 'git': | |
2133 | return self._get_general_setting( |
|
2134 | return self._get_general_setting( | |
2134 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
2135 | pull_request, 'rhodecode_git_use_rebase_for_merging') | |
2135 |
|
2136 | |||
2136 | return False |
|
2137 | return False | |
2137 |
|
2138 | |||
2138 | def _user_name_for_merging(self, pull_request, user): |
|
2139 | def _user_name_for_merging(self, pull_request, user): | |
2139 | env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '') |
|
2140 | env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '') | |
2140 | if env_user_name_attr and hasattr(user, env_user_name_attr): |
|
2141 | if env_user_name_attr and hasattr(user, env_user_name_attr): | |
2141 | user_name_attr = env_user_name_attr |
|
2142 | user_name_attr = env_user_name_attr | |
2142 | else: |
|
2143 | else: | |
2143 | user_name_attr = 'short_contact' |
|
2144 | user_name_attr = 'short_contact' | |
2144 |
|
2145 | |||
2145 | user_name = getattr(user, user_name_attr) |
|
2146 | user_name = getattr(user, user_name_attr) | |
2146 | return user_name |
|
2147 | return user_name | |
2147 |
|
2148 | |||
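`_user_name_for_merging()` normally reads `short_contact` from the user object, but can be redirected to another attribute through the `RC_MERGE_USER_NAME_ATTR` environment variable. A hedged sketch of just that selection rule:

    # Hedged sketch of the attribute-selection rule; the environment override
    # only wins when the user object actually has the named attribute.
    import os

    def pick_user_name_attr(user):
        env_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
        if env_attr and hasattr(user, env_attr):
            return env_attr
        return 'short_contact'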
2148 | def _close_branch_before_merging(self, pull_request): |
|
2149 | def _close_branch_before_merging(self, pull_request): | |
2149 | repo_type = pull_request.target_repo.repo_type |
|
2150 | repo_type = pull_request.target_repo.repo_type | |
2150 | if repo_type == 'hg': |
|
2151 | if repo_type == 'hg': | |
2151 | return self._get_general_setting( |
|
2152 | return self._get_general_setting( | |
2152 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
2153 | pull_request, 'rhodecode_hg_close_branch_before_merging') | |
2153 | elif repo_type == 'git': |
|
2154 | elif repo_type == 'git': | |
2154 | return self._get_general_setting( |
|
2155 | return self._get_general_setting( | |
2155 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
2156 | pull_request, 'rhodecode_git_close_branch_before_merging') | |
2156 |
|
2157 | |||
2157 | return False |
|
2158 | return False | |
2158 |
|
2159 | |||
2159 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
2160 | def _get_general_setting(self, pull_request, settings_key, default=False): | |
2160 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
2161 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) | |
2161 | settings = settings_model.get_general_settings() |
|
2162 | settings = settings_model.get_general_settings() | |
2162 | return settings.get(settings_key, default) |
|
2163 | return settings.get(settings_key, default) | |
2163 |
|
2164 | |||
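The settings-driven helpers above (`_is_merge_enabled`, `_use_rebase_for_merging`, `_close_branch_before_merging`) all funnel through `_get_general_setting()`, which is a plain lookup in the target repository's general VCS settings. A hedged sketch of the equivalent direct lookups, with the key names taken verbatim from the code above and `pull_request` assumed to come from the caller's context:

    # Hedged sketch; `pull_request` is provided by the surrounding code.
    settings = VcsSettingsModel(
        repo=pull_request.target_repo).get_general_settings()
    merge_enabled = settings.get('rhodecode_pr_merge_enabled', False)
    hg_use_rebase = settings.get('rhodecode_hg_use_rebase_for_merging', False)
    git_close_branch = settings.get(
        'rhodecode_git_close_branch_before_merging', False)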
2164 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
2165 | def _log_audit_action(self, action, action_data, user, pull_request): | |
2165 | audit_logger.store( |
|
2166 | audit_logger.store( | |
2166 | action=action, |
|
2167 | action=action, | |
2167 | action_data=action_data, |
|
2168 | action_data=action_data, | |
2168 | user=user, |
|
2169 | user=user, | |
2169 | repo=pull_request.target_repo) |
|
2170 | repo=pull_request.target_repo) | |
2170 |
|
2171 | |||
2171 | def get_reviewer_functions(self): |
|
2172 | def get_reviewer_functions(self): | |
2172 | """ |
|
2173 | """ | |
2173 | Fetches functions for validation and fetching default reviewers. |
|
2174 | Fetches functions for validation and fetching default reviewers. | |
2174 | If available we use the EE package, else we fallback to CE |
|
2175 | If available we use the EE package, else we fallback to CE | |
2175 | package functions |
|
2176 | package functions | |
2176 | """ |
|
2177 | """ | |
2177 | try: |
|
2178 | try: | |
2178 | from rc_reviewers.utils import get_default_reviewers_data |
|
2179 | from rc_reviewers.utils import get_default_reviewers_data | |
2179 | from rc_reviewers.utils import validate_default_reviewers |
|
2180 | from rc_reviewers.utils import validate_default_reviewers | |
2180 | from rc_reviewers.utils import validate_observers |
|
2181 | from rc_reviewers.utils import validate_observers | |
2181 | except ImportError: |
|
2182 | except ImportError: | |
2182 | from rhodecode.apps.repository.utils import get_default_reviewers_data |
|
2183 | from rhodecode.apps.repository.utils import get_default_reviewers_data | |
2183 | from rhodecode.apps.repository.utils import validate_default_reviewers |
|
2184 | from rhodecode.apps.repository.utils import validate_default_reviewers | |
2184 | from rhodecode.apps.repository.utils import validate_observers |
|
2185 | from rhodecode.apps.repository.utils import validate_observers | |
2185 |
|
2186 | |||
2186 | return get_default_reviewers_data, validate_default_reviewers, validate_observers |
|
2187 | return get_default_reviewers_data, validate_default_reviewers, validate_observers | |
2187 |
|
2188 | |||
2188 |
|
2189 | |||
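`get_reviewer_functions()` relies on the usual optional-package pattern: try the EE `rc_reviewers` package first and fall back to the CE implementation on `ImportError`. The same pattern in isolation, as a hedged sketch:

    # Hedged sketch of the EE-first / CE-fallback import used above.
    try:
        from rc_reviewers.utils import get_default_reviewers_data
    except ImportError:
        from rhodecode.apps.repository.utils import get_default_reviewers_data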
2189 | class MergeCheck(object): |
|
2190 | class MergeCheck(object): | |
2190 | """ |
|
2191 | """ | |
2191 | Perform Merge Checks and returns a check object which stores information |
|
2192 | Perform Merge Checks and returns a check object which stores information | |
2192 | about merge errors, and merge conditions |
|
2193 | about merge errors, and merge conditions | |
2193 | """ |
|
2194 | """ | |
2194 | TODO_CHECK = 'todo' |
|
2195 | TODO_CHECK = 'todo' | |
2195 | PERM_CHECK = 'perm' |
|
2196 | PERM_CHECK = 'perm' | |
2196 | REVIEW_CHECK = 'review' |
|
2197 | REVIEW_CHECK = 'review' | |
2197 | MERGE_CHECK = 'merge' |
|
2198 | MERGE_CHECK = 'merge' | |
2198 | WIP_CHECK = 'wip' |
|
2199 | WIP_CHECK = 'wip' | |
2199 |
|
2200 | |||
2200 | def __init__(self): |
|
2201 | def __init__(self): | |
2201 | self.review_status = None |
|
2202 | self.review_status = None | |
2202 | self.merge_possible = None |
|
2203 | self.merge_possible = None | |
2203 | self.merge_msg = '' |
|
2204 | self.merge_msg = '' | |
2204 | self.merge_response = None |
|
2205 | self.merge_response = None | |
2205 | self.failed = None |
|
2206 | self.failed = None | |
2206 | self.errors = [] |
|
2207 | self.errors = [] | |
2207 | self.error_details = OrderedDict() |
|
2208 | self.error_details = OrderedDict() | |
2208 | self.source_commit = AttributeDict() |
|
2209 | self.source_commit = AttributeDict() | |
2209 | self.target_commit = AttributeDict() |
|
2210 | self.target_commit = AttributeDict() | |
2210 | self.reviewers_count = 0 |
|
2211 | self.reviewers_count = 0 | |
2211 | self.observers_count = 0 |
|
2212 | self.observers_count = 0 | |
2212 |
|
2213 | |||
2213 | def __repr__(self): |
|
2214 | def __repr__(self): | |
2214 | return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format( |
|
2215 | return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format( | |
2215 | self.merge_possible, self.failed, self.errors) |
|
2216 | self.merge_possible, self.failed, self.errors) | |
2216 |
|
2217 | |||
2217 | def push_error(self, error_type, message, error_key, details): |
|
2218 | def push_error(self, error_type, message, error_key, details): | |
2218 | self.failed = True |
|
2219 | self.failed = True | |
2219 | self.errors.append([error_type, message]) |
|
2220 | self.errors.append([error_type, message]) | |
2220 | self.error_details[error_key] = dict( |
|
2221 | self.error_details[error_key] = dict( | |
2221 | details=details, |
|
2222 | details=details, | |
2222 | error_type=error_type, |
|
2223 | error_type=error_type, | |
2223 | message=message |
|
2224 | message=message | |
2224 | ) |
|
2225 | ) | |
2225 |
|
2226 | |||
2226 | @classmethod |
|
2227 | @classmethod | |
2227 | def validate(cls, pull_request, auth_user, translator, fail_early=False, |
|
2228 | def validate(cls, pull_request, auth_user, translator, fail_early=False, | |
2228 | force_shadow_repo_refresh=False): |
|
2229 | force_shadow_repo_refresh=False): | |
2229 | _ = translator |
|
2230 | _ = translator | |
2230 | merge_check = cls() |
|
2231 | merge_check = cls() | |
2231 |
|
2232 | |||
2232 | # title has WIP: |
|
2233 | # title has WIP: | |
2233 | if pull_request.work_in_progress: |
|
2234 | if pull_request.work_in_progress: | |
2234 | log.debug("MergeCheck: cannot merge, title has wip: marker.") |
|
2235 | log.debug("MergeCheck: cannot merge, title has wip: marker.") | |
2235 |
|
2236 | |||
2236 | msg = _('WIP marker in title prevents from accidental merge.') |
|
2237 | msg = _('WIP marker in title prevents from accidental merge.') | |
2237 | merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) |
|
2238 | merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) | |
2238 | if fail_early: |
|
2239 | if fail_early: | |
2239 | return merge_check |
|
2240 | return merge_check | |
2240 |
|
2241 | |||
2241 | # permissions to merge |
|
2242 | # permissions to merge | |
2242 | user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user) |
|
2243 | user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user) | |
2243 | if not user_allowed_to_merge: |
|
2244 | if not user_allowed_to_merge: | |
2244 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
2245 | log.debug("MergeCheck: cannot merge, approval is pending.") | |
2245 |
|
2246 | |||
2246 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) |
|
2247 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) | |
2247 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
2248 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) | |
2248 | if fail_early: |
|
2249 | if fail_early: | |
2249 | return merge_check |
|
2250 | return merge_check | |
2250 |
|
2251 | |||
2251 | # permission to merge into the target branch |
|
2252 | # permission to merge into the target branch | |
2252 | target_commit_id = pull_request.target_ref_parts.commit_id |
|
2253 | target_commit_id = pull_request.target_ref_parts.commit_id | |
2253 | if pull_request.target_ref_parts.type == 'branch': |
|
2254 | if pull_request.target_ref_parts.type == 'branch': | |
2254 | branch_name = pull_request.target_ref_parts.name |
|
2255 | branch_name = pull_request.target_ref_parts.name | |
2255 | else: |
|
2256 | else: | |
2256 | # for mercurial we can always figure out the branch from the commit |
|
2257 | # for mercurial we can always figure out the branch from the commit | |
2257 | # in case of bookmark |
|
2258 | # in case of bookmark | |
2258 | target_commit = pull_request.target_repo.get_commit(target_commit_id) |
|
2259 | target_commit = pull_request.target_repo.get_commit(target_commit_id) | |
2259 | branch_name = target_commit.branch |
|
2260 | branch_name = target_commit.branch | |
2260 |
|
2261 | |||
2261 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
2262 | rule, branch_perm = auth_user.get_rule_and_branch_permission( | |
2262 | pull_request.target_repo.repo_name, branch_name) |
|
2263 | pull_request.target_repo.repo_name, branch_name) | |
2263 | if branch_perm and branch_perm == 'branch.none': |
|
2264 | if branch_perm and branch_perm == 'branch.none': | |
2264 | msg = _('Target branch `{}` changes rejected by rule {}.').format( |
|
2265 | msg = _('Target branch `{}` changes rejected by rule {}.').format( | |
2265 | branch_name, rule) |
|
2266 | branch_name, rule) | |
2266 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
2267 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) | |
2267 | if fail_early: |
|
2268 | if fail_early: | |
2268 | return merge_check |
|
2269 | return merge_check | |
2269 |
|
2270 | |||
2270 | # review status, must be always present |
|
2271 | # review status, must be always present | |
2271 | review_status = pull_request.calculated_review_status() |
|
2272 | review_status = pull_request.calculated_review_status() | |
2272 | merge_check.review_status = review_status |
|
2273 | merge_check.review_status = review_status | |
2273 | merge_check.reviewers_count = pull_request.reviewers_count |
|
2274 | merge_check.reviewers_count = pull_request.reviewers_count | |
2274 | merge_check.observers_count = pull_request.observers_count |
|
2275 | merge_check.observers_count = pull_request.observers_count | |
2275 |
|
2276 | |||
2276 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
2277 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED | |
2277 | if not status_approved and merge_check.reviewers_count: |
|
2278 | if not status_approved and merge_check.reviewers_count: | |
2278 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
2279 | log.debug("MergeCheck: cannot merge, approval is pending.") | |
2279 | msg = _('Pull request reviewer approval is pending.') |
|
2280 | msg = _('Pull request reviewer approval is pending.') | |
2280 |
|
2281 | |||
2281 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) |
|
2282 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) | |
2282 |
|
2283 | |||
2283 | if fail_early: |
|
2284 | if fail_early: | |
2284 | return merge_check |
|
2285 | return merge_check | |
2285 |
|
2286 | |||
2286 | # left over TODOs |
|
2287 | # left over TODOs | |
2287 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) |
|
2288 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) | |
2288 | if todos: |
|
2289 | if todos: | |
2289 | log.debug("MergeCheck: cannot merge, {} " |
|
2290 | log.debug("MergeCheck: cannot merge, {} " | |
2290 | "unresolved TODOs left.".format(len(todos))) |
|
2291 | "unresolved TODOs left.".format(len(todos))) | |
2291 |
|
2292 | |||
2292 | if len(todos) == 1: |
|
2293 | if len(todos) == 1: | |
2293 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
2294 | msg = _('Cannot merge, {} TODO still not resolved.').format( | |
2294 | len(todos)) |
|
2295 | len(todos)) | |
2295 | else: |
|
2296 | else: | |
2296 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
2297 | msg = _('Cannot merge, {} TODOs still not resolved.').format( | |
2297 | len(todos)) |
|
2298 | len(todos)) | |
2298 |
|
2299 | |||
2299 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
2300 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) | |
2300 |
|
2301 | |||
2301 | if fail_early: |
|
2302 | if fail_early: | |
2302 | return merge_check |
|
2303 | return merge_check | |
2303 |
|
2304 | |||
2304 | # merge possible, here is the filesystem simulation + shadow repo |
|
2305 | # merge possible, here is the filesystem simulation + shadow repo | |
2305 | merge_response, merge_status, msg = PullRequestModel().merge_status( |
|
2306 | merge_response, merge_status, msg = PullRequestModel().merge_status( | |
2306 | pull_request, translator=translator, |
|
2307 | pull_request, translator=translator, | |
2307 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
2308 | force_shadow_repo_refresh=force_shadow_repo_refresh) | |
2308 |
|
2309 | |||
2309 | merge_check.merge_possible = merge_status |
|
2310 | merge_check.merge_possible = merge_status | |
2310 | merge_check.merge_msg = msg |
|
2311 | merge_check.merge_msg = msg | |
2311 | merge_check.merge_response = merge_response |
|
2312 | merge_check.merge_response = merge_response | |
2312 |
|
2313 | |||
2313 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
2314 | source_ref_id = pull_request.source_ref_parts.commit_id | |
2314 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
2315 | target_ref_id = pull_request.target_ref_parts.commit_id | |
2315 |
|
2316 | |||
2316 | try: |
|
2317 | try: | |
2317 | source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request) |
|
2318 | source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request) | |
2318 | merge_check.source_commit.changed = source_ref_id != source_commit.raw_id |
|
2319 | merge_check.source_commit.changed = source_ref_id != source_commit.raw_id | |
2319 | merge_check.source_commit.ref_spec = pull_request.source_ref_parts |
|
2320 | merge_check.source_commit.ref_spec = pull_request.source_ref_parts | |
2320 | merge_check.source_commit.current_raw_id = source_commit.raw_id |
|
2321 | merge_check.source_commit.current_raw_id = source_commit.raw_id | |
2321 | merge_check.source_commit.previous_raw_id = source_ref_id |
|
2322 | merge_check.source_commit.previous_raw_id = source_ref_id | |
2322 |
|
2323 | |||
2323 | merge_check.target_commit.changed = target_ref_id != target_commit.raw_id |
|
2324 | merge_check.target_commit.changed = target_ref_id != target_commit.raw_id | |
2324 | merge_check.target_commit.ref_spec = pull_request.target_ref_parts |
|
2325 | merge_check.target_commit.ref_spec = pull_request.target_ref_parts | |
2325 | merge_check.target_commit.current_raw_id = target_commit.raw_id |
|
2326 | merge_check.target_commit.current_raw_id = target_commit.raw_id | |
2326 | merge_check.target_commit.previous_raw_id = target_ref_id |
|
2327 | merge_check.target_commit.previous_raw_id = target_ref_id | |
2327 | except (SourceRefMissing, TargetRefMissing): |
|
2328 | except (SourceRefMissing, TargetRefMissing): | |
2328 | pass |
|
2329 | pass | |
2329 |
|
2330 | |||
2330 | if not merge_status: |
|
2331 | if not merge_status: | |
2331 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") |
|
2332 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") | |
2332 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
2333 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) | |
2333 |
|
2334 | |||
2334 | if fail_early: |
|
2335 | if fail_early: | |
2335 | return merge_check |
|
2336 | return merge_check | |
2336 |
|
2337 | |||
2337 | log.debug('MergeCheck: is failed: %s', merge_check.failed) |
|
2338 | log.debug('MergeCheck: is failed: %s', merge_check.failed) | |
2338 | return merge_check |
|
2339 | return merge_check | |
2339 |
|
2340 | |||
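`MergeCheck.validate()` above runs the WIP, merge-permission, branch-rule, review-status, TODO, and merge-simulation checks in that order, returning early only when `fail_early=True`. A hedged usage sketch; `pull_request`, `auth_user`, `request`, and `log` are assumed to come from the calling view and are not defined in this diff:

    # Hedged usage sketch for MergeCheck.validate().
    merge_check = MergeCheck.validate(
        pull_request, auth_user, translator=request.translate,
        force_shadow_repo_refresh=False)
    if merge_check.failed:
        for error_type, message in merge_check.errors:
            log.warning('merge check %s: %s', error_type, message)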
2340 | @classmethod |
|
2341 | @classmethod | |
2341 | def get_merge_conditions(cls, pull_request, translator): |
|
2342 | def get_merge_conditions(cls, pull_request, translator): | |
2342 | _ = translator |
|
2343 | _ = translator | |
2343 | merge_details = {} |
|
2344 | merge_details = {} | |
2344 |
|
2345 | |||
2345 | model = PullRequestModel() |
|
2346 | model = PullRequestModel() | |
2346 | use_rebase = model._use_rebase_for_merging(pull_request) |
|
2347 | use_rebase = model._use_rebase_for_merging(pull_request) | |
2347 |
|
2348 | |||
2348 | if use_rebase: |
|
2349 | if use_rebase: | |
2349 | merge_details['merge_strategy'] = dict( |
|
2350 | merge_details['merge_strategy'] = dict( | |
2350 | details={}, |
|
2351 | details={}, | |
2351 | message=_('Merge strategy: rebase') |
|
2352 | message=_('Merge strategy: rebase') | |
2352 | ) |
|
2353 | ) | |
2353 | else: |
|
2354 | else: | |
2354 | merge_details['merge_strategy'] = dict( |
|
2355 | merge_details['merge_strategy'] = dict( | |
2355 | details={}, |
|
2356 | details={}, | |
2356 | message=_('Merge strategy: explicit merge commit') |
|
2357 | message=_('Merge strategy: explicit merge commit') | |
2357 | ) |
|
2358 | ) | |
2358 |
|
2359 | |||
2359 | close_branch = model._close_branch_before_merging(pull_request) |
|
2360 | close_branch = model._close_branch_before_merging(pull_request) | |
2360 | if close_branch: |
|
2361 | if close_branch: | |
2361 | repo_type = pull_request.target_repo.repo_type |
|
2362 | repo_type = pull_request.target_repo.repo_type | |
2362 | close_msg = '' |
|
2363 | close_msg = '' | |
2363 | if repo_type == 'hg': |
|
2364 | if repo_type == 'hg': | |
2364 | close_msg = _('Source branch will be closed before the merge.') |
|
2365 | close_msg = _('Source branch will be closed before the merge.') | |
2365 | elif repo_type == 'git': |
|
2366 | elif repo_type == 'git': | |
2366 | close_msg = _('Source branch will be deleted after the merge.') |
|
2367 | close_msg = _('Source branch will be deleted after the merge.') | |
2367 |
|
2368 | |||
2368 | merge_details['close_branch'] = dict( |
|
2369 | merge_details['close_branch'] = dict( | |
2369 | details={}, |
|
2370 | details={}, | |
2370 | message=close_msg |
|
2371 | message=close_msg | |
2371 | ) |
|
2372 | ) | |
2372 |
|
2373 | |||
2373 | return merge_details |
|
2374 | return merge_details | |
2374 |
|
2375 | |||
2375 |
|
2376 | |||
2376 | ChangeTuple = collections.namedtuple( |
|
2377 | ChangeTuple = collections.namedtuple( | |
2377 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) |
|
2378 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) | |
2378 |
|
2379 | |||
2379 | FileChangeTuple = collections.namedtuple( |
|
2380 | FileChangeTuple = collections.namedtuple( | |
2380 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
|
2381 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
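`ChangeTuple` and `FileChangeTuple` are simple value holders used when summarising how a pull request update changed its commit set and files. A hedged construction sketch with illustrative values:

    # Hedged sketch; commit ids and file names are illustrative only.
    changes = ChangeTuple(
        added=['abc123'], common=['def456'], removed=[],
        total=['abc123', 'def456'])
    file_changes = FileChangeTuple(
        added=['docs/api.rst'], modified=['setup.py'], removed=[])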