@@ -0,0 +1,40 b''
# Copyright (C) 2010-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import os


def get_config(ini_path, **kwargs):
    import configparser
    parser = configparser.ConfigParser(**kwargs)
    parser.read(ini_path)
    return parser


def get_app_config_lightweight(ini_path):
    parser = get_config(ini_path)
    parser.set('app:main', 'here', os.getcwd())
    parser.set('app:main', '__file__', ini_path)
    return dict(parser.items('app:main'))


def get_app_config(ini_path):
    """
    This loads the app context and provides a heavy type iniliaziation of config
    """
    from paste.deploy.loadwsgi import appconfig
    return appconfig(f'config:{ini_path}', relative_to=os.getcwd())
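A minimal usage sketch of the helpers added above, assuming the new module is importable as vcsserver.lib.config_utils (the import used by hooks.py below); the ini path and the printed key are hypothetical illustrations, not part of this change:

# Hypothetical example only: the ini path below is an assumption.
from vcsserver.lib.config_utils import get_app_config_lightweight

ini_path = '/etc/rhodecode/vcsserver.ini'  # hypothetical path
ini_settings = get_app_config_lightweight(ini_path)

# get_app_config_lightweight() returns a plain dict of the [app:main] section,
# with 'here' and '__file__' injected, without triggering the heavier
# paste.deploy app loading that get_app_config() performs.
print(ini_settings.get('core.binary_dir'))  # example key, read by hooks.py after sanitizing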
@@ -1,826 +1,828 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2023 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import io
import os
import sys
import logging
import collections
import base64
import msgpack
import dataclasses
import pygit2

import http.client
from celery import Celery

import mercurial.scmutil
import mercurial.node

from vcsserver.lib.rc_json import json
from vcsserver import exceptions, subprocessio, settings
from vcsserver.str_utils import ascii_str, safe_str
from vcsserver.remote.git_remote import Repository

celery_app = Celery('__vcsserver__')
log = logging.getLogger(__name__)


class HooksHttpClient:
    proto = 'msgpack.v1'
    connection = None

    def __init__(self, hooks_uri):
        self.hooks_uri = hooks_uri

    def __repr__(self):
        return f'{self.__class__}(hook_uri={self.hooks_uri}, proto={self.proto})'

    def __call__(self, method, extras):
        connection = http.client.HTTPConnection(self.hooks_uri)
        # binary msgpack body
        headers, body = self._serialize(method, extras)
        log.debug('Doing a new hooks call using HTTPConnection to %s', self.hooks_uri)

        try:
            try:
                connection.request('POST', '/', body, headers)
            except Exception as error:
                log.error('Hooks calling Connection failed on %s, org error: %s', connection.__dict__, error)
                raise

            response = connection.getresponse()
            try:
                return msgpack.load(response)
            except Exception:
                response_data = response.read()
                log.exception('Failed to decode hook response json data. '
                              'response_code:%s, raw_data:%s',
                              response.status, response_data)
                raise
        finally:
            connection.close()

    @classmethod
    def _serialize(cls, hook_name, extras):
        data = {
            'method': hook_name,
            'extras': extras
        }
        headers = {
            "rc-hooks-protocol": cls.proto,
            "Connection": "keep-alive"
        }
        return headers, msgpack.packb(data)


class HooksCeleryClient:
    TASK_TIMEOUT = 60  # time in seconds

    def __init__(self, queue, backend):
        celery_app.config_from_object({
            'broker_url': queue, 'result_backend': backend,
            'broker_connection_retry_on_startup': True,
            'task_serializer': 'msgpack',
            'accept_content': ['json', 'msgpack'],
            'result_serializer': 'msgpack',
            'result_accept_content': ['json', 'msgpack']
        })
        self.celery_app = celery_app

    def __call__(self, method, extras):
        inquired_task = self.celery_app.signature(
            f'rhodecode.lib.celerylib.tasks.{method}'
        )
        return inquired_task.delay(extras).get(timeout=self.TASK_TIMEOUT)


class HooksShadowRepoClient:

    def __call__(self, hook_name, extras):
        return {'output': '', 'status': 0}


class RemoteMessageWriter:
    """Writer base class."""
    def write(self, message):
        raise NotImplementedError()


class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message: str):
        # TODO: Check why the quiet flag is set by default.
        old = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = old


class GitMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to git clients."""

    def __init__(self, stdout=None):
        self.stdout = stdout or sys.stdout

    def write(self, message: str):
        self.stdout.write(message)


class SvnMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to svn clients."""

    def __init__(self, stderr=None):
        # SVN needs data sent to stderr for back-to-client messaging
        self.stderr = stderr or sys.stderr

    def write(self, message):
        self.stderr.write(message)


def _handle_exception(result):
    exception_class = result.get('exception')
    exception_traceback = result.get('exception_traceback')
    log.debug('Handling hook-call exception: %s', exception_class)

    if exception_traceback:
        log.error('Got traceback from remote call:%s', exception_traceback)

    if exception_class == 'HTTPLockedRC':
        raise exceptions.RepositoryLockedException()(*result['exception_args'])
    elif exception_class == 'HTTPBranchProtected':
        raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
    elif exception_class == 'RepositoryError':
        raise exceptions.VcsException()(*result['exception_args'])
    elif exception_class:
        raise Exception(
            f"""Got remote exception "{exception_class}" with args "{result['exception_args']}" """
        )


def _get_hooks_client(extras):
    hooks_uri = extras.get('hooks_uri')
    task_queue = extras.get('task_queue')
    task_backend = extras.get('task_backend')
    is_shadow_repo = extras.get('is_shadow_repo')

    if hooks_uri:
        return HooksHttpClient(hooks_uri)
    elif task_queue and task_backend:
        return HooksCeleryClient(task_queue, task_backend)
    elif is_shadow_repo:
        return HooksShadowRepoClient()
    else:
        raise Exception("Hooks client not found!")


def _call_hook(hook_name, extras, writer):
    hooks_client = _get_hooks_client(extras)
    log.debug('Hooks, using client:%s', hooks_client)
    result = hooks_client(hook_name, extras)
    log.debug('Hooks got result: %s', result)
    _handle_exception(result)
    writer.write(result['output'])

    return result['status']


def _extras_from_ui(ui):
    hook_data = ui.config(b'rhodecode', b'RC_SCM_DATA')
    if not hook_data:
        # maybe it's inside environ ?
        env_hook_data = os.environ.get('RC_SCM_DATA')
        if env_hook_data:
            hook_data = env_hook_data

    extras = {}
    if hook_data:
        extras = json.loads(hook_data)
    return extras


def _rev_range_hash(repo, node, check_heads=False):
    from vcsserver.hgcompat import get_ctx

    commits = []
    revs = []
    start = get_ctx(repo, node).rev()
    end = len(repo)
    for rev in range(start, end):
        revs.append(rev)
        ctx = get_ctx(repo, rev)
        commit_id = ascii_str(mercurial.node.hex(ctx.node()))
        branch = safe_str(ctx.branch())
        commits.append((commit_id, branch))

    parent_heads = []
    if check_heads:
        parent_heads = _check_heads(repo, start, end, revs)
    return commits, parent_heads


def _check_heads(repo, start, end, commits):
    from vcsserver.hgcompat import get_ctx
    changelog = repo.changelog
    parents = set()

    for new_rev in commits:
        for p in changelog.parentrevs(new_rev):
            if p == mercurial.node.nullrev:
                continue
            if p < start:
                parents.add(p)

    for p in parents:
        branch = get_ctx(repo, p).branch()
        # The heads descending from that parent, on the same branch
        parent_heads = {p}
        reachable = {p}
        for x in range(p + 1, end):
            if get_ctx(repo, x).branch() != branch:
                continue
            for pp in changelog.parentrevs(x):
                if pp in reachable:
                    reachable.add(x)
                    parent_heads.discard(pp)
                    parent_heads.add(x)
        # More than one head? Suggest merging
        if len(parent_heads) > 1:
            return list(parent_heads)

    return []


def _get_git_env():
    env = {}
    for k, v in os.environ.items():
        if k.startswith('GIT'):
            env[k] = v

    # serialized version
    return [(k, v) for k, v in env.items()]


def _get_hg_env(old_rev, new_rev, txnid, repo_path):
    env = {}
    for k, v in os.environ.items():
        if k.startswith('HG'):
            env[k] = v

    env['HG_NODE'] = old_rev
    env['HG_NODE_LAST'] = new_rev
    env['HG_TXNID'] = txnid
    env['HG_PENDING'] = repo_path

    return [(k, v) for k, v in env.items()]


def _fix_hooks_executables(ini_path=''):
    """
    This is a trick to set proper settings.EXECUTABLE paths for certain execution patterns
    especially for subversion where hooks strip entire env, and calling just 'svn' command will most likely fail
    because svn is not on PATH
    """
    from vcsserver.http_main import sanitize_settings_and_apply_defaults
    from vcsserver.lib.config_utils import get_app_config_lightweight

    core_binary_dir = settings.BINARY_DIR or '/usr/local/bin/rhodecode_bin/vcs_bin'
    if ini_path:

        ini_settings = get_app_config_lightweight(ini_path)
        ini_settings = sanitize_settings_and_apply_defaults({'__file__': ini_path}, ini_settings)
        core_binary_dir = ini_settings['core.binary_dir']

    settings.BINARY_DIR = core_binary_dir


def repo_size(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    return _call_hook('repo_size', extras, HgMessageWriter(ui))


def pre_pull(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    return _call_hook('pre_pull', extras, HgMessageWriter(ui))


def pre_pull_ssh(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    if extras and extras.get('SSH'):
        return pre_pull(ui, repo, **kwargs)
    return 0


def post_pull(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    return _call_hook('post_pull', extras, HgMessageWriter(ui))


def post_pull_ssh(ui, repo, **kwargs):
    extras = _extras_from_ui(ui)
    if extras and extras.get('SSH'):
        return post_pull(ui, repo, **kwargs)
    return 0


def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    hook_type: str = safe_str(kwargs.get('hooktype'))

    if node and hook_type == 'pretxnchangegroup':
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        for branch, commits in branches.items():
            old_rev = ascii_str(kwargs.get('node_last')) or commits[0]
            rev_data.append({
                'total_commits': len(commits),
                'old_rev': old_rev,
                'new_rev': commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        for push_ref in rev_data:
            push_ref['multiple_heads'] = _heads

            repo_path = os.path.join(
                extras.get('repo_store', ''), extras.get('repository', ''))
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=ascii_str(kwargs.get('txnid')),
                repo_path=repo_path)

    extras['hook_type'] = hook_type or 'pre_push'
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))


def pre_push_ssh(ui, repo, node=None, **kwargs):
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        return pre_push(ui, repo, node, **kwargs)

    return 0


def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH
    """
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        permission = extras['SSH_PERMISSIONS']

        if 'repository.write' == permission or 'repository.admin' == permission:
            return 0

        # non-zero ret code
        return 1

    return 0


def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook
    """
    extras = _extras_from_ui(ui)

    commit_ids = []
    branches = []
    bookmarks = []
    tags = []
    hook_type: str = safe_str(kwargs.get('hooktype'))

    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    if hasattr(ui, '_rc_pushkey_bookmarks'):
        bookmarks = ui._rc_pushkey_bookmarks

    extras['hook_type'] = hook_type or 'post_push'
    extras['commit_ids'] = commit_ids

    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))


def post_push_ssh(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook for SSH
    """
    if _extras_from_ui(ui).get('SSH'):
        return post_push(ui, repo, node, **kwargs)
    return 0


def key_push(ui, repo, **kwargs):
    from vcsserver.hgcompat import get_ctx

    if kwargs['new'] != b'0' and kwargs['namespace'] == b'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_bookmarks = get_ctx(repo, kwargs['key']).bookmarks()
    return


# backward compat
log_pull_action = post_pull

# backward compat
log_push_action = post_push


def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    pass


def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    """
    pass


@dataclasses.dataclass
class HookResponse:
    status: int
    output: str


def git_pre_pull(extras) -> HookResponse:
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """

    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    stdout = io.StringIO()
    try:
        status_code = _call_hook('pre_pull', extras, GitMessageWriter(stdout))

    except Exception as error:
        log.exception('Failed to call pre_pull hook')
        status_code = 128
        stdout.write(f'ERROR: {error}\n')

    return HookResponse(status_code, stdout.getvalue())


def git_post_pull(extras) -> HookResponse:
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    stdout = io.StringIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
    except Exception as error:
        status = 128
        stdout.write(f'ERROR: {error}\n')

    return HookResponse(status, stdout.getvalue())


def _parse_git_ref_lines(revision_lines):
    rev_data = []
    for revision_line in revision_lines or []:
        old_rev, new_rev, ref = revision_line.strip().split(' ')
        ref_data = ref.split('/', 2)
        if ref_data[1] in ('tags', 'heads'):
            rev_data.append({
                # NOTE(marcink):
                # we're unable to tell total_commits for git at this point
                # but we set the variable for consistency with GIT
                'total_commits': -1,
                'old_rev': old_rev,
                'new_rev': new_rev,
                'ref': ref,
                'type': ref_data[1],
                'name': ref_data[2],
            })
    return rev_data


def git_pre_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Pre push hook.

    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0
    _fix_hooks_executables()

    empty_commit_id = '0' * 40

    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        type_ = push_ref['type']
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if type_ == 'heads' and not (new_branch or delete_branch):
            old_rev = push_ref['old_rev']
            new_rev = push_ref['new_rev']
            cmd = [settings.GIT_EXECUTABLE(), 'rev-list', old_rev, f'^{new_rev}']
            stdout, stderr = subprocessio.run_command(
                cmd, env=os.environ.copy())
            # means we're having some non-reachable objects, this forced push was used
            if stdout:
                push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data

    stdout = sys.stdout
    status_code = _call_hook('pre_push', extras, GitMessageWriter(stdout))

    return status_code


def git_post_receive(unused_repo_path, revision_lines, env) -> int:
    """
    Post push hook.

    :return: status code of the hook. 0 for success.
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    _fix_hooks_executables()

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            # starting new branch case
            if push_ref['old_rev'] == empty_commit_id:
                push_ref_name = push_ref['name']

                if push_ref_name not in branches:
                    branches.append(push_ref_name)

                need_head_set = ''
                with Repository(os.getcwd()) as repo:
                    try:
                        repo.head
                    except pygit2.GitError:
                        need_head_set = f'refs/heads/{push_ref_name}'

                    if need_head_set:
                        repo.set_head(need_head_set)
                        print(f"Setting default branch to {push_ref_name}")

                cmd = [settings.GIT_EXECUTABLE(), 'for-each-ref', '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = safe_str(stdout)
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE(), 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(list(map(ascii_str, stdout.splitlines())))

            # delete branch case
            elif push_ref['new_rev'] == empty_commit_id:
                git_revs.append(f'delete_branch=>{push_ref["name"]}')
            else:
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE(), 'log',
                       f'{push_ref["old_rev"]}..{push_ref["new_rev"]}',
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                # we get bytes from stdout, we need str to be consistent
                log_revs = list(map(ascii_str, stdout.splitlines()))
                git_revs.extend(log_revs)

                # Pure pygit2 impl. but still 2-3x slower :/
                # results = []
                #
                # with Repository(os.getcwd()) as repo:
                #     repo_new_rev = repo[push_ref['new_rev']]
                #     repo_old_rev = repo[push_ref['old_rev']]
                #     walker = repo.walk(repo_new_rev.id, pygit2.GIT_SORT_TOPOLOGICAL)
                #
                #     for commit in walker:
                #         if commit.id == repo_old_rev.id:
                #             break
                #         results.append(commit.id.hex)
                #     # reverse the order, can't use GIT_SORT_REVERSE
                #     log_revs = results[::-1]

        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append(f'tag=>{push_ref["name"]}')

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    stdout = sys.stdout

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter(stdout))
        except Exception:
            pass

    status_code = _call_hook('post_push', extras, GitMessageWriter(stdout))
    return status_code


def _get_extras_from_txn_id(path, txn_id):
    _fix_hooks_executables()

    extras = {}
    try:
        cmd = [settings.SVNLOOK_EXECUTABLE(), 'pget',
               '-t', txn_id,
               '--revprop', path, 'rc-scm-extras']
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')

    return extras


def _get_extras_from_commit_id(commit_id, path):
    _fix_hooks_executables()

    extras = {}
    try:
        cmd = [settings.SVNLOOK_EXECUTABLE(), 'pget',
               '-r', commit_id,
               '--revprop', path, 'rc-scm-extras']
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')

    return extras


def svn_pre_commit(repo_path, commit_data, env):

    path, txn_id = commit_data
    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
-        if not extras:
-            return 0
+
+    if not extras:
+        raise ValueError('Failed to extract context data called extras for hook execution')

    extras['hook_type'] = 'pre_commit'
    extras['commit_ids'] = [txn_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'total_commits': 1,
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    return _call_hook('pre_push', extras, SvnMessageWriter())


def svn_post_commit(repo_path, commit_data, env):
    """
    commit_data is path, rev, txn_id
    """

    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    else:
        return 0

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
-        if not extras:
-            return 0
+
+    if not extras:
+        raise ValueError('Failed to extract context data called extras for hook execution')

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
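For context on the svn fallback path changed above: _get_extras_from_txn_id and _get_extras_from_commit_id recover the extras dict from the 'rc-scm-extras' revision property as urlsafe base64 wrapping JSON. A small round-trip sketch follows; the extras values are hypothetical, and only the decode side mirrors the calls in the helpers above.

import base64
import json

# Hypothetical payload; real payloads are produced by RhodeCode when it stores rc-scm-extras.
extras = {'repository': 'my-repo', 'hooks': ['push']}

# Assumed encoding side (the inverse of the decode used in the helpers above).
encoded = base64.urlsafe_b64encode(json.dumps(extras).encode('utf-8'))

# Decode side, as in _get_extras_from_txn_id() / _get_extras_from_commit_id().
decoded = json.loads(base64.urlsafe_b64decode(encoded))
assert decoded == extras

# With this change, svn_pre_commit/svn_post_commit raise instead of silently returning 0
# when no extras can be recovered:
if not decoded:
    raise ValueError('Failed to extract context data called extras for hook execution')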