py3: drop xrange
marcink
r982:9917a6cc python3
@@ -1,54 +1,54 @@
"""
Implementation of :class:`EchoApp`.

This WSGI application will just echo back the data which it recieves.
"""

import logging


log = logging.getLogger(__name__)


class EchoApp(object):

    def __init__(self, repo_path, repo_name, config):
        self._repo_path = repo_path
        log.info("EchoApp initialized for %s", repo_path)

    def __call__(self, environ, start_response):
        log.debug("EchoApp called for %s", self._repo_path)
        log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
        environ['wsgi.input'].read()
        status = '200 OK'
        headers = [('Content-Type', 'text/plain')]
        start_response(status, headers)
        return ["ECHO"]


class EchoAppStream(object):

    def __init__(self, repo_path, repo_name, config):
        self._repo_path = repo_path
        log.info("EchoApp initialized for %s", repo_path)

    def __call__(self, environ, start_response):
        log.debug("EchoApp called for %s", self._repo_path)
        log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
        environ['wsgi.input'].read()
        status = '200 OK'
        headers = [('Content-Type', 'text/plain')]
        start_response(status, headers)

        def generator():
-           for _ in xrange(1000000):
+           for _ in range(1000000):
                yield "ECHO"
        return generator()


def create_app():
    """
    Allows to run this app directly in a WSGI server.
    """
    stub_config = {}
    return EchoApp('stub_path', 'stub_name', stub_config)
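
An illustrative aside on the change above, not part of the commit: on Python 2, range() built a full list and xrange() was the lazy variant; Python 3 removed xrange and made range() itself lazy, so swapping the names preserves the streaming behaviour of this generator. A minimal standalone sketch of the same pattern follows; the echo_chunks helper and the five-item slice are made up for illustration only.

import itertools


def echo_chunks(count=1000000):
    # On Python 3, range() yields values lazily, so no million-element list
    # is materialized up front -- the property the old py2 code got from xrange().
    for _ in range(count):
        yield "ECHO"


if __name__ == '__main__':
    # Take only the first five chunks to show the stream is consumed lazily.
    for chunk in itertools.islice(echo_chunks(), 5):
        print(chunk)
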
@@ -1,1009 +1,1009 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase, purge
25 from hgext import largefiles, rebase, purge
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30 from mercurial import repair
30 from mercurial import repair
31
31
32 import vcsserver
32 import vcsserver
33 from vcsserver import exceptions
33 from vcsserver import exceptions
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
35 from vcsserver.hgcompat import (
35 from vcsserver.hgcompat import (
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 RepoLookupError, InterventionRequired, RequirementError)
40 RepoLookupError, InterventionRequired, RequirementError)
41 from vcsserver.vcs_base import RemoteBase
41 from vcsserver.vcs_base import RemoteBase
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 def make_ui_from_config(repo_config):
46 def make_ui_from_config(repo_config):
47
47
48 class LoggingUI(ui.ui):
48 class LoggingUI(ui.ui):
49 def status(self, *msg, **opts):
49 def status(self, *msg, **opts):
50 log.info(' '.join(msg).rstrip('\n'))
50 log.info(' '.join(msg).rstrip('\n'))
51 super(LoggingUI, self).status(*msg, **opts)
51 super(LoggingUI, self).status(*msg, **opts)
52
52
53 def warn(self, *msg, **opts):
53 def warn(self, *msg, **opts):
54 log.warn(' '.join(msg).rstrip('\n'))
54 log.warn(' '.join(msg).rstrip('\n'))
55 super(LoggingUI, self).warn(*msg, **opts)
55 super(LoggingUI, self).warn(*msg, **opts)
56
56
57 def error(self, *msg, **opts):
57 def error(self, *msg, **opts):
58 log.error(' '.join(msg).rstrip('\n'))
58 log.error(' '.join(msg).rstrip('\n'))
59 super(LoggingUI, self).error(*msg, **opts)
59 super(LoggingUI, self).error(*msg, **opts)
60
60
61 def note(self, *msg, **opts):
61 def note(self, *msg, **opts):
62 log.info(' '.join(msg).rstrip('\n'))
62 log.info(' '.join(msg).rstrip('\n'))
63 super(LoggingUI, self).note(*msg, **opts)
63 super(LoggingUI, self).note(*msg, **opts)
64
64
65 def debug(self, *msg, **opts):
65 def debug(self, *msg, **opts):
66 log.debug(' '.join(msg).rstrip('\n'))
66 log.debug(' '.join(msg).rstrip('\n'))
67 super(LoggingUI, self).debug(*msg, **opts)
67 super(LoggingUI, self).debug(*msg, **opts)
68
68
69 baseui = LoggingUI()
69 baseui = LoggingUI()
70
70
71 # clean the baseui object
71 # clean the baseui object
72 baseui._ocfg = hgconfig.config()
72 baseui._ocfg = hgconfig.config()
73 baseui._ucfg = hgconfig.config()
73 baseui._ucfg = hgconfig.config()
74 baseui._tcfg = hgconfig.config()
74 baseui._tcfg = hgconfig.config()
75
75
76 for section, option, value in repo_config:
76 for section, option, value in repo_config:
77 baseui.setconfig(section, option, value)
77 baseui.setconfig(section, option, value)
78
78
79 # make our hgweb quiet so it doesn't print output
79 # make our hgweb quiet so it doesn't print output
80 baseui.setconfig('ui', 'quiet', 'true')
80 baseui.setconfig('ui', 'quiet', 'true')
81
81
82 baseui.setconfig('ui', 'paginate', 'never')
82 baseui.setconfig('ui', 'paginate', 'never')
83 # for better Error reporting of Mercurial
83 # for better Error reporting of Mercurial
84 baseui.setconfig('ui', 'message-output', 'stderr')
84 baseui.setconfig('ui', 'message-output', 'stderr')
85
85
86 # force mercurial to only use 1 thread, otherwise it may try to set a
86 # force mercurial to only use 1 thread, otherwise it may try to set a
87 # signal in a non-main thread, thus generating a ValueError.
87 # signal in a non-main thread, thus generating a ValueError.
88 baseui.setconfig('worker', 'numcpus', 1)
88 baseui.setconfig('worker', 'numcpus', 1)
89
89
90 # If there is no config for the largefiles extension, we explicitly disable
90 # If there is no config for the largefiles extension, we explicitly disable
91 # it here. This overrides settings from repositories hgrc file. Recent
91 # it here. This overrides settings from repositories hgrc file. Recent
92 # mercurial versions enable largefiles in hgrc on clone from largefile
92 # mercurial versions enable largefiles in hgrc on clone from largefile
93 # repo.
93 # repo.
94 if not baseui.hasconfig('extensions', 'largefiles'):
94 if not baseui.hasconfig('extensions', 'largefiles'):
95 log.debug('Explicitly disable largefiles extension for repo.')
95 log.debug('Explicitly disable largefiles extension for repo.')
96 baseui.setconfig('extensions', 'largefiles', '!')
96 baseui.setconfig('extensions', 'largefiles', '!')
97
97
98 return baseui
98 return baseui
99
99
100
100
101 def reraise_safe_exceptions(func):
101 def reraise_safe_exceptions(func):
102 """Decorator for converting mercurial exceptions to something neutral."""
102 """Decorator for converting mercurial exceptions to something neutral."""
103
103
104 def wrapper(*args, **kwargs):
104 def wrapper(*args, **kwargs):
105 try:
105 try:
106 return func(*args, **kwargs)
106 return func(*args, **kwargs)
107 except (Abort, InterventionRequired) as e:
107 except (Abort, InterventionRequired) as e:
108 raise_from_original(exceptions.AbortException(e))
108 raise_from_original(exceptions.AbortException(e))
109 except RepoLookupError as e:
109 except RepoLookupError as e:
110 raise_from_original(exceptions.LookupException(e))
110 raise_from_original(exceptions.LookupException(e))
111 except RequirementError as e:
111 except RequirementError as e:
112 raise_from_original(exceptions.RequirementException(e))
112 raise_from_original(exceptions.RequirementException(e))
113 except RepoError as e:
113 except RepoError as e:
114 raise_from_original(exceptions.VcsException(e))
114 raise_from_original(exceptions.VcsException(e))
115 except LookupError as e:
115 except LookupError as e:
116 raise_from_original(exceptions.LookupException(e))
116 raise_from_original(exceptions.LookupException(e))
117 except Exception as e:
117 except Exception as e:
118 if not hasattr(e, '_vcs_kind'):
118 if not hasattr(e, '_vcs_kind'):
119 log.exception("Unhandled exception in hg remote call")
119 log.exception("Unhandled exception in hg remote call")
120 raise_from_original(exceptions.UnhandledException(e))
120 raise_from_original(exceptions.UnhandledException(e))
121
121
122 raise
122 raise
123 return wrapper
123 return wrapper
124
124
125
125
126 class MercurialFactory(RepoFactory):
126 class MercurialFactory(RepoFactory):
127 repo_type = 'hg'
127 repo_type = 'hg'
128
128
129 def _create_config(self, config, hooks=True):
129 def _create_config(self, config, hooks=True):
130 if not hooks:
130 if not hooks:
131 hooks_to_clean = frozenset((
131 hooks_to_clean = frozenset((
132 'changegroup.repo_size', 'preoutgoing.pre_pull',
132 'changegroup.repo_size', 'preoutgoing.pre_pull',
133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
134 new_config = []
134 new_config = []
135 for section, option, value in config:
135 for section, option, value in config:
136 if section == 'hooks' and option in hooks_to_clean:
136 if section == 'hooks' and option in hooks_to_clean:
137 continue
137 continue
138 new_config.append((section, option, value))
138 new_config.append((section, option, value))
139 config = new_config
139 config = new_config
140
140
141 baseui = make_ui_from_config(config)
141 baseui = make_ui_from_config(config)
142 return baseui
142 return baseui
143
143
144 def _create_repo(self, wire, create):
144 def _create_repo(self, wire, create):
145 baseui = self._create_config(wire["config"])
145 baseui = self._create_config(wire["config"])
146 return instance(baseui, wire["path"], create)
146 return instance(baseui, wire["path"], create)
147
147
148 def repo(self, wire, create=False):
148 def repo(self, wire, create=False):
149 """
149 """
150 Get a repository instance for the given path.
150 Get a repository instance for the given path.
151 """
151 """
152 return self._create_repo(wire, create)
152 return self._create_repo(wire, create)
153
153
154
154
155 def patch_ui_message_output(baseui):
155 def patch_ui_message_output(baseui):
156 baseui.setconfig('ui', 'quiet', 'false')
156 baseui.setconfig('ui', 'quiet', 'false')
157 output = io.BytesIO()
157 output = io.BytesIO()
158
158
159 def write(data, **unused_kwargs):
159 def write(data, **unused_kwargs):
160 output.write(data)
160 output.write(data)
161
161
162 baseui.status = write
162 baseui.status = write
163 baseui.write = write
163 baseui.write = write
164 baseui.warn = write
164 baseui.warn = write
165 baseui.debug = write
165 baseui.debug = write
166
166
167 return baseui, output
167 return baseui, output
168
168
169
169
170 class HgRemote(RemoteBase):
170 class HgRemote(RemoteBase):
171
171
172 def __init__(self, factory):
172 def __init__(self, factory):
173 self._factory = factory
173 self._factory = factory
174 self._bulk_methods = {
174 self._bulk_methods = {
175 "affected_files": self.ctx_files,
175 "affected_files": self.ctx_files,
176 "author": self.ctx_user,
176 "author": self.ctx_user,
177 "branch": self.ctx_branch,
177 "branch": self.ctx_branch,
178 "children": self.ctx_children,
178 "children": self.ctx_children,
179 "date": self.ctx_date,
179 "date": self.ctx_date,
180 "message": self.ctx_description,
180 "message": self.ctx_description,
181 "parents": self.ctx_parents,
181 "parents": self.ctx_parents,
182 "status": self.ctx_status,
182 "status": self.ctx_status,
183 "obsolete": self.ctx_obsolete,
183 "obsolete": self.ctx_obsolete,
184 "phase": self.ctx_phase,
184 "phase": self.ctx_phase,
185 "hidden": self.ctx_hidden,
185 "hidden": self.ctx_hidden,
186 "_file_paths": self.ctx_list,
186 "_file_paths": self.ctx_list,
187 }
187 }
188
188
189 def _get_ctx(self, repo, ref):
189 def _get_ctx(self, repo, ref):
190 return get_ctx(repo, ref)
190 return get_ctx(repo, ref)
191
191
192 @reraise_safe_exceptions
192 @reraise_safe_exceptions
193 def discover_hg_version(self):
193 def discover_hg_version(self):
194 from mercurial import util
194 from mercurial import util
195 return util.version()
195 return util.version()
196
196
197 @reraise_safe_exceptions
197 @reraise_safe_exceptions
198 def is_empty(self, wire):
198 def is_empty(self, wire):
199 repo = self._factory.repo(wire)
199 repo = self._factory.repo(wire)
200
200
201 try:
201 try:
202 return len(repo) == 0
202 return len(repo) == 0
203 except Exception:
203 except Exception:
204 log.exception("failed to read object_store")
204 log.exception("failed to read object_store")
205 return False
205 return False
206
206
207 @reraise_safe_exceptions
207 @reraise_safe_exceptions
208 def archive_repo(self, archive_path, mtime, file_info, kind):
208 def archive_repo(self, archive_path, mtime, file_info, kind):
209 if kind == "tgz":
209 if kind == "tgz":
210 archiver = archival.tarit(archive_path, mtime, "gz")
210 archiver = archival.tarit(archive_path, mtime, "gz")
211 elif kind == "tbz2":
211 elif kind == "tbz2":
212 archiver = archival.tarit(archive_path, mtime, "bz2")
212 archiver = archival.tarit(archive_path, mtime, "bz2")
213 elif kind == 'zip':
213 elif kind == 'zip':
214 archiver = archival.zipit(archive_path, mtime)
214 archiver = archival.zipit(archive_path, mtime)
215 else:
215 else:
216 raise exceptions.ArchiveException()(
216 raise exceptions.ArchiveException()(
217 'Remote does not support: "%s".' % kind)
217 'Remote does not support: "%s".' % kind)
218
218
219 for f_path, f_mode, f_is_link, f_content in file_info:
219 for f_path, f_mode, f_is_link, f_content in file_info:
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
221 archiver.done()
221 archiver.done()
222
222
223 @reraise_safe_exceptions
223 @reraise_safe_exceptions
224 def bookmarks(self, wire):
224 def bookmarks(self, wire):
225 cache_on, context_uid, repo_id = self._cache_on(wire)
225 cache_on, context_uid, repo_id = self._cache_on(wire)
226 @self.region.conditional_cache_on_arguments(condition=cache_on)
226 @self.region.conditional_cache_on_arguments(condition=cache_on)
227 def _bookmarks(_context_uid, _repo_id):
227 def _bookmarks(_context_uid, _repo_id):
228 repo = self._factory.repo(wire)
228 repo = self._factory.repo(wire)
229 return dict(repo._bookmarks)
229 return dict(repo._bookmarks)
230
230
231 return _bookmarks(context_uid, repo_id)
231 return _bookmarks(context_uid, repo_id)
232
232
233 @reraise_safe_exceptions
233 @reraise_safe_exceptions
234 def branches(self, wire, normal, closed):
234 def branches(self, wire, normal, closed):
235 cache_on, context_uid, repo_id = self._cache_on(wire)
235 cache_on, context_uid, repo_id = self._cache_on(wire)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 def _branches(_context_uid, _repo_id, _normal, _closed):
237 def _branches(_context_uid, _repo_id, _normal, _closed):
238 repo = self._factory.repo(wire)
238 repo = self._factory.repo(wire)
239 iter_branches = repo.branchmap().iterbranches()
239 iter_branches = repo.branchmap().iterbranches()
240 bt = {}
240 bt = {}
241 for branch_name, _heads, tip, is_closed in iter_branches:
241 for branch_name, _heads, tip, is_closed in iter_branches:
242 if normal and not is_closed:
242 if normal and not is_closed:
243 bt[branch_name] = tip
243 bt[branch_name] = tip
244 if closed and is_closed:
244 if closed and is_closed:
245 bt[branch_name] = tip
245 bt[branch_name] = tip
246
246
247 return bt
247 return bt
248
248
249 return _branches(context_uid, repo_id, normal, closed)
249 return _branches(context_uid, repo_id, normal, closed)
250
250
251 @reraise_safe_exceptions
251 @reraise_safe_exceptions
252 def bulk_request(self, wire, commit_id, pre_load):
252 def bulk_request(self, wire, commit_id, pre_load):
253 cache_on, context_uid, repo_id = self._cache_on(wire)
253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 @self.region.conditional_cache_on_arguments(condition=cache_on)
254 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 def _bulk_request(_repo_id, _commit_id, _pre_load):
255 def _bulk_request(_repo_id, _commit_id, _pre_load):
256 result = {}
256 result = {}
257 for attr in pre_load:
257 for attr in pre_load:
258 try:
258 try:
259 method = self._bulk_methods[attr]
259 method = self._bulk_methods[attr]
260 result[attr] = method(wire, commit_id)
260 result[attr] = method(wire, commit_id)
261 except KeyError as e:
261 except KeyError as e:
262 raise exceptions.VcsException(e)(
262 raise exceptions.VcsException(e)(
263 'Unknown bulk attribute: "%s"' % attr)
263 'Unknown bulk attribute: "%s"' % attr)
264 return result
264 return result
265
265
266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
267
267
268 @reraise_safe_exceptions
268 @reraise_safe_exceptions
269 def ctx_branch(self, wire, commit_id):
269 def ctx_branch(self, wire, commit_id):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 def _ctx_branch(_repo_id, _commit_id):
272 def _ctx_branch(_repo_id, _commit_id):
273 repo = self._factory.repo(wire)
273 repo = self._factory.repo(wire)
274 ctx = self._get_ctx(repo, commit_id)
274 ctx = self._get_ctx(repo, commit_id)
275 return ctx.branch()
275 return ctx.branch()
276 return _ctx_branch(repo_id, commit_id)
276 return _ctx_branch(repo_id, commit_id)
277
277
278 @reraise_safe_exceptions
278 @reraise_safe_exceptions
279 def ctx_date(self, wire, commit_id):
279 def ctx_date(self, wire, commit_id):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 def _ctx_date(_repo_id, _commit_id):
282 def _ctx_date(_repo_id, _commit_id):
283 repo = self._factory.repo(wire)
283 repo = self._factory.repo(wire)
284 ctx = self._get_ctx(repo, commit_id)
284 ctx = self._get_ctx(repo, commit_id)
285 return ctx.date()
285 return ctx.date()
286 return _ctx_date(repo_id, commit_id)
286 return _ctx_date(repo_id, commit_id)
287
287
288 @reraise_safe_exceptions
288 @reraise_safe_exceptions
289 def ctx_description(self, wire, revision):
289 def ctx_description(self, wire, revision):
290 repo = self._factory.repo(wire)
290 repo = self._factory.repo(wire)
291 ctx = self._get_ctx(repo, revision)
291 ctx = self._get_ctx(repo, revision)
292 return ctx.description()
292 return ctx.description()
293
293
294 @reraise_safe_exceptions
294 @reraise_safe_exceptions
295 def ctx_files(self, wire, commit_id):
295 def ctx_files(self, wire, commit_id):
296 cache_on, context_uid, repo_id = self._cache_on(wire)
296 cache_on, context_uid, repo_id = self._cache_on(wire)
297 @self.region.conditional_cache_on_arguments(condition=cache_on)
297 @self.region.conditional_cache_on_arguments(condition=cache_on)
298 def _ctx_files(_repo_id, _commit_id):
298 def _ctx_files(_repo_id, _commit_id):
299 repo = self._factory.repo(wire)
299 repo = self._factory.repo(wire)
300 ctx = self._get_ctx(repo, commit_id)
300 ctx = self._get_ctx(repo, commit_id)
301 return ctx.files()
301 return ctx.files()
302
302
303 return _ctx_files(repo_id, commit_id)
303 return _ctx_files(repo_id, commit_id)
304
304
305 @reraise_safe_exceptions
305 @reraise_safe_exceptions
306 def ctx_list(self, path, revision):
306 def ctx_list(self, path, revision):
307 repo = self._factory.repo(path)
307 repo = self._factory.repo(path)
308 ctx = self._get_ctx(repo, revision)
308 ctx = self._get_ctx(repo, revision)
309 return list(ctx)
309 return list(ctx)
310
310
311 @reraise_safe_exceptions
311 @reraise_safe_exceptions
312 def ctx_parents(self, wire, commit_id):
312 def ctx_parents(self, wire, commit_id):
313 cache_on, context_uid, repo_id = self._cache_on(wire)
313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 def _ctx_parents(_repo_id, _commit_id):
315 def _ctx_parents(_repo_id, _commit_id):
316 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
317 ctx = self._get_ctx(repo, commit_id)
317 ctx = self._get_ctx(repo, commit_id)
318 return [parent.hex() for parent in ctx.parents()
318 return [parent.hex() for parent in ctx.parents()
319 if not (parent.hidden() or parent.obsolete())]
319 if not (parent.hidden() or parent.obsolete())]
320
320
321 return _ctx_parents(repo_id, commit_id)
321 return _ctx_parents(repo_id, commit_id)
322
322
323 @reraise_safe_exceptions
323 @reraise_safe_exceptions
324 def ctx_children(self, wire, commit_id):
324 def ctx_children(self, wire, commit_id):
325 cache_on, context_uid, repo_id = self._cache_on(wire)
325 cache_on, context_uid, repo_id = self._cache_on(wire)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 def _ctx_children(_repo_id, _commit_id):
327 def _ctx_children(_repo_id, _commit_id):
328 repo = self._factory.repo(wire)
328 repo = self._factory.repo(wire)
329 ctx = self._get_ctx(repo, commit_id)
329 ctx = self._get_ctx(repo, commit_id)
330 return [child.hex() for child in ctx.children()
330 return [child.hex() for child in ctx.children()
331 if not (child.hidden() or child.obsolete())]
331 if not (child.hidden() or child.obsolete())]
332
332
333 return _ctx_children(repo_id, commit_id)
333 return _ctx_children(repo_id, commit_id)
334
334
335 @reraise_safe_exceptions
335 @reraise_safe_exceptions
336 def ctx_phase(self, wire, commit_id):
336 def ctx_phase(self, wire, commit_id):
337 cache_on, context_uid, repo_id = self._cache_on(wire)
337 cache_on, context_uid, repo_id = self._cache_on(wire)
338 @self.region.conditional_cache_on_arguments(condition=cache_on)
338 @self.region.conditional_cache_on_arguments(condition=cache_on)
339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
340 repo = self._factory.repo(wire)
340 repo = self._factory.repo(wire)
341 ctx = self._get_ctx(repo, commit_id)
341 ctx = self._get_ctx(repo, commit_id)
342 # public=0, draft=1, secret=3
342 # public=0, draft=1, secret=3
343 return ctx.phase()
343 return ctx.phase()
344 return _ctx_phase(context_uid, repo_id, commit_id)
344 return _ctx_phase(context_uid, repo_id, commit_id)
345
345
346 @reraise_safe_exceptions
346 @reraise_safe_exceptions
347 def ctx_obsolete(self, wire, commit_id):
347 def ctx_obsolete(self, wire, commit_id):
348 cache_on, context_uid, repo_id = self._cache_on(wire)
348 cache_on, context_uid, repo_id = self._cache_on(wire)
349 @self.region.conditional_cache_on_arguments(condition=cache_on)
349 @self.region.conditional_cache_on_arguments(condition=cache_on)
350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
351 repo = self._factory.repo(wire)
351 repo = self._factory.repo(wire)
352 ctx = self._get_ctx(repo, commit_id)
352 ctx = self._get_ctx(repo, commit_id)
353 return ctx.obsolete()
353 return ctx.obsolete()
354 return _ctx_obsolete(context_uid, repo_id, commit_id)
354 return _ctx_obsolete(context_uid, repo_id, commit_id)
355
355
356 @reraise_safe_exceptions
356 @reraise_safe_exceptions
357 def ctx_hidden(self, wire, commit_id):
357 def ctx_hidden(self, wire, commit_id):
358 cache_on, context_uid, repo_id = self._cache_on(wire)
358 cache_on, context_uid, repo_id = self._cache_on(wire)
359 @self.region.conditional_cache_on_arguments(condition=cache_on)
359 @self.region.conditional_cache_on_arguments(condition=cache_on)
360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
361 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, commit_id)
362 ctx = self._get_ctx(repo, commit_id)
363 return ctx.hidden()
363 return ctx.hidden()
364 return _ctx_hidden(context_uid, repo_id, commit_id)
364 return _ctx_hidden(context_uid, repo_id, commit_id)
365
365
366 @reraise_safe_exceptions
366 @reraise_safe_exceptions
367 def ctx_substate(self, wire, revision):
367 def ctx_substate(self, wire, revision):
368 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
369 ctx = self._get_ctx(repo, revision)
369 ctx = self._get_ctx(repo, revision)
370 return ctx.substate
370 return ctx.substate
371
371
372 @reraise_safe_exceptions
372 @reraise_safe_exceptions
373 def ctx_status(self, wire, revision):
373 def ctx_status(self, wire, revision):
374 repo = self._factory.repo(wire)
374 repo = self._factory.repo(wire)
375 ctx = self._get_ctx(repo, revision)
375 ctx = self._get_ctx(repo, revision)
376 status = repo[ctx.p1().node()].status(other=ctx.node())
376 status = repo[ctx.p1().node()].status(other=ctx.node())
377 # object of status (odd, custom named tuple in mercurial) is not
377 # object of status (odd, custom named tuple in mercurial) is not
378 # correctly serializable, we make it a list, as the underling
378 # correctly serializable, we make it a list, as the underling
379 # API expects this to be a list
379 # API expects this to be a list
380 return list(status)
380 return list(status)
381
381
382 @reraise_safe_exceptions
382 @reraise_safe_exceptions
383 def ctx_user(self, wire, revision):
383 def ctx_user(self, wire, revision):
384 repo = self._factory.repo(wire)
384 repo = self._factory.repo(wire)
385 ctx = self._get_ctx(repo, revision)
385 ctx = self._get_ctx(repo, revision)
386 return ctx.user()
386 return ctx.user()
387
387
388 @reraise_safe_exceptions
388 @reraise_safe_exceptions
389 def check_url(self, url, config):
389 def check_url(self, url, config):
390 _proto = None
390 _proto = None
391 if '+' in url[:url.find('://')]:
391 if '+' in url[:url.find('://')]:
392 _proto = url[0:url.find('+')]
392 _proto = url[0:url.find('+')]
393 url = url[url.find('+') + 1:]
393 url = url[url.find('+') + 1:]
394 handlers = []
394 handlers = []
395 url_obj = url_parser(url)
395 url_obj = url_parser(url)
396 test_uri, authinfo = url_obj.authinfo()
396 test_uri, authinfo = url_obj.authinfo()
397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
398 url_obj.query = obfuscate_qs(url_obj.query)
398 url_obj.query = obfuscate_qs(url_obj.query)
399
399
400 cleaned_uri = str(url_obj)
400 cleaned_uri = str(url_obj)
401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
402
402
403 if authinfo:
403 if authinfo:
404 # create a password manager
404 # create a password manager
405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
406 passmgr.add_password(*authinfo)
406 passmgr.add_password(*authinfo)
407
407
408 handlers.extend((httpbasicauthhandler(passmgr),
408 handlers.extend((httpbasicauthhandler(passmgr),
409 httpdigestauthhandler(passmgr)))
409 httpdigestauthhandler(passmgr)))
410
410
411 o = urllib2.build_opener(*handlers)
411 o = urllib2.build_opener(*handlers)
412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
413 ('Accept', 'application/mercurial-0.1')]
413 ('Accept', 'application/mercurial-0.1')]
414
414
415 q = {"cmd": 'between'}
415 q = {"cmd": 'between'}
416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
417 qs = '?%s' % urllib.urlencode(q)
417 qs = '?%s' % urllib.urlencode(q)
418 cu = "%s%s" % (test_uri, qs)
418 cu = "%s%s" % (test_uri, qs)
419 req = urllib2.Request(cu, None, {})
419 req = urllib2.Request(cu, None, {})
420
420
421 try:
421 try:
422 log.debug("Trying to open URL %s", cleaned_uri)
422 log.debug("Trying to open URL %s", cleaned_uri)
423 resp = o.open(req)
423 resp = o.open(req)
424 if resp.code != 200:
424 if resp.code != 200:
425 raise exceptions.URLError()('Return Code is not 200')
425 raise exceptions.URLError()('Return Code is not 200')
426 except Exception as e:
426 except Exception as e:
427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
428 # means it cannot be cloned
428 # means it cannot be cloned
429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
430
430
431 # now check if it's a proper hg repo, but don't do it for svn
431 # now check if it's a proper hg repo, but don't do it for svn
432 try:
432 try:
433 if _proto == 'svn':
433 if _proto == 'svn':
434 pass
434 pass
435 else:
435 else:
436 # check for pure hg repos
436 # check for pure hg repos
437 log.debug(
437 log.debug(
438 "Verifying if URL is a Mercurial repository: %s",
438 "Verifying if URL is a Mercurial repository: %s",
439 cleaned_uri)
439 cleaned_uri)
440 ui = make_ui_from_config(config)
440 ui = make_ui_from_config(config)
441 peer_checker = makepeer(ui, url)
441 peer_checker = makepeer(ui, url)
442 peer_checker.lookup('tip')
442 peer_checker.lookup('tip')
443 except Exception as e:
443 except Exception as e:
444 log.warning("URL is not a valid Mercurial repository: %s",
444 log.warning("URL is not a valid Mercurial repository: %s",
445 cleaned_uri)
445 cleaned_uri)
446 raise exceptions.URLError(e)(
446 raise exceptions.URLError(e)(
447 "url [%s] does not look like an hg repo org_exc: %s"
447 "url [%s] does not look like an hg repo org_exc: %s"
448 % (cleaned_uri, e))
448 % (cleaned_uri, e))
449
449
450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
451 return True
451 return True
452
452
453 @reraise_safe_exceptions
453 @reraise_safe_exceptions
454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
455 repo = self._factory.repo(wire)
455 repo = self._factory.repo(wire)
456
456
457 if file_filter:
457 if file_filter:
458 match_filter = match(file_filter[0], '', [file_filter[1]])
458 match_filter = match(file_filter[0], '', [file_filter[1]])
459 else:
459 else:
460 match_filter = file_filter
460 match_filter = file_filter
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
462
462
463 try:
463 try:
464 return "".join(patch.diff(
464 return "".join(patch.diff(
465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
466 except RepoLookupError as e:
466 except RepoLookupError as e:
467 raise exceptions.LookupException(e)()
467 raise exceptions.LookupException(e)()
468
468
469 @reraise_safe_exceptions
469 @reraise_safe_exceptions
470 def node_history(self, wire, revision, path, limit):
470 def node_history(self, wire, revision, path, limit):
471 cache_on, context_uid, repo_id = self._cache_on(wire)
471 cache_on, context_uid, repo_id = self._cache_on(wire)
472 @self.region.conditional_cache_on_arguments(condition=cache_on)
472 @self.region.conditional_cache_on_arguments(condition=cache_on)
473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
474 repo = self._factory.repo(wire)
474 repo = self._factory.repo(wire)
475
475
476 ctx = self._get_ctx(repo, revision)
476 ctx = self._get_ctx(repo, revision)
477 fctx = ctx.filectx(path)
477 fctx = ctx.filectx(path)
478
478
479 def history_iter():
479 def history_iter():
480 limit_rev = fctx.rev()
480 limit_rev = fctx.rev()
481 for obj in reversed(list(fctx.filelog())):
481 for obj in reversed(list(fctx.filelog())):
482 obj = fctx.filectx(obj)
482 obj = fctx.filectx(obj)
483 ctx = obj.changectx()
483 ctx = obj.changectx()
484 if ctx.hidden() or ctx.obsolete():
484 if ctx.hidden() or ctx.obsolete():
485 continue
485 continue
486
486
487 if limit_rev >= obj.rev():
487 if limit_rev >= obj.rev():
488 yield obj
488 yield obj
489
489
490 history = []
490 history = []
491 for cnt, obj in enumerate(history_iter()):
491 for cnt, obj in enumerate(history_iter()):
492 if limit and cnt >= limit:
492 if limit and cnt >= limit:
493 break
493 break
494 history.append(hex(obj.node()))
494 history.append(hex(obj.node()))
495
495
496 return [x for x in history]
496 return [x for x in history]
497 return _node_history(context_uid, repo_id, revision, path, limit)
497 return _node_history(context_uid, repo_id, revision, path, limit)
498
498
499 @reraise_safe_exceptions
499 @reraise_safe_exceptions
500 def node_history_untill(self, wire, revision, path, limit):
500 def node_history_untill(self, wire, revision, path, limit):
501 cache_on, context_uid, repo_id = self._cache_on(wire)
501 cache_on, context_uid, repo_id = self._cache_on(wire)
502 @self.region.conditional_cache_on_arguments(condition=cache_on)
502 @self.region.conditional_cache_on_arguments(condition=cache_on)
503 def _node_history_until(_context_uid, _repo_id):
503 def _node_history_until(_context_uid, _repo_id):
504 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
505 ctx = self._get_ctx(repo, revision)
505 ctx = self._get_ctx(repo, revision)
506 fctx = ctx.filectx(path)
506 fctx = ctx.filectx(path)
507
507
508 file_log = list(fctx.filelog())
508 file_log = list(fctx.filelog())
509 if limit:
509 if limit:
510 # Limit to the last n items
510 # Limit to the last n items
511 file_log = file_log[-limit:]
511 file_log = file_log[-limit:]
512
512
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
514 return _node_history_until(context_uid, repo_id, revision, path, limit)
514 return _node_history_until(context_uid, repo_id, revision, path, limit)
515
515
516 @reraise_safe_exceptions
516 @reraise_safe_exceptions
517 def fctx_annotate(self, wire, revision, path):
517 def fctx_annotate(self, wire, revision, path):
518 repo = self._factory.repo(wire)
518 repo = self._factory.repo(wire)
519 ctx = self._get_ctx(repo, revision)
519 ctx = self._get_ctx(repo, revision)
520 fctx = ctx.filectx(path)
520 fctx = ctx.filectx(path)
521
521
522 result = []
522 result = []
523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
524 ln_no = i
524 ln_no = i
525 sha = hex(annotate_obj.fctx.node())
525 sha = hex(annotate_obj.fctx.node())
526 content = annotate_obj.text
526 content = annotate_obj.text
527 result.append((ln_no, sha, content))
527 result.append((ln_no, sha, content))
528 return result
528 return result
529
529
530 @reraise_safe_exceptions
530 @reraise_safe_exceptions
531 def fctx_node_data(self, wire, revision, path):
531 def fctx_node_data(self, wire, revision, path):
532 repo = self._factory.repo(wire)
532 repo = self._factory.repo(wire)
533 ctx = self._get_ctx(repo, revision)
533 ctx = self._get_ctx(repo, revision)
534 fctx = ctx.filectx(path)
534 fctx = ctx.filectx(path)
535 return fctx.data()
535 return fctx.data()
536
536
537 @reraise_safe_exceptions
537 @reraise_safe_exceptions
538 def fctx_flags(self, wire, commit_id, path):
538 def fctx_flags(self, wire, commit_id, path):
539 cache_on, context_uid, repo_id = self._cache_on(wire)
539 cache_on, context_uid, repo_id = self._cache_on(wire)
540 @self.region.conditional_cache_on_arguments(condition=cache_on)
540 @self.region.conditional_cache_on_arguments(condition=cache_on)
541 def _fctx_flags(_repo_id, _commit_id, _path):
541 def _fctx_flags(_repo_id, _commit_id, _path):
542 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543 ctx = self._get_ctx(repo, commit_id)
543 ctx = self._get_ctx(repo, commit_id)
544 fctx = ctx.filectx(path)
544 fctx = ctx.filectx(path)
545 return fctx.flags()
545 return fctx.flags()
546
546
547 return _fctx_flags(repo_id, commit_id, path)
547 return _fctx_flags(repo_id, commit_id, path)
548
548
549 @reraise_safe_exceptions
549 @reraise_safe_exceptions
550 def fctx_size(self, wire, commit_id, path):
550 def fctx_size(self, wire, commit_id, path):
551 cache_on, context_uid, repo_id = self._cache_on(wire)
551 cache_on, context_uid, repo_id = self._cache_on(wire)
552 @self.region.conditional_cache_on_arguments(condition=cache_on)
552 @self.region.conditional_cache_on_arguments(condition=cache_on)
553 def _fctx_size(_repo_id, _revision, _path):
553 def _fctx_size(_repo_id, _revision, _path):
554 repo = self._factory.repo(wire)
554 repo = self._factory.repo(wire)
555 ctx = self._get_ctx(repo, commit_id)
555 ctx = self._get_ctx(repo, commit_id)
556 fctx = ctx.filectx(path)
556 fctx = ctx.filectx(path)
557 return fctx.size()
557 return fctx.size()
558 return _fctx_size(repo_id, commit_id, path)
558 return _fctx_size(repo_id, commit_id, path)
559
559
560 @reraise_safe_exceptions
560 @reraise_safe_exceptions
561 def get_all_commit_ids(self, wire, name):
561 def get_all_commit_ids(self, wire, name):
562 cache_on, context_uid, repo_id = self._cache_on(wire)
562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
565 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
566 repo = repo.filtered(name)
566 repo = repo.filtered(name)
567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
568 return revs
568 return revs
569 return _get_all_commit_ids(context_uid, repo_id, name)
569 return _get_all_commit_ids(context_uid, repo_id, name)
570
570
571 @reraise_safe_exceptions
571 @reraise_safe_exceptions
572 def get_config_value(self, wire, section, name, untrusted=False):
572 def get_config_value(self, wire, section, name, untrusted=False):
573 repo = self._factory.repo(wire)
573 repo = self._factory.repo(wire)
574 return repo.ui.config(section, name, untrusted=untrusted)
574 return repo.ui.config(section, name, untrusted=untrusted)
575
575
576 @reraise_safe_exceptions
576 @reraise_safe_exceptions
577 def is_large_file(self, wire, commit_id, path):
577 def is_large_file(self, wire, commit_id, path):
578 cache_on, context_uid, repo_id = self._cache_on(wire)
578 cache_on, context_uid, repo_id = self._cache_on(wire)
579 @self.region.conditional_cache_on_arguments(condition=cache_on)
579 @self.region.conditional_cache_on_arguments(condition=cache_on)
580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
581 return largefiles.lfutil.isstandin(path)
581 return largefiles.lfutil.isstandin(path)
582
582
583 return _is_large_file(context_uid, repo_id, commit_id, path)
583 return _is_large_file(context_uid, repo_id, commit_id, path)
584
584
585 @reraise_safe_exceptions
585 @reraise_safe_exceptions
586 def is_binary(self, wire, revision, path):
586 def is_binary(self, wire, revision, path):
587 cache_on, context_uid, repo_id = self._cache_on(wire)
587 cache_on, context_uid, repo_id = self._cache_on(wire)
588
588
589 @self.region.conditional_cache_on_arguments(condition=cache_on)
589 @self.region.conditional_cache_on_arguments(condition=cache_on)
590 def _is_binary(_repo_id, _sha, _path):
590 def _is_binary(_repo_id, _sha, _path):
591 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
592 ctx = self._get_ctx(repo, revision)
592 ctx = self._get_ctx(repo, revision)
593 fctx = ctx.filectx(path)
593 fctx = ctx.filectx(path)
594 return fctx.isbinary()
594 return fctx.isbinary()
595
595
596 return _is_binary(repo_id, revision, path)
596 return _is_binary(repo_id, revision, path)
597
597
598 @reraise_safe_exceptions
598 @reraise_safe_exceptions
599 def in_largefiles_store(self, wire, sha):
599 def in_largefiles_store(self, wire, sha):
600 repo = self._factory.repo(wire)
600 repo = self._factory.repo(wire)
601 return largefiles.lfutil.instore(repo, sha)
601 return largefiles.lfutil.instore(repo, sha)
602
602
603 @reraise_safe_exceptions
603 @reraise_safe_exceptions
604 def in_user_cache(self, wire, sha):
604 def in_user_cache(self, wire, sha):
605 repo = self._factory.repo(wire)
605 repo = self._factory.repo(wire)
606 return largefiles.lfutil.inusercache(repo.ui, sha)
606 return largefiles.lfutil.inusercache(repo.ui, sha)
607
607
608 @reraise_safe_exceptions
608 @reraise_safe_exceptions
609 def store_path(self, wire, sha):
609 def store_path(self, wire, sha):
610 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
611 return largefiles.lfutil.storepath(repo, sha)
611 return largefiles.lfutil.storepath(repo, sha)
612
612
613 @reraise_safe_exceptions
613 @reraise_safe_exceptions
614 def link(self, wire, sha, path):
614 def link(self, wire, sha, path):
615 repo = self._factory.repo(wire)
615 repo = self._factory.repo(wire)
616 largefiles.lfutil.link(
616 largefiles.lfutil.link(
617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
618
618
619 @reraise_safe_exceptions
619 @reraise_safe_exceptions
620 def localrepository(self, wire, create=False):
620 def localrepository(self, wire, create=False):
621 self._factory.repo(wire, create=create)
621 self._factory.repo(wire, create=create)
622
622
623 @reraise_safe_exceptions
623 @reraise_safe_exceptions
624 def lookup(self, wire, revision, both):
624 def lookup(self, wire, revision, both):
625 cache_on, context_uid, repo_id = self._cache_on(wire)
625 cache_on, context_uid, repo_id = self._cache_on(wire)
626 @self.region.conditional_cache_on_arguments(condition=cache_on)
626 @self.region.conditional_cache_on_arguments(condition=cache_on)
627 def _lookup(_context_uid, _repo_id, _revision, _both):
627 def _lookup(_context_uid, _repo_id, _revision, _both):
628
628
629 repo = self._factory.repo(wire)
629 repo = self._factory.repo(wire)
630 rev = _revision
630 rev = _revision
631 if isinstance(rev, int):
631 if isinstance(rev, int):
632 # NOTE(marcink):
632 # NOTE(marcink):
633 # since Mercurial doesn't support negative indexes properly
633 # since Mercurial doesn't support negative indexes properly
634 # we need to shift accordingly by one to get proper index, e.g
634 # we need to shift accordingly by one to get proper index, e.g
635 # repo[-1] => repo[-2]
635 # repo[-1] => repo[-2]
636 # repo[0] => repo[-1]
636 # repo[0] => repo[-1]
637 if rev <= 0:
637 if rev <= 0:
638 rev = rev + -1
638 rev = rev + -1
639 try:
639 try:
640 ctx = self._get_ctx(repo, rev)
640 ctx = self._get_ctx(repo, rev)
641 except (TypeError, RepoLookupError) as e:
641 except (TypeError, RepoLookupError) as e:
642 e._org_exc_tb = traceback.format_exc()
642 e._org_exc_tb = traceback.format_exc()
643 raise exceptions.LookupException(e)(rev)
643 raise exceptions.LookupException(e)(rev)
644 except LookupError as e:
644 except LookupError as e:
645 e._org_exc_tb = traceback.format_exc()
645 e._org_exc_tb = traceback.format_exc()
646 raise exceptions.LookupException(e)(e.name)
646 raise exceptions.LookupException(e)(e.name)
647
647
648 if not both:
648 if not both:
649 return ctx.hex()
649 return ctx.hex()
650
650
651 ctx = repo[ctx.hex()]
651 ctx = repo[ctx.hex()]
652 return ctx.hex(), ctx.rev()
652 return ctx.hex(), ctx.rev()
653
653
654 return _lookup(context_uid, repo_id, revision, both)
654 return _lookup(context_uid, repo_id, revision, both)
655
655
656 @reraise_safe_exceptions
656 @reraise_safe_exceptions
657 def sync_push(self, wire, url):
657 def sync_push(self, wire, url):
658 if not self.check_url(url, wire['config']):
658 if not self.check_url(url, wire['config']):
659 return
659 return
660
660
661 repo = self._factory.repo(wire)
661 repo = self._factory.repo(wire)
662
662
663 # Disable any prompts for this repo
663 # Disable any prompts for this repo
664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
665
665
666 bookmarks = dict(repo._bookmarks).keys()
666 bookmarks = dict(repo._bookmarks).keys()
667 remote = peer(repo, {}, url)
667 remote = peer(repo, {}, url)
668 # Disable any prompts for this remote
668 # Disable any prompts for this remote
669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
670
670
671 return exchange.push(
671 return exchange.push(
672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def revision(self, wire, rev):
675 def revision(self, wire, rev):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 ctx = self._get_ctx(repo, rev)
677 ctx = self._get_ctx(repo, rev)
678 return ctx.rev()
678 return ctx.rev()
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def rev_range(self, wire, commit_filter):
681 def rev_range(self, wire, commit_filter):
682 cache_on, context_uid, repo_id = self._cache_on(wire)
682 cache_on, context_uid, repo_id = self._cache_on(wire)
683
683
684 @self.region.conditional_cache_on_arguments(condition=cache_on)
684 @self.region.conditional_cache_on_arguments(condition=cache_on)
685 def _rev_range(_context_uid, _repo_id, _filter):
685 def _rev_range(_context_uid, _repo_id, _filter):
686 repo = self._factory.repo(wire)
686 repo = self._factory.repo(wire)
687 revisions = [rev for rev in revrange(repo, commit_filter)]
687 revisions = [rev for rev in revrange(repo, commit_filter)]
688 return revisions
688 return revisions
689
689
690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
691
691
692 @reraise_safe_exceptions
692 @reraise_safe_exceptions
693 def rev_range_hash(self, wire, node):
693 def rev_range_hash(self, wire, node):
694 repo = self._factory.repo(wire)
694 repo = self._factory.repo(wire)
695
695
696 def get_revs(repo, rev_opt):
696 def get_revs(repo, rev_opt):
697 if rev_opt:
697 if rev_opt:
698 revs = revrange(repo, rev_opt)
698 revs = revrange(repo, rev_opt)
699 if len(revs) == 0:
699 if len(revs) == 0:
700 return (nullrev, nullrev)
700 return (nullrev, nullrev)
701 return max(revs), min(revs)
701 return max(revs), min(revs)
702 else:
702 else:
703 return len(repo) - 1, 0
703 return len(repo) - 1, 0
704
704
705 stop, start = get_revs(repo, [node + ':'])
705 stop, start = get_revs(repo, [node + ':'])
706 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
706 revs = [hex(repo[r].node()) for r in range(start, stop + 1)]
707 return revs
707 return revs
708
708
709 @reraise_safe_exceptions
709 @reraise_safe_exceptions
710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
711 other_path = kwargs.pop('other_path', None)
711 other_path = kwargs.pop('other_path', None)
712
712
713 # case when we want to compare two independent repositories
713 # case when we want to compare two independent repositories
714 if other_path and other_path != wire["path"]:
714 if other_path and other_path != wire["path"]:
715 baseui = self._factory._create_config(wire["config"])
715 baseui = self._factory._create_config(wire["config"])
716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
717 else:
717 else:
718 repo = self._factory.repo(wire)
718 repo = self._factory.repo(wire)
719 return list(repo.revs(rev_spec, *args))
719 return list(repo.revs(rev_spec, *args))
720
720
721 @reraise_safe_exceptions
721 @reraise_safe_exceptions
722 def verify(self, wire,):
722 def verify(self, wire,):
723 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
724 baseui = self._factory._create_config(wire['config'])
724 baseui = self._factory._create_config(wire['config'])
725
725
726 baseui, output = patch_ui_message_output(baseui)
726 baseui, output = patch_ui_message_output(baseui)
727
727
728 repo.ui = baseui
728 repo.ui = baseui
729 verify.verify(repo)
729 verify.verify(repo)
730 return output.getvalue()
730 return output.getvalue()
731
731
732 @reraise_safe_exceptions
732 @reraise_safe_exceptions
733 def hg_update_cache(self, wire,):
733 def hg_update_cache(self, wire,):
734 repo = self._factory.repo(wire)
734 repo = self._factory.repo(wire)
735 baseui = self._factory._create_config(wire['config'])
735 baseui = self._factory._create_config(wire['config'])
736 baseui, output = patch_ui_message_output(baseui)
736 baseui, output = patch_ui_message_output(baseui)
737
737
738 repo.ui = baseui
738 repo.ui = baseui
739 with repo.wlock(), repo.lock():
739 with repo.wlock(), repo.lock():
740 repo.updatecaches(full=True)
740 repo.updatecaches(full=True)
741
741
742 return output.getvalue()
742 return output.getvalue()
743
743
744 @reraise_safe_exceptions
744 @reraise_safe_exceptions
745 def hg_rebuild_fn_cache(self, wire,):
745 def hg_rebuild_fn_cache(self, wire,):
746 repo = self._factory.repo(wire)
746 repo = self._factory.repo(wire)
747 baseui = self._factory._create_config(wire['config'])
747 baseui = self._factory._create_config(wire['config'])
748 baseui, output = patch_ui_message_output(baseui)
748 baseui, output = patch_ui_message_output(baseui)
749
749
750 repo.ui = baseui
750 repo.ui = baseui
751
751
752 repair.rebuildfncache(baseui, repo)
752 repair.rebuildfncache(baseui, repo)
753
753
754 return output.getvalue()
754 return output.getvalue()
755
755
756 @reraise_safe_exceptions
756 @reraise_safe_exceptions
757 def tags(self, wire):
757 def tags(self, wire):
758 cache_on, context_uid, repo_id = self._cache_on(wire)
758 cache_on, context_uid, repo_id = self._cache_on(wire)
759 @self.region.conditional_cache_on_arguments(condition=cache_on)
759 @self.region.conditional_cache_on_arguments(condition=cache_on)
760 def _tags(_context_uid, _repo_id):
760 def _tags(_context_uid, _repo_id):
761 repo = self._factory.repo(wire)
761 repo = self._factory.repo(wire)
762 return repo.tags()
762 return repo.tags()
763
763
764 return _tags(context_uid, repo_id)
764 return _tags(context_uid, repo_id)
765
765
766 @reraise_safe_exceptions
766 @reraise_safe_exceptions
767 def update(self, wire, node=None, clean=False):
767 def update(self, wire, node=None, clean=False):
768 repo = self._factory.repo(wire)
768 repo = self._factory.repo(wire)
769 baseui = self._factory._create_config(wire['config'])
769 baseui = self._factory._create_config(wire['config'])
770 commands.update(baseui, repo, node=node, clean=clean)
770 commands.update(baseui, repo, node=node, clean=clean)
771
771
772 @reraise_safe_exceptions
772 @reraise_safe_exceptions
773 def identify(self, wire):
773 def identify(self, wire):
774 repo = self._factory.repo(wire)
774 repo = self._factory.repo(wire)
775 baseui = self._factory._create_config(wire['config'])
775 baseui = self._factory._create_config(wire['config'])
776 output = io.BytesIO()
776 output = io.BytesIO()
777 baseui.write = output.write
777 baseui.write = output.write
778 # This is required to get a full node id
778 # This is required to get a full node id
779 baseui.debugflag = True
779 baseui.debugflag = True
780 commands.identify(baseui, repo, id=True)
780 commands.identify(baseui, repo, id=True)
781
781
782 return output.getvalue()
782 return output.getvalue()
783
783
784 @reraise_safe_exceptions
784 @reraise_safe_exceptions
785 def heads(self, wire, branch=None):
785 def heads(self, wire, branch=None):
786 repo = self._factory.repo(wire)
786 repo = self._factory.repo(wire)
787 baseui = self._factory._create_config(wire['config'])
787 baseui = self._factory._create_config(wire['config'])
788 output = io.BytesIO()
788 output = io.BytesIO()
789
789
790 def write(data, **unused_kwargs):
790 def write(data, **unused_kwargs):
791 output.write(data)
791 output.write(data)
792
792
793 baseui.write = write
793 baseui.write = write
794 if branch:
794 if branch:
795 args = [branch]
795 args = [branch]
796 else:
796 else:
797 args = []
797 args = []
798 commands.heads(baseui, repo, template='{node} ', *args)
798 commands.heads(baseui, repo, template='{node} ', *args)
799
799
800 return output.getvalue()
800 return output.getvalue()
801
801
802 @reraise_safe_exceptions
802 @reraise_safe_exceptions
803 def ancestor(self, wire, revision1, revision2):
803 def ancestor(self, wire, revision1, revision2):
804 repo = self._factory.repo(wire)
804 repo = self._factory.repo(wire)
805 changelog = repo.changelog
805 changelog = repo.changelog
806 lookup = repo.lookup
806 lookup = repo.lookup
807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
808 return hex(a)
808 return hex(a)
809
809
810 @reraise_safe_exceptions
810 @reraise_safe_exceptions
811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
813 clone(baseui, source, dest, noupdate=not update_after_clone)
813 clone(baseui, source, dest, noupdate=not update_after_clone)
814
814
815 @reraise_safe_exceptions
815 @reraise_safe_exceptions
816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
817
817
818 repo = self._factory.repo(wire)
818 repo = self._factory.repo(wire)
819 baseui = self._factory._create_config(wire['config'])
819 baseui = self._factory._create_config(wire['config'])
820 publishing = baseui.configbool('phases', 'publish')
820 publishing = baseui.configbool('phases', 'publish')
821 if publishing:
821 if publishing:
822 new_commit = 'public'
822 new_commit = 'public'
823 else:
823 else:
824 new_commit = 'draft'
824 new_commit = 'draft'
825
825
826 def _filectxfn(_repo, ctx, path):
826 def _filectxfn(_repo, ctx, path):
827 """
827 """
828 Marks the given path as added/changed/removed in the given _repo. This is
828 Marks the given path as added/changed/removed in the given _repo. This is
829 used by Mercurial's internal commit function.
829 used by Mercurial's internal commit function.
830 """
830 """
831
831
832 # check if this path is removed
832 # check if this path is removed
833 if path in removed:
833 if path in removed:
834 # returning None is a way to mark node for removal
834 # returning None is a way to mark node for removal
835 return None
835 return None
836
836
837 # check if this path is added
837 # check if this path is added
838 for node in updated:
838 for node in updated:
839 if node['path'] == path:
839 if node['path'] == path:
840 return memfilectx(
840 return memfilectx(
841 _repo,
841 _repo,
842 changectx=ctx,
842 changectx=ctx,
843 path=node['path'],
843 path=node['path'],
844 data=node['content'],
844 data=node['content'],
845 islink=False,
845 islink=False,
846 isexec=bool(node['mode'] & stat.S_IXUSR),
846 isexec=bool(node['mode'] & stat.S_IXUSR),
847 copysource=False)
847 copysource=False)
848
848
849 raise exceptions.AbortException()(
849 raise exceptions.AbortException()(
850 "Given path haven't been marked as added, "
850 "Given path haven't been marked as added, "
851 "changed or removed (%s)" % path)
851 "changed or removed (%s)" % path)
852
852
853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
854
854
855 commit_ctx = memctx(
855 commit_ctx = memctx(
856 repo=repo,
856 repo=repo,
857 parents=parents,
857 parents=parents,
858 text=message,
858 text=message,
859 files=files,
859 files=files,
860 filectxfn=_filectxfn,
860 filectxfn=_filectxfn,
861 user=user,
861 user=user,
862 date=(commit_time, commit_timezone),
862 date=(commit_time, commit_timezone),
863 extra=extra)
863 extra=extra)
864
864
865 n = repo.commitctx(commit_ctx)
865 n = repo.commitctx(commit_ctx)
866 new_id = hex(n)
866 new_id = hex(n)
867
867
868 return new_id
868 return new_id
869
869
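A rough sketch of the payload shape that `commitctx` consumes; only the node-dict keys (`path`, `content`, `mode`) are taken from `_filectxfn` above, everything else (names, values, the commented-out call) is illustrative:

    import stat

    updated = [{
        'path': 'docs/readme.rst',     # file to add or change
        'content': 'hello\n',          # full new content of the file
        'mode': 0o100644,              # _filectxfn derives isexec via mode & stat.S_IXUSR
    }]
    removed = []                       # paths that should disappear in this commit
    files = [node['path'] for node in updated] + removed
    is_executable = bool(updated[0]['mode'] & stat.S_IXUSR)   # False for a plain file

    # remote.commitctx(wire, message='add readme', parents=[parent_hex_id],
    #                  commit_time=0, commit_timezone=0,
    #                  user='J. Doe <jdoe@example.com>', files=files,
    #                  extra={}, removed=removed, updated=updated)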
870 @reraise_safe_exceptions
870 @reraise_safe_exceptions
871 def pull(self, wire, url, commit_ids=None):
871 def pull(self, wire, url, commit_ids=None):
872 repo = self._factory.repo(wire)
872 repo = self._factory.repo(wire)
873 # Disable any prompts for this repo
873 # Disable any prompts for this repo
874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
875
875
876 remote = peer(repo, {}, url)
876 remote = peer(repo, {}, url)
877 # Disable any prompts for this remote
877 # Disable any prompts for this remote
878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
879
879
880 if commit_ids:
880 if commit_ids:
881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
882
882
883 return exchange.pull(
883 return exchange.pull(
884 repo, remote, heads=commit_ids, force=None).cgresult
884 repo, remote, heads=commit_ids, force=None).cgresult
885
885
886 @reraise_safe_exceptions
886 @reraise_safe_exceptions
887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
888 repo = self._factory.repo(wire)
888 repo = self._factory.repo(wire)
889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
890
890
891 # Mercurial internally has a lot of logic that checks ONLY whether an
891 # Mercurial internally has a lot of logic that checks ONLY whether an
892 # option is defined, so we only pass the options that are actually set
892 # option is defined, so we only pass the options that are actually set
893 opts = {}
893 opts = {}
894 if bookmark:
894 if bookmark:
895 opts['bookmark'] = bookmark
895 opts['bookmark'] = bookmark
896 if branch:
896 if branch:
897 opts['branch'] = branch
897 opts['branch'] = branch
898 if revision:
898 if revision:
899 opts['rev'] = revision
899 opts['rev'] = revision
900
900
901 commands.pull(baseui, repo, source, **opts)
901 commands.pull(baseui, repo, source, **opts)
902
902
903 @reraise_safe_exceptions
903 @reraise_safe_exceptions
904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
905 repo = self._factory.repo(wire)
905 repo = self._factory.repo(wire)
906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
908 new_branch=push_branches)
908 new_branch=push_branches)
909
909
910 @reraise_safe_exceptions
910 @reraise_safe_exceptions
911 def strip(self, wire, revision, update, backup):
911 def strip(self, wire, revision, update, backup):
912 repo = self._factory.repo(wire)
912 repo = self._factory.repo(wire)
913 ctx = self._get_ctx(repo, revision)
913 ctx = self._get_ctx(repo, revision)
914 hgext_strip(
914 hgext_strip(
915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
916
916
917 @reraise_safe_exceptions
917 @reraise_safe_exceptions
918 def get_unresolved_files(self, wire):
918 def get_unresolved_files(self, wire):
919 repo = self._factory.repo(wire)
919 repo = self._factory.repo(wire)
920
920
921 log.debug('Calculating unresolved files for repo: %s', repo)
921 log.debug('Calculating unresolved files for repo: %s', repo)
922 output = io.BytesIO()
922 output = io.BytesIO()
923
923
924 def write(data, **unused_kwargs):
924 def write(data, **unused_kwargs):
925 output.write(data)
925 output.write(data)
926
926
927 baseui = self._factory._create_config(wire['config'])
927 baseui = self._factory._create_config(wire['config'])
928 baseui.write = write
928 baseui.write = write
929
929
930 commands.resolve(baseui, repo, list=True)
930 commands.resolve(baseui, repo, list=True)
931 unresolved = output.getvalue().splitlines(0)
931 unresolved = output.getvalue().splitlines(0)
932 return unresolved
932 return unresolved
933
933
934 @reraise_safe_exceptions
934 @reraise_safe_exceptions
935 def merge(self, wire, revision):
935 def merge(self, wire, revision):
936 repo = self._factory.repo(wire)
936 repo = self._factory.repo(wire)
937 baseui = self._factory._create_config(wire['config'])
937 baseui = self._factory._create_config(wire['config'])
938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
939
939
940 # When subrepositories are used, Mercurial prompts the user in case of
940 # When subrepositories are used, Mercurial prompts the user in case of
941 # merge conflicts or differing subrepository sources. By setting the
941 # merge conflicts or differing subrepository sources. By setting the
942 # interactive flag to `False`, Mercurial doesn't prompt the user but
942 # interactive flag to `False`, Mercurial doesn't prompt the user but
943 # instead uses a default value.
943 # instead uses a default value.
944 repo.ui.setconfig('ui', 'interactive', False)
944 repo.ui.setconfig('ui', 'interactive', False)
945 commands.merge(baseui, repo, rev=revision)
945 commands.merge(baseui, repo, rev=revision)
946
946
947 @reraise_safe_exceptions
947 @reraise_safe_exceptions
948 def merge_state(self, wire):
948 def merge_state(self, wire):
949 repo = self._factory.repo(wire)
949 repo = self._factory.repo(wire)
950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
951
951
952 # When subrepositories are used, Mercurial prompts the user in case of
952 # When subrepositories are used, Mercurial prompts the user in case of
953 # merge conflicts or differing subrepository sources. By setting the
953 # merge conflicts or differing subrepository sources. By setting the
954 # interactive flag to `False`, Mercurial doesn't prompt the user but
954 # interactive flag to `False`, Mercurial doesn't prompt the user but
955 # instead uses a default value.
955 # instead uses a default value.
956 repo.ui.setconfig('ui', 'interactive', False)
956 repo.ui.setconfig('ui', 'interactive', False)
957 ms = hg_merge.mergestate(repo)
957 ms = hg_merge.mergestate(repo)
958 return [x for x in ms.unresolved()]
958 return [x for x in ms.unresolved()]
959
959
960 @reraise_safe_exceptions
960 @reraise_safe_exceptions
961 def commit(self, wire, message, username, close_branch=False):
961 def commit(self, wire, message, username, close_branch=False):
962 repo = self._factory.repo(wire)
962 repo = self._factory.repo(wire)
963 baseui = self._factory._create_config(wire['config'])
963 baseui = self._factory._create_config(wire['config'])
964 repo.ui.setconfig('ui', 'username', username)
964 repo.ui.setconfig('ui', 'username', username)
965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
966
966
967 @reraise_safe_exceptions
967 @reraise_safe_exceptions
968 def rebase(self, wire, source=None, dest=None, abort=False):
968 def rebase(self, wire, source=None, dest=None, abort=False):
969 repo = self._factory.repo(wire)
969 repo = self._factory.repo(wire)
970 baseui = self._factory._create_config(wire['config'])
970 baseui = self._factory._create_config(wire['config'])
971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
972 # When subrepositories are used, Mercurial prompts the user in case of
972 # When subrepositories are used, Mercurial prompts the user in case of
973 # merge conflicts or differing subrepository sources. By setting the
973 # merge conflicts or differing subrepository sources. By setting the
974 # interactive flag to `False`, Mercurial doesn't prompt the user but
974 # interactive flag to `False`, Mercurial doesn't prompt the user but
975 # instead uses a default value.
975 # instead uses a default value.
976 repo.ui.setconfig('ui', 'interactive', False)
976 repo.ui.setconfig('ui', 'interactive', False)
977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
978
978
979 @reraise_safe_exceptions
979 @reraise_safe_exceptions
980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
981 repo = self._factory.repo(wire)
981 repo = self._factory.repo(wire)
982 ctx = self._get_ctx(repo, revision)
982 ctx = self._get_ctx(repo, revision)
983 node = ctx.node()
983 node = ctx.node()
984
984
985 date = (tag_time, tag_timezone)
985 date = (tag_time, tag_timezone)
986 try:
986 try:
987 hg_tag.tag(repo, name, node, message, local, user, date)
987 hg_tag.tag(repo, name, node, message, local, user, date)
988 except Abort as e:
988 except Abort as e:
989 log.exception("Tag operation aborted")
989 log.exception("Tag operation aborted")
990 # Exception can contain unicode which we convert
990 # Exception can contain unicode which we convert
991 raise exceptions.AbortException(e)(repr(e))
991 raise exceptions.AbortException(e)(repr(e))
992
992
993 @reraise_safe_exceptions
993 @reraise_safe_exceptions
994 def bookmark(self, wire, bookmark, revision=None):
994 def bookmark(self, wire, bookmark, revision=None):
995 repo = self._factory.repo(wire)
995 repo = self._factory.repo(wire)
996 baseui = self._factory._create_config(wire['config'])
996 baseui = self._factory._create_config(wire['config'])
997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
998
998
999 @reraise_safe_exceptions
999 @reraise_safe_exceptions
1000 def install_hooks(self, wire, force=False):
1000 def install_hooks(self, wire, force=False):
1001 # we don't need any special hooks for Mercurial
1001 # we don't need any special hooks for Mercurial
1002 pass
1002 pass
1003
1003
1004 @reraise_safe_exceptions
1004 @reraise_safe_exceptions
1005 def get_hooks_info(self, wire):
1005 def get_hooks_info(self, wire):
1006 return {
1006 return {
1007 'pre_version': vcsserver.__version__,
1007 'pre_version': vcsserver.__version__,
1008 'post_version': vcsserver.__version__,
1008 'post_version': vcsserver.__version__,
1009 }
1009 }
@@ -1,729 +1,729 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55
55
56 response_data = response.read()
56 response_data = response.read()
57
57
58 try:
58 try:
59 return json.loads(response_data)
59 return json.loads(response_data)
60 except Exception:
60 except Exception:
61 log.exception('Failed to decode hook response json data. '
61 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
62 'response_code:%s, raw_data:%s',
63 response.status, response_data)
63 response.status, response_data)
64 raise
64 raise
65
65
66 def _serialize(self, hook_name, extras):
66 def _serialize(self, hook_name, extras):
67 data = {
67 data = {
68 'method': hook_name,
68 'method': hook_name,
69 'extras': extras
69 'extras': extras
70 }
70 }
71 return json.dumps(data)
71 return json.dumps(data)
72
72
73
73
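A small sketch of the wire format this client speaks; the host:port and the extras payload are placeholders, while the request and response keys follow `_serialize`, `_call_hook` and `_handle_exception` in this module:

    client = HooksHttpClient('localhost:9999')   # hooks callback daemon (placeholder address)
    body = client._serialize('pre_push', {'repository': 'some/repo'})
    # body is a JSON string like {"method": "pre_push", "extras": {"repository": "some/repo"}}
    # __call__ POSTs it to '/' and decodes the JSON reply, which is expected to carry
    # at least 'status' and 'output', plus optional 'exception' / 'exception_args'
    # that _handle_exception turns back into vcsserver exceptions.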
74 class HooksDummyClient(object):
74 class HooksDummyClient(object):
75 def __init__(self, hooks_module):
75 def __init__(self, hooks_module):
76 self._hooks_module = importlib.import_module(hooks_module)
76 self._hooks_module = importlib.import_module(hooks_module)
77
77
78 def __call__(self, hook_name, extras):
78 def __call__(self, hook_name, extras):
79 with self._hooks_module.Hooks() as hooks:
79 with self._hooks_module.Hooks() as hooks:
80 return getattr(hooks, hook_name)(extras)
80 return getattr(hooks, hook_name)(extras)
81
81
82
82
83 class HooksShadowRepoClient(object):
83 class HooksShadowRepoClient(object):
84
84
85 def __call__(self, hook_name, extras):
85 def __call__(self, hook_name, extras):
86 return {'output': '', 'status': 0}
86 return {'output': '', 'status': 0}
87
87
88
88
89 class RemoteMessageWriter(object):
89 class RemoteMessageWriter(object):
90 """Writer base class."""
90 """Writer base class."""
91 def write(self, message):
91 def write(self, message):
92 raise NotImplementedError()
92 raise NotImplementedError()
93
93
94
94
95 class HgMessageWriter(RemoteMessageWriter):
95 class HgMessageWriter(RemoteMessageWriter):
96 """Writer that knows how to send messages to mercurial clients."""
96 """Writer that knows how to send messages to mercurial clients."""
97
97
98 def __init__(self, ui):
98 def __init__(self, ui):
99 self.ui = ui
99 self.ui = ui
100
100
101 def write(self, message):
101 def write(self, message):
102 # TODO: Check why the quiet flag is set by default.
102 # TODO: Check why the quiet flag is set by default.
103 old = self.ui.quiet
103 old = self.ui.quiet
104 self.ui.quiet = False
104 self.ui.quiet = False
105 self.ui.status(message.encode('utf-8'))
105 self.ui.status(message.encode('utf-8'))
106 self.ui.quiet = old
106 self.ui.quiet = old
107
107
108
108
109 class GitMessageWriter(RemoteMessageWriter):
109 class GitMessageWriter(RemoteMessageWriter):
110 """Writer that knows how to send messages to git clients."""
110 """Writer that knows how to send messages to git clients."""
111
111
112 def __init__(self, stdout=None):
112 def __init__(self, stdout=None):
113 self.stdout = stdout or sys.stdout
113 self.stdout = stdout or sys.stdout
114
114
115 def write(self, message):
115 def write(self, message):
116 self.stdout.write(message.encode('utf-8'))
116 self.stdout.write(message.encode('utf-8'))
117
117
118
118
119 class SvnMessageWriter(RemoteMessageWriter):
119 class SvnMessageWriter(RemoteMessageWriter):
120 """Writer that knows how to send messages to svn clients."""
120 """Writer that knows how to send messages to svn clients."""
121
121
122 def __init__(self, stderr=None):
122 def __init__(self, stderr=None):
123 # SVN needs data sent to stderr for back-to-client messaging
123 # SVN needs data sent to stderr for back-to-client messaging
124 self.stderr = stderr or sys.stderr
124 self.stderr = stderr or sys.stderr
125
125
126 def write(self, message):
126 def write(self, message):
127 self.stderr.write(message.encode('utf-8'))
127 self.stderr.write(message.encode('utf-8'))
128
128
129
129
130 def _handle_exception(result):
130 def _handle_exception(result):
131 exception_class = result.get('exception')
131 exception_class = result.get('exception')
132 exception_traceback = result.get('exception_traceback')
132 exception_traceback = result.get('exception_traceback')
133
133
134 if exception_traceback:
134 if exception_traceback:
135 log.error('Got traceback from remote call:%s', exception_traceback)
135 log.error('Got traceback from remote call:%s', exception_traceback)
136
136
137 if exception_class == 'HTTPLockedRC':
137 if exception_class == 'HTTPLockedRC':
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 elif exception_class == 'HTTPBranchProtected':
139 elif exception_class == 'HTTPBranchProtected':
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 elif exception_class == 'RepositoryError':
141 elif exception_class == 'RepositoryError':
142 raise exceptions.VcsException()(*result['exception_args'])
142 raise exceptions.VcsException()(*result['exception_args'])
143 elif exception_class:
143 elif exception_class:
144 raise Exception('Got remote exception "%s" with args "%s"' %
144 raise Exception('Got remote exception "%s" with args "%s"' %
145 (exception_class, result['exception_args']))
145 (exception_class, result['exception_args']))
146
146
147
147
148 def _get_hooks_client(extras):
148 def _get_hooks_client(extras):
149 hooks_uri = extras.get('hooks_uri')
149 hooks_uri = extras.get('hooks_uri')
150 is_shadow_repo = extras.get('is_shadow_repo')
150 is_shadow_repo = extras.get('is_shadow_repo')
151 if hooks_uri:
151 if hooks_uri:
152 return HooksHttpClient(extras['hooks_uri'])
152 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
153 elif is_shadow_repo:
154 return HooksShadowRepoClient()
154 return HooksShadowRepoClient()
155 else:
155 else:
156 return HooksDummyClient(extras['hooks_module'])
156 return HooksDummyClient(extras['hooks_module'])
157
157
158
158
159 def _call_hook(hook_name, extras, writer):
159 def _call_hook(hook_name, extras, writer):
160 hooks_client = _get_hooks_client(extras)
160 hooks_client = _get_hooks_client(extras)
161 log.debug('Hooks, using client:%s', hooks_client)
161 log.debug('Hooks, using client:%s', hooks_client)
162 result = hooks_client(hook_name, extras)
162 result = hooks_client(hook_name, extras)
163 log.debug('Hooks got result: %s', result)
163 log.debug('Hooks got result: %s', result)
164
164
165 _handle_exception(result)
165 _handle_exception(result)
166 writer.write(result['output'])
166 writer.write(result['output'])
167
167
168 return result['status']
168 return result['status']
169
169
170
170
171 def _extras_from_ui(ui):
171 def _extras_from_ui(ui):
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 if not hook_data:
173 if not hook_data:
174 # maybe it's inside environ ?
174 # maybe it's inside environ ?
175 env_hook_data = os.environ.get('RC_SCM_DATA')
175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 if env_hook_data:
176 if env_hook_data:
177 hook_data = env_hook_data
177 hook_data = env_hook_data
178
178
179 extras = {}
179 extras = {}
180 if hook_data:
180 if hook_data:
181 extras = json.loads(hook_data)
181 extras = json.loads(hook_data)
182 return extras
182 return extras
183
183
184
184
185 def _rev_range_hash(repo, node, check_heads=False):
185 def _rev_range_hash(repo, node, check_heads=False):
186 from vcsserver.hgcompat import get_ctx
186 from vcsserver.hgcompat import get_ctx
187
187
188 commits = []
188 commits = []
189 revs = []
189 revs = []
190 start = get_ctx(repo, node).rev()
190 start = get_ctx(repo, node).rev()
191 end = len(repo)
191 end = len(repo)
192 for rev in range(start, end):
192 for rev in range(start, end):
193 revs.append(rev)
193 revs.append(rev)
194 ctx = get_ctx(repo, rev)
194 ctx = get_ctx(repo, rev)
195 commit_id = mercurial.node.hex(ctx.node())
195 commit_id = mercurial.node.hex(ctx.node())
196 branch = ctx.branch()
196 branch = ctx.branch()
197 commits.append((commit_id, branch))
197 commits.append((commit_id, branch))
198
198
199 parent_heads = []
199 parent_heads = []
200 if check_heads:
200 if check_heads:
201 parent_heads = _check_heads(repo, start, end, revs)
201 parent_heads = _check_heads(repo, start, end, revs)
202 return commits, parent_heads
202 return commits, parent_heads
203
203
204
204
205 def _check_heads(repo, start, end, commits):
205 def _check_heads(repo, start, end, commits):
206 from vcsserver.hgcompat import get_ctx
206 from vcsserver.hgcompat import get_ctx
207 changelog = repo.changelog
207 changelog = repo.changelog
208 parents = set()
208 parents = set()
209
209
210 for new_rev in commits:
210 for new_rev in commits:
211 for p in changelog.parentrevs(new_rev):
211 for p in changelog.parentrevs(new_rev):
212 if p == mercurial.node.nullrev:
212 if p == mercurial.node.nullrev:
213 continue
213 continue
214 if p < start:
214 if p < start:
215 parents.add(p)
215 parents.add(p)
216
216
217 for p in parents:
217 for p in parents:
218 branch = get_ctx(repo, p).branch()
218 branch = get_ctx(repo, p).branch()
219 # The heads descending from that parent, on the same branch
219 # The heads descending from that parent, on the same branch
220 parent_heads = set([p])
220 parent_heads = set([p])
221 reachable = set([p])
221 reachable = set([p])
222 for x in xrange(p + 1, end):
222 for x in range(p + 1, end):
223 if get_ctx(repo, x).branch() != branch:
223 if get_ctx(repo, x).branch() != branch:
224 continue
224 continue
225 for pp in changelog.parentrevs(x):
225 for pp in changelog.parentrevs(x):
226 if pp in reachable:
226 if pp in reachable:
227 reachable.add(x)
227 reachable.add(x)
228 parent_heads.discard(pp)
228 parent_heads.discard(pp)
229 parent_heads.add(x)
229 parent_heads.add(x)
230 # More than one head? Suggest merging
230 # More than one head? Suggest merging
231 if len(parent_heads) > 1:
231 if len(parent_heads) > 1:
232 return list(parent_heads)
232 return list(parent_heads)
233
233
234 return []
234 return []
235
235
236
236
237 def _get_git_env():
237 def _get_git_env():
238 env = {}
238 env = {}
239 for k, v in os.environ.items():
239 for k, v in os.environ.items():
240 if k.startswith('GIT'):
240 if k.startswith('GIT'):
241 env[k] = v
241 env[k] = v
242
242
243 # serialized version
243 # serialized version
244 return [(k, v) for k, v in env.items()]
244 return [(k, v) for k, v in env.items()]
245
245
246
246
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 env = {}
248 env = {}
249 for k, v in os.environ.items():
249 for k, v in os.environ.items():
250 if k.startswith('HG'):
250 if k.startswith('HG'):
251 env[k] = v
251 env[k] = v
252
252
253 env['HG_NODE'] = old_rev
253 env['HG_NODE'] = old_rev
254 env['HG_NODE_LAST'] = new_rev
254 env['HG_NODE_LAST'] = new_rev
255 env['HG_TXNID'] = txnid
255 env['HG_TXNID'] = txnid
256 env['HG_PENDING'] = repo_path
256 env['HG_PENDING'] = repo_path
257
257
258 return [(k, v) for k, v in env.items()]
258 return [(k, v) for k, v in env.items()]
259
259
260
260
261 def repo_size(ui, repo, **kwargs):
261 def repo_size(ui, repo, **kwargs):
262 extras = _extras_from_ui(ui)
262 extras = _extras_from_ui(ui)
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264
264
265
265
266 def pre_pull(ui, repo, **kwargs):
266 def pre_pull(ui, repo, **kwargs):
267 extras = _extras_from_ui(ui)
267 extras = _extras_from_ui(ui)
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269
269
270
270
271 def pre_pull_ssh(ui, repo, **kwargs):
271 def pre_pull_ssh(ui, repo, **kwargs):
272 extras = _extras_from_ui(ui)
272 extras = _extras_from_ui(ui)
273 if extras and extras.get('SSH'):
273 if extras and extras.get('SSH'):
274 return pre_pull(ui, repo, **kwargs)
274 return pre_pull(ui, repo, **kwargs)
275 return 0
275 return 0
276
276
277
277
278 def post_pull(ui, repo, **kwargs):
278 def post_pull(ui, repo, **kwargs):
279 extras = _extras_from_ui(ui)
279 extras = _extras_from_ui(ui)
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281
281
282
282
283 def post_pull_ssh(ui, repo, **kwargs):
283 def post_pull_ssh(ui, repo, **kwargs):
284 extras = _extras_from_ui(ui)
284 extras = _extras_from_ui(ui)
285 if extras and extras.get('SSH'):
285 if extras and extras.get('SSH'):
286 return post_pull(ui, repo, **kwargs)
286 return post_pull(ui, repo, **kwargs)
287 return 0
287 return 0
288
288
289
289
290 def pre_push(ui, repo, node=None, **kwargs):
290 def pre_push(ui, repo, node=None, **kwargs):
291 """
291 """
292 Mercurial pre_push hook
292 Mercurial pre_push hook
293 """
293 """
294 extras = _extras_from_ui(ui)
294 extras = _extras_from_ui(ui)
295 detect_force_push = extras.get('detect_force_push')
295 detect_force_push = extras.get('detect_force_push')
296
296
297 rev_data = []
297 rev_data = []
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 branches = collections.defaultdict(list)
299 branches = collections.defaultdict(list)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 for commit_id, branch in commits:
301 for commit_id, branch in commits:
302 branches[branch].append(commit_id)
302 branches[branch].append(commit_id)
303
303
304 for branch, commits in branches.items():
304 for branch, commits in branches.items():
305 old_rev = kwargs.get('node_last') or commits[0]
305 old_rev = kwargs.get('node_last') or commits[0]
306 rev_data.append({
306 rev_data.append({
307 'total_commits': len(commits),
307 'total_commits': len(commits),
308 'old_rev': old_rev,
308 'old_rev': old_rev,
309 'new_rev': commits[-1],
309 'new_rev': commits[-1],
310 'ref': '',
310 'ref': '',
311 'type': 'branch',
311 'type': 'branch',
312 'name': branch,
312 'name': branch,
313 })
313 })
314
314
315 for push_ref in rev_data:
315 for push_ref in rev_data:
316 push_ref['multiple_heads'] = _heads
316 push_ref['multiple_heads'] = _heads
317
317
318 repo_path = os.path.join(
318 repo_path = os.path.join(
319 extras.get('repo_store', ''), extras.get('repository', ''))
319 extras.get('repo_store', ''), extras.get('repository', ''))
320 push_ref['hg_env'] = _get_hg_env(
320 push_ref['hg_env'] = _get_hg_env(
321 old_rev=push_ref['old_rev'],
321 old_rev=push_ref['old_rev'],
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 repo_path=repo_path)
323 repo_path=repo_path)
324
324
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 extras['commit_ids'] = rev_data
326 extras['commit_ids'] = rev_data
327
327
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329
329
330
330
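For illustration, a single entry of the `rev_data` list assembled above could look like the following; all hash and path values are made up:

    example_push_ref = {
        'total_commits': 2,
        'old_rev': '6f2a...40-char hex...',   # first commit of the pushed range (or node_last)
        'new_rev': 'c41d...40-char hex...',   # tip of the pushed range
        'ref': '',
        'type': 'branch',
        'name': 'default',
        'multiple_heads': [],                 # filled when detect_force_push is enabled
        'hg_env': [('HG_NODE', '6f2a...'), ('HG_NODE_LAST', 'c41d...'),
                   ('HG_TXNID', 'TXN:abcd'), ('HG_PENDING', '/repo/store/some/repo')],
    }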
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 extras = _extras_from_ui(ui)
332 extras = _extras_from_ui(ui)
333 if extras.get('SSH'):
333 if extras.get('SSH'):
334 return pre_push(ui, repo, node, **kwargs)
334 return pre_push(ui, repo, node, **kwargs)
335
335
336 return 0
336 return 0
337
337
338
338
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
340 """
340 """
341 Mercurial pre_push hook for SSH
341 Mercurial pre_push hook for SSH
342 """
342 """
343 extras = _extras_from_ui(ui)
343 extras = _extras_from_ui(ui)
344 if extras.get('SSH'):
344 if extras.get('SSH'):
345 permission = extras['SSH_PERMISSIONS']
345 permission = extras['SSH_PERMISSIONS']
346
346
347 if 'repository.write' == permission or 'repository.admin' == permission:
347 if 'repository.write' == permission or 'repository.admin' == permission:
348 return 0
348 return 0
349
349
350 # non-zero ret code
350 # non-zero ret code
351 return 1
351 return 1
352
352
353 return 0
353 return 0
354
354
355
355
356 def post_push(ui, repo, node, **kwargs):
356 def post_push(ui, repo, node, **kwargs):
357 """
357 """
358 Mercurial post_push hook
358 Mercurial post_push hook
359 """
359 """
360 extras = _extras_from_ui(ui)
360 extras = _extras_from_ui(ui)
361
361
362 commit_ids = []
362 commit_ids = []
363 branches = []
363 branches = []
364 bookmarks = []
364 bookmarks = []
365 tags = []
365 tags = []
366
366
367 commits, _heads = _rev_range_hash(repo, node)
367 commits, _heads = _rev_range_hash(repo, node)
368 for commit_id, branch in commits:
368 for commit_id, branch in commits:
369 commit_ids.append(commit_id)
369 commit_ids.append(commit_id)
370 if branch not in branches:
370 if branch not in branches:
371 branches.append(branch)
371 branches.append(branch)
372
372
373 if hasattr(ui, '_rc_pushkey_branches'):
373 if hasattr(ui, '_rc_pushkey_branches'):
374 bookmarks = ui._rc_pushkey_branches
374 bookmarks = ui._rc_pushkey_branches
375
375
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
377 extras['commit_ids'] = commit_ids
377 extras['commit_ids'] = commit_ids
378 extras['new_refs'] = {
378 extras['new_refs'] = {
379 'branches': branches,
379 'branches': branches,
380 'bookmarks': bookmarks,
380 'bookmarks': bookmarks,
381 'tags': tags
381 'tags': tags
382 }
382 }
383
383
384 return _call_hook('post_push', extras, HgMessageWriter(ui))
384 return _call_hook('post_push', extras, HgMessageWriter(ui))
385
385
386
386
387 def post_push_ssh(ui, repo, node, **kwargs):
387 def post_push_ssh(ui, repo, node, **kwargs):
388 """
388 """
389 Mercurial post_push hook for SSH
389 Mercurial post_push hook for SSH
390 """
390 """
391 if _extras_from_ui(ui).get('SSH'):
391 if _extras_from_ui(ui).get('SSH'):
392 return post_push(ui, repo, node, **kwargs)
392 return post_push(ui, repo, node, **kwargs)
393 return 0
393 return 0
394
394
395
395
396 def key_push(ui, repo, **kwargs):
396 def key_push(ui, repo, **kwargs):
397 from vcsserver.hgcompat import get_ctx
397 from vcsserver.hgcompat import get_ctx
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
399 # store new bookmarks in our UI object propagated later to post_push
399 # store new bookmarks in our UI object propagated later to post_push
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
401 return
401 return
402
402
403
403
404 # backward compat
404 # backward compat
405 log_pull_action = post_pull
405 log_pull_action = post_pull
406
406
407 # backward compat
407 # backward compat
408 log_push_action = post_push
408 log_push_action = post_push
409
409
410
410
411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
412 """
412 """
413 Old hook name: keep here for backward compatibility.
413 Old hook name: keep here for backward compatibility.
414
414
415 This is only required when the installed git hooks are not upgraded.
415 This is only required when the installed git hooks are not upgraded.
416 """
416 """
417 pass
417 pass
418
418
419
419
420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
421 """
421 """
422 Old hook name: keep here for backward compatibility.
422 Old hook name: keep here for backward compatibility.
423
423
424 This is only required when the installed git hooks are not upgraded.
424 This is only required when the installed git hooks are not upgraded.
425 """
425 """
426 pass
426 pass
427
427
428
428
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430
430
431
431
432 def git_pre_pull(extras):
432 def git_pre_pull(extras):
433 """
433 """
434 Pre pull hook.
434 Pre pull hook.
435
435
436 :param extras: dictionary containing the keys defined in simplevcs
436 :param extras: dictionary containing the keys defined in simplevcs
437 :type extras: dict
437 :type extras: dict
438
438
439 :return: status code of the hook. 0 for success.
439 :return: status code of the hook. 0 for success.
440 :rtype: int
440 :rtype: int
441 """
441 """
442 if 'pull' not in extras['hooks']:
442 if 'pull' not in extras['hooks']:
443 return HookResponse(0, '')
443 return HookResponse(0, '')
444
444
445 stdout = io.BytesIO()
445 stdout = io.BytesIO()
446 try:
446 try:
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
448 except Exception as error:
448 except Exception as error:
449 status = 128
449 status = 128
450 stdout.write('ERROR: %s\n' % str(error))
450 stdout.write('ERROR: %s\n' % str(error))
451
451
452 return HookResponse(status, stdout.getvalue())
452 return HookResponse(status, stdout.getvalue())
453
453
454
454
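Callers consume the result as a plain (status, output) pair; a minimal example using only the functions defined in this module:

    resp = git_pre_pull({'hooks': []})   # 'pull' hook not enabled, so it short-circuits
    # resp == HookResponse(status=0, output='')
    status, output = resp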
455 def git_post_pull(extras):
455 def git_post_pull(extras):
456 """
456 """
457 Post pull hook.
457 Post pull hook.
458
458
459 :param extras: dictionary containing the keys defined in simplevcs
459 :param extras: dictionary containing the keys defined in simplevcs
460 :type extras: dict
460 :type extras: dict
461
461
462 :return: status code of the hook. 0 for success.
462 :return: status code of the hook. 0 for success.
463 :rtype: int
463 :rtype: int
464 """
464 """
465 if 'pull' not in extras['hooks']:
465 if 'pull' not in extras['hooks']:
466 return HookResponse(0, '')
466 return HookResponse(0, '')
467
467
468 stdout = io.BytesIO()
468 stdout = io.BytesIO()
469 try:
469 try:
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 except Exception as error:
471 except Exception as error:
472 status = 128
472 status = 128
473 stdout.write('ERROR: %s\n' % error)
473 stdout.write('ERROR: %s\n' % error)
474
474
475 return HookResponse(status, stdout.getvalue())
475 return HookResponse(status, stdout.getvalue())
476
476
477
477
478 def _parse_git_ref_lines(revision_lines):
478 def _parse_git_ref_lines(revision_lines):
479 rev_data = []
479 rev_data = []
480 for revision_line in revision_lines or []:
480 for revision_line in revision_lines or []:
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 ref_data = ref.split('/', 2)
482 ref_data = ref.split('/', 2)
483 if ref_data[1] in ('tags', 'heads'):
483 if ref_data[1] in ('tags', 'heads'):
484 rev_data.append({
484 rev_data.append({
485 # NOTE(marcink):
485 # NOTE(marcink):
486 # we're unable to tell total_commits for git at this point
486 # we're unable to tell total_commits for git at this point
487 # but we set the variable for consistency with GIT
487 # but we set the variable for consistency with GIT
488 'total_commits': -1,
488 'total_commits': -1,
489 'old_rev': old_rev,
489 'old_rev': old_rev,
490 'new_rev': new_rev,
490 'new_rev': new_rev,
491 'ref': ref,
491 'ref': ref,
492 'type': ref_data[1],
492 'type': ref_data[1],
493 'name': ref_data[2],
493 'name': ref_data[2],
494 })
494 })
495 return rev_data
495 return rev_data
496
496
497
497
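As a quick sanity check, this is what the parser above yields for a typical pre-receive input line (the hashes are placeholders):

    line = '0' * 40 + ' ' + 'b' * 40 + ' refs/heads/feature-x'
    parsed = _parse_git_ref_lines([line])
    # parsed == [{'total_commits': -1,
    #             'old_rev': '0' * 40,        # all zeros means the ref is being created
    #             'new_rev': 'b' * 40,
    #             'ref': 'refs/heads/feature-x',
    #             'type': 'heads',
    #             'name': 'feature-x'}]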
498 def git_pre_receive(unused_repo_path, revision_lines, env):
498 def git_pre_receive(unused_repo_path, revision_lines, env):
499 """
499 """
500 Pre push hook.
500 Pre push hook.
501
501
502 :param extras: dictionary containing the keys defined in simplevcs
502 :param extras: dictionary containing the keys defined in simplevcs
503 :type extras: dict
503 :type extras: dict
504
504
505 :return: status code of the hook. 0 for success.
505 :return: status code of the hook. 0 for success.
506 :rtype: int
506 :rtype: int
507 """
507 """
508 extras = json.loads(env['RC_SCM_DATA'])
508 extras = json.loads(env['RC_SCM_DATA'])
509 rev_data = _parse_git_ref_lines(revision_lines)
509 rev_data = _parse_git_ref_lines(revision_lines)
510 if 'push' not in extras['hooks']:
510 if 'push' not in extras['hooks']:
511 return 0
511 return 0
512 empty_commit_id = '0' * 40
512 empty_commit_id = '0' * 40
513
513
514 detect_force_push = extras.get('detect_force_push')
514 detect_force_push = extras.get('detect_force_push')
515
515
516 for push_ref in rev_data:
516 for push_ref in rev_data:
517 # store our git-env which holds the temp store
517 # store our git-env which holds the temp store
518 push_ref['git_env'] = _get_git_env()
518 push_ref['git_env'] = _get_git_env()
519 push_ref['pruned_sha'] = ''
519 push_ref['pruned_sha'] = ''
520 if not detect_force_push:
520 if not detect_force_push:
521 # don't check for forced-push when we don't need to
521 # don't check for forced-push when we don't need to
522 continue
522 continue
523
523
524 type_ = push_ref['type']
524 type_ = push_ref['type']
525 new_branch = push_ref['old_rev'] == empty_commit_id
525 new_branch = push_ref['old_rev'] == empty_commit_id
526 delete_branch = push_ref['new_rev'] == empty_commit_id
526 delete_branch = push_ref['new_rev'] == empty_commit_id
527 if type_ == 'heads' and not (new_branch or delete_branch):
527 if type_ == 'heads' and not (new_branch or delete_branch):
528 old_rev = push_ref['old_rev']
528 old_rev = push_ref['old_rev']
529 new_rev = push_ref['new_rev']
529 new_rev = push_ref['new_rev']
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
531 stdout, stderr = subprocessio.run_command(
531 stdout, stderr = subprocessio.run_command(
532 cmd, env=os.environ.copy())
532 cmd, env=os.environ.copy())
533 # a non-empty output means some objects are no longer reachable, i.e. this was a forced push
533 # a non-empty output means some objects are no longer reachable, i.e. this was a forced push
534 if stdout:
534 if stdout:
535 push_ref['pruned_sha'] = stdout.splitlines()
535 push_ref['pruned_sha'] = stdout.splitlines()
536
536
537 extras['hook_type'] = 'pre_receive'
537 extras['hook_type'] = 'pre_receive'
538 extras['commit_ids'] = rev_data
538 extras['commit_ids'] = rev_data
539 return _call_hook('pre_push', extras, GitMessageWriter())
539 return _call_hook('pre_push', extras, GitMessageWriter())
540
540
541
541
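The force-push detection above reduces to asking git which commits reachable from the old tip are no longer reachable from the new tip. A standalone sketch with placeholder hashes (it only produces output inside a real repository containing both revisions):

    import subprocess

    old_rev, new_rev = 'aaa111' + '0' * 34, 'bbb222' + '0' * 34   # placeholders
    out = subprocess.check_output(['git', 'rev-list', old_rev, '^{}'.format(new_rev)])
    forced_push = bool(out.strip())   # non-empty output => commits would be discarded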
542 def git_post_receive(unused_repo_path, revision_lines, env):
542 def git_post_receive(unused_repo_path, revision_lines, env):
543 """
543 """
544 Post push hook.
544 Post push hook.
545
545
546 :param extras: dictionary containing the keys defined in simplevcs
546 :param extras: dictionary containing the keys defined in simplevcs
547 :type extras: dict
547 :type extras: dict
548
548
549 :return: status code of the hook. 0 for success.
549 :return: status code of the hook. 0 for success.
550 :rtype: int
550 :rtype: int
551 """
551 """
552 extras = json.loads(env['RC_SCM_DATA'])
552 extras = json.loads(env['RC_SCM_DATA'])
553 if 'push' not in extras['hooks']:
553 if 'push' not in extras['hooks']:
554 return 0
554 return 0
555
555
556 rev_data = _parse_git_ref_lines(revision_lines)
556 rev_data = _parse_git_ref_lines(revision_lines)
557
557
558 git_revs = []
558 git_revs = []
559
559
560 # N.B.(skreft): it is ok to just call git, as git before calling a
560 # N.B.(skreft): it is ok to just call git, as git before calling a
561 # subcommand sets the PATH environment variable so that it points to the
561 # subcommand sets the PATH environment variable so that it points to the
562 # correct version of the git executable.
562 # correct version of the git executable.
563 empty_commit_id = '0' * 40
563 empty_commit_id = '0' * 40
564 branches = []
564 branches = []
565 tags = []
565 tags = []
566 for push_ref in rev_data:
566 for push_ref in rev_data:
567 type_ = push_ref['type']
567 type_ = push_ref['type']
568
568
569 if type_ == 'heads':
569 if type_ == 'heads':
570 if push_ref['old_rev'] == empty_commit_id:
570 if push_ref['old_rev'] == empty_commit_id:
571 # starting new branch case
571 # starting new branch case
572 if push_ref['name'] not in branches:
572 if push_ref['name'] not in branches:
573 branches.append(push_ref['name'])
573 branches.append(push_ref['name'])
574
574
575 # Fix up head revision if needed
575 # Fix up head revision if needed
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
577 try:
577 try:
578 subprocessio.run_command(cmd, env=os.environ.copy())
578 subprocessio.run_command(cmd, env=os.environ.copy())
579 except Exception:
579 except Exception:
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
581 'refs/heads/%s' % push_ref['name']]
581 'refs/heads/%s' % push_ref['name']]
582 print("Setting default branch to %s" % push_ref['name'])
582 print("Setting default branch to %s" % push_ref['name'])
583 subprocessio.run_command(cmd, env=os.environ.copy())
583 subprocessio.run_command(cmd, env=os.environ.copy())
584
584
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
586 '--format=%(refname)', 'refs/heads/*']
586 '--format=%(refname)', 'refs/heads/*']
587 stdout, stderr = subprocessio.run_command(
587 stdout, stderr = subprocessio.run_command(
588 cmd, env=os.environ.copy())
588 cmd, env=os.environ.copy())
589 heads = stdout
589 heads = stdout
590 heads = heads.replace(push_ref['ref'], '')
590 heads = heads.replace(push_ref['ref'], '')
591 heads = ' '.join(head for head
591 heads = ' '.join(head for head
592 in heads.splitlines() if head) or '.'
592 in heads.splitlines() if head) or '.'
593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
594 '--pretty=format:%H', '--', push_ref['new_rev'],
594 '--pretty=format:%H', '--', push_ref['new_rev'],
595 '--not', heads]
595 '--not', heads]
596 stdout, stderr = subprocessio.run_command(
596 stdout, stderr = subprocessio.run_command(
597 cmd, env=os.environ.copy())
597 cmd, env=os.environ.copy())
598 git_revs.extend(stdout.splitlines())
598 git_revs.extend(stdout.splitlines())
599 elif push_ref['new_rev'] == empty_commit_id:
599 elif push_ref['new_rev'] == empty_commit_id:
600 # delete branch case
600 # delete branch case
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
602 else:
602 else:
603 if push_ref['name'] not in branches:
603 if push_ref['name'] not in branches:
604 branches.append(push_ref['name'])
604 branches.append(push_ref['name'])
605
605
606 cmd = [settings.GIT_EXECUTABLE, 'log',
606 cmd = [settings.GIT_EXECUTABLE, 'log',
607 '{old_rev}..{new_rev}'.format(**push_ref),
607 '{old_rev}..{new_rev}'.format(**push_ref),
608 '--reverse', '--pretty=format:%H']
608 '--reverse', '--pretty=format:%H']
609 stdout, stderr = subprocessio.run_command(
609 stdout, stderr = subprocessio.run_command(
610 cmd, env=os.environ.copy())
610 cmd, env=os.environ.copy())
611 git_revs.extend(stdout.splitlines())
611 git_revs.extend(stdout.splitlines())
612 elif type_ == 'tags':
612 elif type_ == 'tags':
613 if push_ref['name'] not in tags:
613 if push_ref['name'] not in tags:
614 tags.append(push_ref['name'])
614 tags.append(push_ref['name'])
615 git_revs.append('tag=>%s' % push_ref['name'])
615 git_revs.append('tag=>%s' % push_ref['name'])
616
616
617 extras['hook_type'] = 'post_receive'
617 extras['hook_type'] = 'post_receive'
618 extras['commit_ids'] = git_revs
618 extras['commit_ids'] = git_revs
619 extras['new_refs'] = {
619 extras['new_refs'] = {
620 'branches': branches,
620 'branches': branches,
621 'bookmarks': [],
621 'bookmarks': [],
622 'tags': tags,
622 'tags': tags,
623 }
623 }
624
624
625 if 'repo_size' in extras['hooks']:
625 if 'repo_size' in extras['hooks']:
626 try:
626 try:
627 _call_hook('repo_size', extras, GitMessageWriter())
627 _call_hook('repo_size', extras, GitMessageWriter())
628 except Exception:
628 except Exception:
629 pass
629 pass
630
630
631 return _call_hook('post_push', extras, GitMessageWriter())
631 return _call_hook('post_push', extras, GitMessageWriter())
632
632
633
633
634 def _get_extras_from_txn_id(path, txn_id):
634 def _get_extras_from_txn_id(path, txn_id):
635 extras = {}
635 extras = {}
636 try:
636 try:
637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
638 '-t', txn_id,
638 '-t', txn_id,
639 '--revprop', path, 'rc-scm-extras']
639 '--revprop', path, 'rc-scm-extras']
640 stdout, stderr = subprocessio.run_command(
640 stdout, stderr = subprocessio.run_command(
641 cmd, env=os.environ.copy())
641 cmd, env=os.environ.copy())
642 extras = json.loads(base64.urlsafe_b64decode(stdout))
642 extras = json.loads(base64.urlsafe_b64decode(stdout))
643 except Exception:
643 except Exception:
644 log.exception('Failed to extract extras info from txn_id')
644 log.exception('Failed to extract extras info from txn_id')
645
645
646 return extras
646 return extras
647
647
648
648
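The `rc-scm-extras` revision property read above is URL-safe base64 over a JSON document; decoding it by hand is straightforward (the payload below is illustrative):

    import base64
    import json

    payload = {'repository': 'some/repo', 'hooks': ['push']}   # illustrative extras
    raw = base64.urlsafe_b64encode(json.dumps(payload).encode('utf-8'))
    extras = json.loads(base64.urlsafe_b64decode(raw))
    assert extras == payload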
649 def _get_extras_from_commit_id(commit_id, path):
649 def _get_extras_from_commit_id(commit_id, path):
650 extras = {}
650 extras = {}
651 try:
651 try:
652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
653 '-r', commit_id,
653 '-r', commit_id,
654 '--revprop', path, 'rc-scm-extras']
654 '--revprop', path, 'rc-scm-extras']
655 stdout, stderr = subprocessio.run_command(
655 stdout, stderr = subprocessio.run_command(
656 cmd, env=os.environ.copy())
656 cmd, env=os.environ.copy())
657 extras = json.loads(base64.urlsafe_b64decode(stdout))
657 extras = json.loads(base64.urlsafe_b64decode(stdout))
658 except Exception:
658 except Exception:
659 log.exception('Failed to extract extras info from commit_id')
659 log.exception('Failed to extract extras info from commit_id')
660
660
661 return extras
661 return extras
662
662
663
663
664 def svn_pre_commit(repo_path, commit_data, env):
664 def svn_pre_commit(repo_path, commit_data, env):
665 path, txn_id = commit_data
665 path, txn_id = commit_data
666 branches = []
666 branches = []
667 tags = []
667 tags = []
668
668
669 if env.get('RC_SCM_DATA'):
669 if env.get('RC_SCM_DATA'):
670 extras = json.loads(env['RC_SCM_DATA'])
670 extras = json.loads(env['RC_SCM_DATA'])
671 else:
671 else:
672 # fallback method to read from TXN-ID stored data
672 # fallback method to read from TXN-ID stored data
673 extras = _get_extras_from_txn_id(path, txn_id)
673 extras = _get_extras_from_txn_id(path, txn_id)
674 if not extras:
674 if not extras:
675 return 0
675 return 0
676
676
677 extras['hook_type'] = 'pre_commit'
677 extras['hook_type'] = 'pre_commit'
678 extras['commit_ids'] = [txn_id]
678 extras['commit_ids'] = [txn_id]
679 extras['txn_id'] = txn_id
679 extras['txn_id'] = txn_id
680 extras['new_refs'] = {
680 extras['new_refs'] = {
681 'total_commits': 1,
681 'total_commits': 1,
682 'branches': branches,
682 'branches': branches,
683 'bookmarks': [],
683 'bookmarks': [],
684 'tags': tags,
684 'tags': tags,
685 }
685 }
686
686
687 return _call_hook('pre_push', extras, SvnMessageWriter())
687 return _call_hook('pre_push', extras, SvnMessageWriter())
688
688
689
689
690 def svn_post_commit(repo_path, commit_data, env):
690 def svn_post_commit(repo_path, commit_data, env):
691 """
691 """
692 commit_data is path, rev, txn_id
692 commit_data is path, rev, txn_id
693 """
693 """
694 if len(commit_data) == 3:
694 if len(commit_data) == 3:
695 path, commit_id, txn_id = commit_data
695 path, commit_id, txn_id = commit_data
696 elif len(commit_data) == 2:
696 elif len(commit_data) == 2:
697 log.error('Failed to extract txn_id from commit_data using legacy method. '
697 log.error('Failed to extract txn_id from commit_data using legacy method. '
698 'Some functionality might be limited')
698 'Some functionality might be limited')
699 path, commit_id = commit_data
699 path, commit_id = commit_data
700 txn_id = None
700 txn_id = None
701
701
702 branches = []
702 branches = []
703 tags = []
703 tags = []
704
704
705 if env.get('RC_SCM_DATA'):
705 if env.get('RC_SCM_DATA'):
706 extras = json.loads(env['RC_SCM_DATA'])
706 extras = json.loads(env['RC_SCM_DATA'])
707 else:
707 else:
708 # fallback method to read from TXN-ID stored data
708 # fallback method to read from TXN-ID stored data
709 extras = _get_extras_from_commit_id(commit_id, path)
709 extras = _get_extras_from_commit_id(commit_id, path)
710 if not extras:
710 if not extras:
711 return 0
711 return 0
712
712
713 extras['hook_type'] = 'post_commit'
713 extras['hook_type'] = 'post_commit'
714 extras['commit_ids'] = [commit_id]
714 extras['commit_ids'] = [commit_id]
715 extras['txn_id'] = txn_id
715 extras['txn_id'] = txn_id
716 extras['new_refs'] = {
716 extras['new_refs'] = {
717 'branches': branches,
717 'branches': branches,
718 'bookmarks': [],
718 'bookmarks': [],
719 'tags': tags,
719 'tags': tags,
720 'total_commits': 1,
720 'total_commits': 1,
721 }
721 }
722
722
723 if 'repo_size' in extras['hooks']:
723 if 'repo_size' in extras['hooks']:
724 try:
724 try:
725 _call_hook('repo_size', extras, SvnMessageWriter())
725 _call_hook('repo_size', extras, SvnMessageWriter())
726 except Exception:
726 except Exception:
727 pass
727 pass
728
728
729 return _call_hook('post_push', extras, SvnMessageWriter())
729 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,386 +1,386 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import simplejson as json
24 import simplejson as json
25 import dulwich.protocol
25 import dulwich.protocol
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver import hooks, subprocessio
28 from vcsserver import hooks, subprocessio
29
29
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class FileWrapper(object):
34 class FileWrapper(object):
35 """File wrapper that ensures how much data is read from it."""
35 """File wrapper that ensures how much data is read from it."""
36
36
37 def __init__(self, fd, content_length):
37 def __init__(self, fd, content_length):
38 self.fd = fd
38 self.fd = fd
39 self.content_length = content_length
39 self.content_length = content_length
40 self.remain = content_length
40 self.remain = content_length
41
41
42 def read(self, size):
42 def read(self, size):
43 if size <= self.remain:
43 if size <= self.remain:
44 try:
44 try:
45 data = self.fd.read(size)
45 data = self.fd.read(size)
46 except socket.error:
46 except socket.error:
47 raise IOError(self)
47 raise IOError(self)
48 self.remain -= size
48 self.remain -= size
49 elif self.remain:
49 elif self.remain:
50 data = self.fd.read(self.remain)
50 data = self.fd.read(self.remain)
51 self.remain = 0
51 self.remain = 0
52 else:
52 else:
53 data = None
53 data = None
54 return data
54 return data
55
55
56 def __repr__(self):
56 def __repr__(self):
57 return '<FileWrapper %s len: %s, read: %s>' % (
57 return '<FileWrapper %s len: %s, read: %s>' % (
58 self.fd, self.content_length, self.content_length - self.remain
58 self.fd, self.content_length, self.content_length - self.remain
59 )
59 )
60
60
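
FileWrapper simply stops handing out data once content_length bytes have been consumed, which is how a POST body of known size is passed to the git subprocess without over-reading the socket. A small usage sketch, assuming an in-memory stream stands in for the WSGI input (io.BytesIO is only used here for illustration):

    import io

    body = io.BytesIO(b'0123456789abcdef')        # 16 bytes available on the "socket"
    wrapped = FileWrapper(body, content_length=10)

    print(wrapped.read(6))   # b'012345' -> 6 bytes served, 4 remaining
    print(wrapped.read(6))   # b'6789'   -> only the remaining 4 are returned
    print(wrapped.read(6))   # None      -> the declared length is exhausted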
61
61
62 class GitRepository(object):
62 class GitRepository(object):
63 """WSGI app for handling Git smart protocol endpoints."""
63 """WSGI app for handling Git smart protocol endpoints."""
64
64
65 git_folder_signature = frozenset(
65 git_folder_signature = frozenset(
66 ('config', 'head', 'info', 'objects', 'refs'))
66 ('config', 'head', 'info', 'objects', 'refs'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 valid_accepts = frozenset(('application/x-%s-result' %
68 valid_accepts = frozenset(('application/x-%s-result' %
69 c for c in commands))
69 c for c in commands))
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
77
77
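
The comment above can be checked directly: a packfile trailer is the SHA-1 of everything that precedes it, and an empty pack is just the 12-byte header ('PACK', version 2, zero objects) plus that checksum. A quick verification sketch using only the standard library:

    import hashlib

    header = b'PACK\x00\x00\x00\x02\x00\x00\x00\x00'   # magic, version 2, object count 0
    print(hashlib.sha1(header).hexdigest())
    # -> 029d08823bd8a8eab510ad6ac75c823cfd3ed31e, the 20 trailer bytes of EMPTY_PACK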
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
79 extras):
79 extras):
80 files = frozenset(f.lower() for f in os.listdir(content_path))
80 files = frozenset(f.lower() for f in os.listdir(content_path))
81 valid_dir_signature = self.git_folder_signature.issubset(files)
81 valid_dir_signature = self.git_folder_signature.issubset(files)
82
82
83 if not valid_dir_signature:
83 if not valid_dir_signature:
84 raise OSError('%s missing git signature' % content_path)
84 raise OSError('%s missing git signature' % content_path)
85
85
86 self.content_path = content_path
86 self.content_path = content_path
87 self.repo_name = repo_name
87 self.repo_name = repo_name
88 self.extras = extras
88 self.extras = extras
89 self.git_path = git_path
89 self.git_path = git_path
90 self.update_server_info = update_server_info
90 self.update_server_info = update_server_info
91
91
92 def _get_fixedpath(self, path):
92 def _get_fixedpath(self, path):
93 """
93 """
94 Small fix for repo_path
94 Small fix for repo_path
95
95
96 :param path:
96 :param path:
97 """
97 """
98 path = path.split(self.repo_name, 1)[-1]
98 path = path.split(self.repo_name, 1)[-1]
99 if path.startswith('.git'):
99 if path.startswith('.git'):
100 # for bare repos we still get the .git prefix inside, we skip it
100 # for bare repos we still get the .git prefix inside, we skip it
101 # here, and remove from the service command
101 # here, and remove from the service command
102 path = path[4:]
102 path = path[4:]
103
103
104 return path.strip('/')
104 return path.strip('/')
105
105
106 def inforefs(self, request, unused_environ):
106 def inforefs(self, request, unused_environ):
107 """
107 """
108 WSGI Response producer for HTTP GET Git Smart
108 WSGI Response producer for HTTP GET Git Smart
109 HTTP /info/refs request.
109 HTTP /info/refs request.
110 """
110 """
111
111
112 git_command = request.GET.get('service')
112 git_command = request.GET.get('service')
113 if git_command not in self.commands:
113 if git_command not in self.commands:
114 log.debug('command %s not allowed', git_command)
114 log.debug('command %s not allowed', git_command)
115 return exc.HTTPForbidden()
115 return exc.HTTPForbidden()
116
116
117 # please, resist the urge to add '\n' to git capture and increment
117 # please, resist the urge to add '\n' to git capture and increment
118 # line count by 1.
118 # line count by 1.
119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
120 # a part of protocol.
120 # a part of protocol.
121 # The code in Git client not only does NOT need '\n', but actually
121 # The code in Git client not only does NOT need '\n', but actually
122 # blows up if you sprinkle "flush" (0000) as "0001\n".
122 # blows up if you sprinkle "flush" (0000) as "0001\n".
123 # It reads binary, per number of bytes specified.
123 # It reads binary, per number of bytes specified.
124 # if you do add '\n' as part of data, count it.
124 # if you do add '\n' as part of data, count it.
125 server_advert = '# service=%s\n' % git_command
125 server_advert = '# service=%s\n' % git_command
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
127 try:
127 try:
128 gitenv = dict(os.environ)
128 gitenv = dict(os.environ)
129 # forget all configs
129 # forget all configs
130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
131 command = [self.git_path, git_command[4:], '--stateless-rpc',
131 command = [self.git_path, git_command[4:], '--stateless-rpc',
132 '--advertise-refs', self.content_path]
132 '--advertise-refs', self.content_path]
133 out = subprocessio.SubprocessIOChunker(
133 out = subprocessio.SubprocessIOChunker(
134 command,
134 command,
135 env=gitenv,
135 env=gitenv,
136 starting_values=[packet_len + server_advert + '0000'],
136 starting_values=[packet_len + server_advert + '0000'],
137 shell=False
137 shell=False
138 )
138 )
139 except EnvironmentError:
139 except EnvironmentError:
140 log.exception('Error processing command')
140 log.exception('Error processing command')
141 raise exc.HTTPExpectationFailed()
141 raise exc.HTTPExpectationFailed()
142
142
143 resp = Response()
143 resp = Response()
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
145 resp.charset = None
145 resp.charset = None
146 resp.app_iter = out
146 resp.app_iter = out
147
147
148 return resp
148 return resp
149
149
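
The pkt-line framing used for the service advertisement above prefixes each payload with its own length (payload plus the four length digits) as a zero-padded lowercase hex string, and closes the section with the '0000' flush packet. A worked example for git-upload-pack, mirroring the packet_len expression in the code:

    server_advert = '# service=git-upload-pack\n'        # 26 characters
    packet_len = hex(len(server_advert) + 4)[2:].rjust(4, '0')
    print(packet_len)                                    # '001e'  (26 + 4 = 30 = 0x1e)
    print(packet_len + server_advert + '0000')
    # '001e# service=git-upload-pack\n0000' -- the first bytes a Git client sees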
150 def _get_want_capabilities(self, request):
150 def _get_want_capabilities(self, request):
151 """Read the capabilities found in the first want line of the request."""
151 """Read the capabilities found in the first want line of the request."""
152 pos = request.body_file_seekable.tell()
152 pos = request.body_file_seekable.tell()
153 first_line = request.body_file_seekable.readline()
153 first_line = request.body_file_seekable.readline()
154 request.body_file_seekable.seek(pos)
154 request.body_file_seekable.seek(pos)
155
155
156 return frozenset(
156 return frozenset(
157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158
158
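
The capabilities a client asks for ride on its first 'want' line, after the object id; the second element of the pair returned by dulwich's extract_want_line_capabilities holds those capability tokens, which is what gets wrapped in frozenset() above. A small sketch with a made-up want line; the SHA and capability names are illustrative, and depending on the installed dulwich version the line and tokens may be bytes rather than str:

    import dulwich.protocol

    # One 'want' pkt-line payload roughly as a client would send it.
    first_line = 'want 1111111111111111111111111111111111111111 side-band-64k thin-pack\n'
    wants, caps = dulwich.protocol.extract_want_line_capabilities(first_line)
    print(caps)   # expected to contain 'side-band-64k' and 'thin-pack'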
159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 """
160 """
161 Construct a response with an empty PACK file.
161 Construct a response with an empty PACK file.
162
162
163 We use an empty PACK file, as that would trigger the failure of the pull
163 We use an empty PACK file, as that would trigger the failure of the pull
164 or clone command.
164 or clone command.
165
165
166 We also print in the error output a message explaining why the command
166 We also print in the error output a message explaining why the command
167 was aborted.
167 was aborted.
168
168
169 If, additionally, the client accepts messages, we send it the output
169 If, additionally, the client accepts messages, we send it the output
170 of the pre-pull hook.
170 of the pre-pull hook.
171
171
172 Note that for clients not supporting side-band we just send them the
172 Note that for clients not supporting side-band we just send them the
173 empty PACK file.
173 empty PACK file.
174 """
174 """
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
176 response = []
176 response = []
177 proto = dulwich.protocol.Protocol(None, response.append)
177 proto = dulwich.protocol.Protocol(None, response.append)
178 proto.write_pkt_line('NAK\n')
178 proto.write_pkt_line('NAK\n')
179 self._write_sideband_to_proto(pre_pull_messages, proto,
179 self._write_sideband_to_proto(pre_pull_messages, proto,
180 capabilities)
180 capabilities)
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
182 # produces a fatal error in the client:
182 # produces a fatal error in the client:
183 # fatal: error in sideband demultiplexer
183 # fatal: error in sideband demultiplexer
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
185 proto.write_sideband(1, self.EMPTY_PACK)
185 proto.write_sideband(1, self.EMPTY_PACK)
186
186
187 # writes 0000
187 # writes 0000
188 proto.write_pkt_line(None)
188 proto.write_pkt_line(None)
189
189
190 return response
190 return response
191 else:
191 else:
192 return [self.EMPTY_PACK]
192 return [self.EMPTY_PACK]
193
193
194 def _write_sideband_to_proto(self, data, proto, capabilities):
194 def _write_sideband_to_proto(self, data, proto, capabilities):
195 """
195 """
196 Write the data to the proto's sideband number 2.
196 Write the data to the proto's sideband number 2.
197
197
198 We do not use dulwich's write_sideband directly as it only supports
198 We do not use dulwich's write_sideband directly as it only supports
199 side-band-64k.
199 side-band-64k.
200 """
200 """
201 if not data:
201 if not data:
202 return
202 return
203
203
204 # N.B.(skreft): The values below are explained in the pack protocol
204 # N.B.(skreft): The values below are explained in the pack protocol
205 # documentation, section Packfile Data.
205 # documentation, section Packfile Data.
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
207 if 'side-band-64k' in capabilities:
207 if 'side-band-64k' in capabilities:
208 chunk_size = 65515
208 chunk_size = 65515
209 elif 'side-band' in capabilities:
209 elif 'side-band' in capabilities:
210 chunk_size = 995
210 chunk_size = 995
211 else:
211 else:
212 return
212 return
213
213
214 chunker = (
214 chunker = (
215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
215 data[i:i + chunk_size] for i in range(0, len(data), chunk_size))
216
216
217 for chunk in chunker:
217 for chunk in chunker:
218 proto.write_sideband(2, chunk)
218 proto.write_sideband(2, chunk)
219
219
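
The method above splits hook output into sideband-2 packets whose payload may not exceed what the negotiated capability allows (65515 bytes for side-band-64k, 995 for plain side-band, leaving room for the 4-byte length and the channel byte). A standalone sketch of the same chunking, with a tiny chunk size so the split is visible:

    data = 'pre-pull hook says hello'
    chunk_size = 8                                   # stand-in for 65515 / 995
    chunks = [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]
    print(chunks)
    # ['pre-pull', ' hook sa', 'ys hello'] -- each entry would go out as one
    # proto.write_sideband(2, chunk) call on the dulwich Protocol object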
220 def _get_messages(self, data, capabilities):
220 def _get_messages(self, data, capabilities):
221 """Return a list with packets for sending data in sideband number 2."""
221 """Return a list with packets for sending data in sideband number 2."""
222 response = []
222 response = []
223 proto = dulwich.protocol.Protocol(None, response.append)
223 proto = dulwich.protocol.Protocol(None, response.append)
224
224
225 self._write_sideband_to_proto(data, proto, capabilities)
225 self._write_sideband_to_proto(data, proto, capabilities)
226
226
227 return response
227 return response
228
228
229 def _inject_messages_to_response(self, response, capabilities,
229 def _inject_messages_to_response(self, response, capabilities,
230 start_messages, end_messages):
230 start_messages, end_messages):
231 """
231 """
232 Given a response list, we inject the pre/post-pull messages.
232 Given a response list, we inject the pre/post-pull messages.
233
233
234 We only inject the messages if the client supports sideband, and the
234 We only inject the messages if the client supports sideband, and the
235 response has the format:
235 response has the format:
236 0008NAK\n...0000
236 0008NAK\n...0000
237
237
238 Note that we do not check the no-progress capability as by default, git
238 Note that we do not check the no-progress capability as by default, git
239 sends it, which effectively would block all messages.
239 sends it, which effectively would block all messages.
240 """
240 """
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
242 return response
242 return response
243
243
244 if not start_messages and not end_messages:
244 if not start_messages and not end_messages:
245 return response
245 return response
246
246
247 # make a list out of response if it's an iterator
247 # make a list out of response if it's an iterator
248 # so we can investigate it for message injection.
248 # so we can investigate it for message injection.
249 if hasattr(response, '__iter__'):
249 if hasattr(response, '__iter__'):
250 response = list(response)
250 response = list(response)
251
251
252 if (not response[0].startswith('0008NAK\n') or
252 if (not response[0].startswith('0008NAK\n') or
253 not response[-1].endswith('0000')):
253 not response[-1].endswith('0000')):
254 return response
254 return response
255
255
256 new_response = ['0008NAK\n']
256 new_response = ['0008NAK\n']
257 new_response.extend(self._get_messages(start_messages, capabilities))
257 new_response.extend(self._get_messages(start_messages, capabilities))
258 if len(response) == 1:
258 if len(response) == 1:
259 new_response.append(response[0][8:-4])
259 new_response.append(response[0][8:-4])
260 else:
260 else:
261 new_response.append(response[0][8:])
261 new_response.append(response[0][8:])
262 new_response.extend(response[1:-1])
262 new_response.extend(response[1:-1])
263 new_response.append(response[-1][:-4])
263 new_response.append(response[-1][:-4])
264 new_response.extend(self._get_messages(end_messages, capabilities))
264 new_response.extend(self._get_messages(end_messages, capabilities))
265 new_response.append('0000')
265 new_response.append('0000')
266
266
267 return new_response
267 return new_response
268
268
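
In other words, a well-formed upload-pack response of the shape '0008NAK\n...0000' is unwrapped, the pre-pull packets go in right after the NAK, the post-pull packets right before the final flush, and the frame is closed again. A toy illustration of the reshaping for the single-element case handled above; the sideband packets are shown as plain placeholders rather than real pkt-lines:

    response = ['0008NAK\n<packfile bytes>0000']          # single-element case
    start = ['<pre-pull sideband packets>']
    end = ['<post-pull sideband packets>']

    new_response = ['0008NAK\n']
    new_response.extend(start)
    new_response.append(response[0][8:-4])                # strip '0008NAK\n' and '0000'
    new_response.extend(end)
    new_response.append('0000')
    print(new_response)
    # ['0008NAK\n', '<pre-pull sideband packets>', '<packfile bytes>',
    #  '<post-pull sideband packets>', '0000']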
269 def backend(self, request, environ):
269 def backend(self, request, environ):
270 """
270 """
271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
272 Reads commands and data from HTTP POST's body.
272 Reads commands and data from HTTP POST's body.
273 returns an iterator obj with contents of git command's
273 returns an iterator obj with contents of git command's
274 response to stdout
274 response to stdout
275 """
275 """
276 # TODO(skreft): think how we could detect an HTTPLockedException, as
276 # TODO(skreft): think how we could detect an HTTPLockedException, as
277 # we probably want to have the same mechanism used by mercurial and
277 # we probably want to have the same mechanism used by mercurial and
278 # simplevcs.
278 # simplevcs.
279 # For that we would need to parse the output of the command looking for
279 # For that we would need to parse the output of the command looking for
280 # some signs of the HTTPLockedError, parse the data and reraise it in
280 # some signs of the HTTPLockedError, parse the data and reraise it in
281 # pygrack. However, that would interfere with the streaming.
281 # pygrack. However, that would interfere with the streaming.
282 #
282 #
283 # Now the output of a blocked push is:
283 # Now the output of a blocked push is:
284 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
284 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
285 # POST git-receive-pack (1047 bytes)
285 # POST git-receive-pack (1047 bytes)
286 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
286 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
287 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
287 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
288 # ! [remote rejected] master -> master (pre-receive hook declined)
288 # ! [remote rejected] master -> master (pre-receive hook declined)
289 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
289 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
290
290
291 git_command = self._get_fixedpath(request.path_info)
291 git_command = self._get_fixedpath(request.path_info)
292 if git_command not in self.commands:
292 if git_command not in self.commands:
293 log.debug('command %s not allowed', git_command)
293 log.debug('command %s not allowed', git_command)
294 return exc.HTTPForbidden()
294 return exc.HTTPForbidden()
295
295
296 capabilities = None
296 capabilities = None
297 if git_command == 'git-upload-pack':
297 if git_command == 'git-upload-pack':
298 capabilities = self._get_want_capabilities(request)
298 capabilities = self._get_want_capabilities(request)
299
299
300 if 'CONTENT_LENGTH' in environ:
300 if 'CONTENT_LENGTH' in environ:
301 inputstream = FileWrapper(request.body_file_seekable,
301 inputstream = FileWrapper(request.body_file_seekable,
302 request.content_length)
302 request.content_length)
303 else:
303 else:
304 inputstream = request.body_file_seekable
304 inputstream = request.body_file_seekable
305
305
306 resp = Response()
306 resp = Response()
307 resp.content_type = ('application/x-%s-result' %
307 resp.content_type = ('application/x-%s-result' %
308 git_command.encode('utf8'))
308 git_command.encode('utf8'))
309 resp.charset = None
309 resp.charset = None
310
310
311 pre_pull_messages = ''
311 pre_pull_messages = ''
312 if git_command == 'git-upload-pack':
312 if git_command == 'git-upload-pack':
313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
314 if status != 0:
314 if status != 0:
315 resp.app_iter = self._build_failed_pre_pull_response(
315 resp.app_iter = self._build_failed_pre_pull_response(
316 capabilities, pre_pull_messages)
316 capabilities, pre_pull_messages)
317 return resp
317 return resp
318
318
319 gitenv = dict(os.environ)
319 gitenv = dict(os.environ)
320 # forget all configs
320 # forget all configs
321 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
321 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
322 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
322 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
323 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
323 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
324 self.content_path]
324 self.content_path]
325 log.debug('handling cmd %s', cmd)
325 log.debug('handling cmd %s', cmd)
326
326
327 out = subprocessio.SubprocessIOChunker(
327 out = subprocessio.SubprocessIOChunker(
328 cmd,
328 cmd,
329 inputstream=inputstream,
329 inputstream=inputstream,
330 env=gitenv,
330 env=gitenv,
331 cwd=self.content_path,
331 cwd=self.content_path,
332 shell=False,
332 shell=False,
333 fail_on_stderr=False,
333 fail_on_stderr=False,
334 fail_on_return_code=False
334 fail_on_return_code=False
335 )
335 )
336
336
337 if self.update_server_info and git_command == 'git-receive-pack':
337 if self.update_server_info and git_command == 'git-receive-pack':
338 # We need to fully consume the iterator here, as the
338 # We need to fully consume the iterator here, as the
339 # update-server-info command needs to be run after the push.
339 # update-server-info command needs to be run after the push.
340 out = list(out)
340 out = list(out)
341
341
342 # Updating refs manually after each push.
342 # Updating refs manually after each push.
343 # This is required as some clients are exposing Git repos internally
343 # This is required as some clients are exposing Git repos internally
344 # with the dumb protocol.
344 # with the dumb protocol.
345 cmd = [self.git_path, 'update-server-info']
345 cmd = [self.git_path, 'update-server-info']
346 log.debug('handling cmd %s', cmd)
346 log.debug('handling cmd %s', cmd)
347 output = subprocessio.SubprocessIOChunker(
347 output = subprocessio.SubprocessIOChunker(
348 cmd,
348 cmd,
349 inputstream=inputstream,
349 inputstream=inputstream,
350 env=gitenv,
350 env=gitenv,
351 cwd=self.content_path,
351 cwd=self.content_path,
352 shell=False,
352 shell=False,
353 fail_on_stderr=False,
353 fail_on_stderr=False,
354 fail_on_return_code=False
354 fail_on_return_code=False
355 )
355 )
356 # Consume all the output so the subprocess finishes
356 # Consume all the output so the subprocess finishes
357 for _ in output:
357 for _ in output:
358 pass
358 pass
359
359
360 if git_command == 'git-upload-pack':
360 if git_command == 'git-upload-pack':
361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
362 resp.app_iter = self._inject_messages_to_response(
362 resp.app_iter = self._inject_messages_to_response(
363 out, capabilities, pre_pull_messages, post_pull_messages)
363 out, capabilities, pre_pull_messages, post_pull_messages)
364 else:
364 else:
365 resp.app_iter = out
365 resp.app_iter = out
366
366
367 return resp
367 return resp
368
368
369 def __call__(self, environ, start_response):
369 def __call__(self, environ, start_response):
370 request = Request(environ)
370 request = Request(environ)
371 _path = self._get_fixedpath(request.path_info)
371 _path = self._get_fixedpath(request.path_info)
372 if _path.startswith('info/refs'):
372 if _path.startswith('info/refs'):
373 app = self.inforefs
373 app = self.inforefs
374 else:
374 else:
375 app = self.backend
375 app = self.backend
376
376
377 try:
377 try:
378 resp = app(request, environ)
378 resp = app(request, environ)
379 except exc.HTTPException as error:
379 except exc.HTTPException as error:
380 log.exception('HTTP Error')
380 log.exception('HTTP Error')
381 resp = error
381 resp = error
382 except Exception:
382 except Exception:
383 log.exception('Unknown error')
383 log.exception('Unknown error')
384 resp = exc.HTTPInternalServerError()
384 resp = exc.HTTPInternalServerError()
385
385
386 return resp(environ, start_response)
386 return resp(environ, start_response)
@@ -1,39 +1,39 @@
1 """
1 """
2 Tests used to profile the HTTP based implementation.
2 Tests used to profile the HTTP based implementation.
3 """
3 """
4
4
5 import pytest
5 import pytest
6 import webtest
6 import webtest
7
7
8 from vcsserver.http_main import main
8 from vcsserver.http_main import main
9
9
10
10
11 @pytest.fixture
11 @pytest.fixture
12 def vcs_app():
12 def vcs_app():
13 stub_settings = {
13 stub_settings = {
14 'dev.use_echo_app': 'true',
14 'dev.use_echo_app': 'true',
15 'locale': 'en_US.UTF-8',
15 'locale': 'en_US.UTF-8',
16 }
16 }
17 vcs_app = main({}, **stub_settings)
17 vcs_app = main({}, **stub_settings)
18 app = webtest.TestApp(vcs_app)
18 app = webtest.TestApp(vcs_app)
19 return app
19 return app
20
20
21
21
22 @pytest.fixture(scope='module')
22 @pytest.fixture(scope='module')
23 def data():
23 def data():
24 one_kb = 'x' * 1024
24 one_kb = 'x' * 1024
25 return one_kb * 1024 * 10
25 return one_kb * 1024 * 10
26
26
27
27
28 def test_http_app_streaming_with_data(data, repeat, vcs_app):
28 def test_http_app_streaming_with_data(data, repeat, vcs_app):
29 app = vcs_app
29 app = vcs_app
30 for x in xrange(repeat / 10):
30 for x in range(repeat // 10):
31 response = app.post('/stream/git/', params=data)
31 response = app.post('/stream/git/', params=data)
32 assert response.status_code == 200
32 assert response.status_code == 200
33
33
34
34
35 def test_http_app_streaming_no_data(repeat, vcs_app):
35 def test_http_app_streaming_no_data(repeat, vcs_app):
36 app = vcs_app
36 app = vcs_app
37 for x in xrange(repeat / 10):
37 for x in range(repeat // 10):
38 response = app.post('/stream/git/')
38 response = app.post('/stream/git/')
39 assert response.status_code == 200
39 assert response.status_code == 200
@@ -1,155 +1,155 @@
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21
21
22 import pytest
22 import pytest
23
23
24 from vcsserver import subprocessio
24 from vcsserver import subprocessio
25
25
26
26
27 class KindaFilelike(object): # pragma: no cover
27 class KindaFilelike(object): # pragma: no cover
28
28
29 def __init__(self, data, size):
29 def __init__(self, data, size):
30 chunks = size / len(data)
30 chunks = size / len(data)
31
31
32 self.stream = self._get_stream(data, chunks)
32 self.stream = self._get_stream(data, chunks)
33
33
34 def _get_stream(self, data, chunks):
34 def _get_stream(self, data, chunks):
35 for x in xrange(chunks):
35 for x in range(chunks):
36 yield data
36 yield data
37
37
38 def read(self, n):
38 def read(self, n):
39
39
40 buffer_stream = ''
40 buffer_stream = ''
41 for chunk in self.stream:
41 for chunk in self.stream:
42 buffer_stream += chunk
42 buffer_stream += chunk
43 if len(buffer_stream) >= n:
43 if len(buffer_stream) >= n:
44 break
44 break
45
45
46 # self.stream = self.bytes[n:]
46 # self.stream = self.bytes[n:]
47 return buffer_stream
47 return buffer_stream
48
48
49
49
50 @pytest.fixture(scope='module')
50 @pytest.fixture(scope='module')
51 def environ():
51 def environ():
52 """Delete coverage variables, as they make the tests fail."""
52 """Delete coverage variables, as they make the tests fail."""
53 env = dict(os.environ)
53 env = dict(os.environ)
54 for key in env.keys():
54 for key in env.keys():
55 if key.startswith('COV_CORE_'):
55 if key.startswith('COV_CORE_'):
56 del env[key]
56 del env[key]
57
57
58 return env
58 return env
59
59
60
60
61 def _get_python_args(script):
61 def _get_python_args(script):
62 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
62 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
63
63
64
64
65 def test_raise_exception_on_non_zero_return_code(environ):
65 def test_raise_exception_on_non_zero_return_code(environ):
66 args = _get_python_args('sys.exit(1)')
66 args = _get_python_args('sys.exit(1)')
67 with pytest.raises(EnvironmentError):
67 with pytest.raises(EnvironmentError):
68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
69
69
70
70
71 def test_does_not_fail_on_non_zero_return_code(environ):
71 def test_does_not_fail_on_non_zero_return_code(environ):
72 args = _get_python_args('sys.exit(1)')
72 args = _get_python_args('sys.exit(1)')
73 output = ''.join(
73 output = ''.join(
74 subprocessio.SubprocessIOChunker(
74 subprocessio.SubprocessIOChunker(
75 args, shell=False, fail_on_return_code=False, env=environ
75 args, shell=False, fail_on_return_code=False, env=environ
76 )
76 )
77 )
77 )
78
78
79 assert output == ''
79 assert output == ''
80
80
81
81
82 def test_raise_exception_on_stderr(environ):
82 def test_raise_exception_on_stderr(environ):
83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
84 with pytest.raises(EnvironmentError) as excinfo:
84 with pytest.raises(EnvironmentError) as excinfo:
85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
86
86
87 assert 'exited due to an error:\nX' in str(excinfo.value)
87 assert 'exited due to an error:\nX' in str(excinfo.value)
88
88
89
89
90 def test_does_not_fail_on_stderr(environ):
90 def test_does_not_fail_on_stderr(environ):
91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
92 output = ''.join(
92 output = ''.join(
93 subprocessio.SubprocessIOChunker(
93 subprocessio.SubprocessIOChunker(
94 args, shell=False, fail_on_stderr=False, env=environ
94 args, shell=False, fail_on_stderr=False, env=environ
95 )
95 )
96 )
96 )
97
97
98 assert output == ''
98 assert output == ''
99
99
100
100
101 @pytest.mark.parametrize('size', [1, 10 ** 5])
101 @pytest.mark.parametrize('size', [1, 10 ** 5])
102 def test_output_with_no_input(size, environ):
102 def test_output_with_no_input(size, environ):
103 print(type(environ))
103 print(type(environ))
104 data = 'X'
104 data = 'X'
105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
107
107
108 assert output == data * size
108 assert output == data * size
109
109
110
110
111 @pytest.mark.parametrize('size', [1, 10 ** 5])
111 @pytest.mark.parametrize('size', [1, 10 ** 5])
112 def test_output_with_no_input_does_not_fail(size, environ):
112 def test_output_with_no_input_does_not_fail(size, environ):
113 data = 'X'
113 data = 'X'
114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
115 output = ''.join(
115 output = ''.join(
116 subprocessio.SubprocessIOChunker(
116 subprocessio.SubprocessIOChunker(
117 args, shell=False, fail_on_return_code=False, env=environ
117 args, shell=False, fail_on_return_code=False, env=environ
118 )
118 )
119 )
119 )
120
120
121 print("{} {}".format(len(data * size), len(output)))
121 print("{} {}".format(len(data * size), len(output)))
122 assert output == data * size
122 assert output == data * size
123
123
124
124
125 @pytest.mark.parametrize('size', [1, 10 ** 5])
125 @pytest.mark.parametrize('size', [1, 10 ** 5])
126 def test_output_with_input(size, environ):
126 def test_output_with_input(size, environ):
127 data_len = size
127 data_len = size
128 inputstream = KindaFilelike('X', size)
128 inputstream = KindaFilelike('X', size)
129
129
130 # This acts like the cat command.
130 # This acts like the cat command.
131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 output = ''.join(
132 output = ''.join(
133 subprocessio.SubprocessIOChunker(
133 subprocessio.SubprocessIOChunker(
134 args, shell=False, inputstream=inputstream, env=environ
134 args, shell=False, inputstream=inputstream, env=environ
135 )
135 )
136 )
136 )
137
137
138 assert len(output) == data_len
138 assert len(output) == data_len
139
139
140
140
141 @pytest.mark.parametrize('size', [1, 10 ** 5])
141 @pytest.mark.parametrize('size', [1, 10 ** 5])
142 def test_output_with_input_skipping_iterator(size, environ):
142 def test_output_with_input_skipping_iterator(size, environ):
143 data_len = size
143 data_len = size
144 inputstream = KindaFilelike('X', size)
144 inputstream = KindaFilelike('X', size)
145
145
146 # This acts like the cat command.
146 # This acts like the cat command.
147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
148
148
149 # Note: assigning the chunker makes sure that it is not deleted too early
149 # Note: assigning the chunker makes sure that it is not deleted too early
150 chunker = subprocessio.SubprocessIOChunker(
150 chunker = subprocessio.SubprocessIOChunker(
151 args, shell=False, inputstream=inputstream, env=environ
151 args, shell=False, inputstream=inputstream, env=environ
152 )
152 )
153 output = ''.join(chunker.output)
153 output = ''.join(chunker.output)
154
154
155 assert len(output) == data_len
155 assert len(output) == data_len
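
Taken together, the tests above pin down the chunker's contract: it streams the stdout of a child process, optionally feeding it an inputstream, and either raises EnvironmentError or stays quiet about stderr and the exit code depending on the fail_on_* flags. A minimal usage sketch built only from the calls exercised in these tests; string-versus-bytes output follows whatever the installed vcsserver expects:

    import os
    import sys
    from vcsserver import subprocessio

    # Echo a short string through a child process and collect the streamed chunks.
    args = [sys.executable, '-c', 'import sys; sys.stdout.write("hello from the child")']
    chunker = subprocessio.SubprocessIOChunker(args, shell=False, env=dict(os.environ))
    print(''.join(chunker))   # 'hello from the child'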