@@ -25,6 +25,7 @@ from vcsserver.lib.rc_cache import regio
 from vcsserver import exceptions
 from vcsserver.exceptions import NoContentException
 from vcsserver.hgcompat import (archival)
+from vcsserver.str_utils import safe_bytes
 
 log = logging.getLogger(__name__)
 
@@ -112,13 +113,13 @@ def archive_repo(walker, archive_dest_pa
             'Remote does not support: "%s" archive type.' % kind)
 
     for f in walker(commit_id, archive_at_path):
-        f_path = os.path.join(archive_dir_name, f.path.lstrip('/'))
+        f_path = os.path.join(safe_bytes(archive_dir_name), f.path.lstrip(b'/'))
         try:
             archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
         except NoContentException:
             # NOTE(marcink): this is a special case for SVN so we can create "empty"
             # directories which arent supported by archiver
-            archiver.addfile(os.path.join(f_path, '.dir'), f.mode, f.is_link, '')
+            archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, '')
 
     if write_metadata:
         metadata = dict([
@@ -127,8 +128,8 @@ def archive_repo(walker, archive_dest_pa
         ])
         metadata.update(extra_metadata)
 
-        meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
-        f_path = os.path.join(archive_dir_name, '.archival.txt')
-        archiver.addfile(f_path, 0o644, False, '\n'.join(meta))
+        meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
+        f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
+        archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))
 
     return archiver.done()
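
Most hunks in this diff route path and metadata values through helpers from vcsserver.str_utils. Their real implementation is not part of this change; a minimal sketch of the behaviour the hunks above rely on (str is encoded to bytes, bytes pass through unchanged, and safe_str does the reverse) could look like this:

    import os

    # Sketch only: the real helpers live in vcsserver.str_utils and may handle
    # more encodings and error modes than shown here.
    def safe_bytes(value, encoding='utf-8'):
        return value if isinstance(value, bytes) else value.encode(encoding)

    def safe_str(value, encoding='utf-8'):
        return value if isinstance(value, str) else value.decode(encoding)

    archive_dir_name = 'repo-v1.2.3'  # hypothetical value
    print(os.path.join(safe_bytes(archive_dir_name), b'.archival.txt'))
    # b'repo-v1.2.3/.archival.txt'

With all path components as bytes, os.path.join no longer hits the str/bytes mixing error the original code would raise on Python 3.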
@@ -42,7 +42,7 @@ def write_response_error(http_exception,
     _exception = http_exception(content_type=content_type)
     _exception.content_type = content_type
     if text:
-        _exception.text = json.dumps({'message': text})
+        _exception.body = json.dumps({'message': text})
     log.debug('LFS: writing response of type %s to client with text:%s',
               http_exception, text)
     return _exception
@@ -39,7 +39,7 @@ from pyramid.response import Response
 
 from vcsserver.lib.rc_json import json
 from vcsserver.config.settings_maker import SettingsMaker
-from vcsserver.str_utils import safe_int
+from vcsserver.str_utils import safe_int, safe_bytes, safe_str
 from vcsserver.lib.statsd_client import StatsdClient
 
 log = logging.getLogger(__name__)
@@ -50,7 +50,7 @@ from vcsserver.vcs_base import RemoteBas
 DIR_STAT = stat.S_IFDIR
 FILE_MODE = stat.S_IFMT
 GIT_LINK = objects.S_IFGITLINK
-PEELED_REF_MARKER = '^{}'
+PEELED_REF_MARKER = b'^{}'
 
 
 log = logging.getLogger(__name__)
@@ -156,7 +156,7 @@ class GitRemote(RemoteBase):
         prefix = b'git version'
         if stdout.startswith(prefix):
             stdout = stdout[len(prefix):]
-        return stdout.strip()
+        return safe_str(stdout.strip())
 
     @reraise_safe_exceptions
     def is_empty(self, wire):
@@ -559,15 +559,24 @@ class GitRemote(RemoteBase):
         # Create commit
         commit = objects.Commit()
         commit.tree = commit_tree.id
+        bytes_keys = [
+            'author',
+            'committer',
+            'message',
+            'encoding'
+        ]
+
         for k, v in commit_data.items():
+            if k in bytes_keys:
+                v = safe_bytes(v)
             setattr(commit, k, v)
+
         object_store.add_object(commit)
 
-        self.create_branch(wire, branch, commit.id)
+        self.create_branch(wire, branch, safe_str(commit.id))
 
         # dulwich set-ref
-        ref = 'refs/heads/%s' % branch
-        repo.refs[ref] = commit.id
+        repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id
 
         return commit.id
 
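
The new bytes_keys list reflects a dulwich constraint: the author, committer, message and encoding headers of a Commit object are stored as bytes, so str values arriving over the wire have to be encoded before assignment. A standalone sketch (all names and values below are made up):

    import time
    from dulwich.objects import Commit, Tree

    tree = Tree()  # empty tree, only used here to get a valid tree id

    commit = Commit()
    commit.tree = tree.id
    # header fields are bytes in dulwich, hence safe_bytes() in the hunk above
    commit.author = commit.committer = b'Jane Doe <jane@example.com>'
    commit.author_time = commit.commit_time = int(time.time())
    commit.author_timezone = commit.commit_timezone = 0
    commit.encoding = b'UTF-8'
    commit.message = b'example commit message'

    print(commit.id)  # 40-character hex sha, itself a bytes value

commit.id being bytes is also why the surrounding calls wrap it in safe_str() before handing it back to the caller.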
@@ -645,7 +654,7 @@ class GitRemote(RemoteBase):
         fetch_refs = []
 
         for ref_line in output.splitlines():
-            sha, ref = ref_line.split('\t')
+            sha, ref = ref_line.split(b'\t')
             sha = sha.strip()
             if ref in remote_refs:
                 # duplicate, skip
@@ -654,16 +663,16 @@ class GitRemote(RemoteBase):
                 log.debug("Skipping peeled reference %s", ref)
                 continue
             # don't sync HEAD
-            if ref in ['HEAD']:
+            if ref in [b'HEAD']:
                 continue
 
             remote_refs[ref] = sha
 
             if refs and sha in refs:
                 # we filter fetch using our specified refs
-                fetch_refs.append('{}:{}'.format(ref, ref))
+                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
             elif not refs:
-                fetch_refs.append('{}:{}'.format(ref, ref))
+                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
         log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
 
         if fetch_refs:
@@ -916,6 +925,7 @@ class GitRemote(RemoteBase):
     def parents(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _parents(_repo_id, _commit_id):
             repo_init = self._factory.repo_libgit2(wire)
@@ -1241,7 +1251,7 @@ class GitRemote(RemoteBase):
 
             return b''.join(proc), b''.join(proc.stderr)
         except OSError as err:
-            cmd = ' '.join(cmd)  # human friendly CMD
+            cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
             tb_err = ("Couldn't run git command (%s).\n"
                       "Original error was:%s\n"
                       "Call options:%s\n"
@@ -200,7 +200,7 @@ class HgRemote(RemoteBase):
     @reraise_safe_exceptions
     def discover_hg_version(self):
         from mercurial import util
-        return util.version()
+        return safe_str(util.version())
 
     @reraise_safe_exceptions
     def is_empty(self, wire):
@@ -216,10 +216,11 @@ class HgRemote(RemoteBase):
     def bookmarks(self, wire):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _bookmarks(_context_uid, _repo_id):
             repo = self._factory.repo(wire)
-            return dict(repo._bookmarks)
+            return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}
 
         return _bookmarks(context_uid, repo_id)
 
@@ -227,16 +228,17 @@ class HgRemote(RemoteBase):
     def branches(self, wire, normal, closed):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _branches(_context_uid, _repo_id, _normal, _closed):
             repo = self._factory.repo(wire)
             iter_branches = repo.branchmap().iterbranches()
             bt = {}
-            for branch_name, _heads, tip, is_closed in iter_branches:
+            for branch_name, _heads, tip_node, is_closed in iter_branches:
                 if normal and not is_closed:
-                    bt[branch_name] = tip
+                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
                 if closed and is_closed:
-                    bt[branch_name] = tip
+                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
 
             return bt
 
@@ -246,6 +248,7 @@ class HgRemote(RemoteBase):
     def bulk_request(self, wire, commit_id, pre_load):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _bulk_request(_repo_id, _commit_id, _pre_load):
             result = {}
@@ -264,6 +267,7 @@ class HgRemote(RemoteBase):
     def ctx_branch(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_branch(_repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -275,6 +279,7 @@ class HgRemote(RemoteBase):
     def ctx_date(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_date(_repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -292,6 +297,7 @@ class HgRemote(RemoteBase):
     def ctx_files(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_files(_repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -310,6 +316,7 @@ class HgRemote(RemoteBase):
     def ctx_parents(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_parents(_repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -323,6 +330,7 @@ class HgRemote(RemoteBase):
     def ctx_children(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_children(_repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -336,6 +344,7 @@ class HgRemote(RemoteBase):
     def ctx_phase(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_phase(_context_uid, _repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -348,6 +357,7 @@ class HgRemote(RemoteBase):
     def ctx_obsolete(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -359,6 +369,7 @@ class HgRemote(RemoteBase):
     def ctx_hidden(self, wire, commit_id):
         cache_on, context_uid, repo_id = self._cache_on(wire)
         region = self._region(wire)
+
         @region.conditional_cache_on_arguments(condition=cache_on)
         def _ctx_hidden(_context_uid, _repo_id, _commit_id):
             repo = self._factory.repo(wire)
@@ -464,8 +475,9 @@ class HgRemote(RemoteBase):
         opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
 
         try:
-            return "".join(patch.diff(
-                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
+            diff_iter = patch.diff(
+                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
+            return b"".join(diff_iter)
         except RepoLookupError as e:
             raise exceptions.LookupException(e)()
 
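
The diff() change follows the same rule: Mercurial's patch.diff() yields the patch as a sequence of bytes chunks on Python 3, so the join separator has to be bytes as well. In isolation:

    # chunks shaped like what patch.diff() yields (byte strings)
    chunks = [b'diff --git a/file.txt b/file.txt\n', b'--- a/file.txt\n', b'+++ b/file.txt\n']

    raw_diff = b"".join(chunks)   # result is bytes
    # "".join(chunks) would raise TypeError: sequence item 0: expected str instance, bytes found
    print(raw_diff.decode('utf-8'))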
@@ -583,7 +595,7 @@ class HgRemote(RemoteBase):
     @reraise_safe_exceptions
     def get_config_value(self, wire, section, name, untrusted=False):
         repo = self._factory.repo(wire)
-        return repo.ui.config(section, name, untrusted=untrusted)
+        return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
 
     @reraise_safe_exceptions
     def is_large_file(self, wire, commit_id, path):
681 | repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y') |
|
693 | repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y') | |
682 |
|
694 | |||
683 | bookmarks = list(dict(repo._bookmarks).keys()) |
|
695 | bookmarks = list(dict(repo._bookmarks).keys()) | |
684 | remote = peer(repo, {}, url) |
|
696 | remote = peer(repo, {}, safe_bytes(url)) | |
685 | # Disable any prompts for this remote |
|
697 | # Disable any prompts for this remote | |
686 | remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y') |
|
698 | remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y') | |
687 |
|
699 | |||
@@ -782,7 +794,7 b' class HgRemote(RemoteBase):' | |||||
782 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
794 | @region.conditional_cache_on_arguments(condition=cache_on) | |
783 | def _tags(_context_uid, _repo_id): |
|
795 | def _tags(_context_uid, _repo_id): | |
784 | repo = self._factory.repo(wire) |
|
796 | repo = self._factory.repo(wire) | |
785 | return repo.tags() |
|
797 | return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()} | |
786 |
|
798 | |||
787 | return _tags(context_uid, repo_id) |
|
799 | return _tags(context_uid, repo_id) | |
788 |
|
800 | |||
@@ -815,10 +827,10 @@ class HgRemote(RemoteBase):
 
         baseui.write = write
         if branch:
-            args = [branch]
+            args = [safe_bytes(branch)]
         else:
             args = []
-        commands.heads(baseui, repo, template='{node} ', *args)
+        commands.heads(baseui, repo, template=b'{node} ', *args)
 
         return output.getvalue()
 
@@ -833,57 +845,55 @@ class HgRemote(RemoteBase):
     @reraise_safe_exceptions
     def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
         baseui = self._factory._create_config(wire["config"], hooks=hooks)
-        clone(baseui, source, dest, noupdate=not update_after_clone)
+        clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
 
     @reraise_safe_exceptions
     def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
 
         repo = self._factory.repo(wire)
         baseui = self._factory._create_config(wire['config'])
-        publishing = baseui.configbool('phases', 'publish')
-        if publishing:
-            new_commit = 'public'
-        else:
-            new_commit = 'draft'
+        publishing = baseui.configbool(b'phases', b'publish')
 
-        def _filectxfn(_repo, ctx, path):
+        def _filectxfn(_repo, ctx, path: bytes):
             """
             Marks given path as added/changed/removed in a given _repo. This is
             for internal mercurial commit function.
             """
 
             # check if this path is removed
-            if path in removed:
+            if safe_str(path) in removed:
                 # returning None is a way to mark node for removal
                 return None
 
             # check if this path is added
             for node in updated:
-                if node['path'] == path:
+                if safe_bytes(node['path']) == path:
                     return memfilectx(
                         _repo,
                         changectx=ctx,
-                        path=node['path'],
-                        data=node['content'],
+                        path=safe_bytes(node['path']),
+                        data=safe_bytes(node['content']),
                         islink=False,
                         isexec=bool(node['mode'] & stat.S_IXUSR),
                         copysource=False)
+            abort_exc = exceptions.AbortException()
+            raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")
 
-            raise exceptions.AbortException()(
-                "Given path haven't been marked as added, "
-                "changed or removed (%s)" % path)
-
-        with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
-
+        if publishing:
+            new_commit_phase = b'public'
+        else:
+            new_commit_phase = b'draft'
+        with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
+            kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
             commit_ctx = memctx(
                 repo=repo,
                 parents=parents,
-                text=message,
-                files=files,
+                text=safe_bytes(message),
+                files=[safe_bytes(x) for x in files],
                 filectxfn=_filectxfn,
-                user=user,
+                user=safe_bytes(user),
                 date=(commit_time, commit_timezone),
-                extra=extra)
+                extra=kwargs)
 
             n = repo.commitctx(commit_ctx)
             new_id = hex(n)
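
Inside commitctx() everything handed to memctx()/memfilectx() is coerced the same way: message, user, file names, file contents and the extra mapping all become bytes, and the phases override now uses bytes config keys. The extra-to-kwargs conversion on its own, with invented sample values and the same assumed safe_bytes behaviour as sketched earlier:

    def safe_bytes(value, encoding='utf-8'):
        # sketch of the assumed helper; the real one lives in vcsserver.str_utils
        return value if isinstance(value, bytes) else value.encode(encoding)

    extra = {'branch': 'default', 'rebase_source': 'deadbeefcafe'}  # hypothetical input
    kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
    print(kwargs)  # {b'branch': b'default', b'rebase_source': b'deadbeefcafe'}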
@@ -896,7 +906,7 @@ class HgRemote(RemoteBase):
         # Disable any prompts for this repo
         repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
 
-        remote = peer(repo, {}, url)
+        remote = peer(repo, {}, safe_bytes(url))
         # Disable any prompts for this remote
         remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
 
@@ -115,7 +115,7 @@ class SvnRemote(RemoteBase):
             svn_ver = svn.core.SVN_VERSION
         except ImportError:
             svn_ver = None
-        return svn_ver
+        return safe_str(svn_ver)
 
     @reraise_safe_exceptions
     def is_empty(self, wire):
@@ -504,7 +504,7 @@ class SvnRemote(RemoteBase):
             if safe_call:
                 return '', safe_str(err).strip()
             else:
-                cmd = ' '.join(cmd)  # human friendly CMD
+                cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
                 tb_err = ("Couldn't run svn command (%s).\n"
                           "Original error was:%s\n"
                           "Call options:%s\n"
@@ -28,6 +28,8 @@ import logging
 import subprocess
 import threading
 
+from vcsserver.str_utils import safe_str
+
 log = logging.getLogger(__name__)
 
 
@@ -550,7 +552,7 @@ def run_command(arguments, env=None):
         proc = SubprocessIOChunker(cmd, **_opts)
         return b''.join(proc), b''.join(proc.stderr)
     except OSError as err:
-        cmd = ' '.join(cmd)
+        cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
         tb_err = ("Couldn't run subprocessio command (%s).\n"
                   "Original error was:%s\n" % (cmd, err))
         log.exception(tb_err)
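
This hunk and the matching git and svn hunks earlier fix the same crash in error reporting: the command list handed to the subprocess helpers may contain bytes elements, and ' '.join() refuses to mix str and bytes, so each element is passed through safe_str() before the human-readable message is built. For example:

    from vcsserver.str_utils import safe_str  # the import added by the hunk above

    # hypothetical mixed str/bytes argument list, as can occur for git/svn calls
    cmd = ['git', b'ls-remote', b'--heads', 'https://example.com/repo.git']

    # ' '.join(cmd) would raise TypeError; mapping safe_str first gives a clean string
    print(' '.join(map(safe_str, cmd)))  # git ls-remote --heads https://example.com/repo.git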