vcsserver: fixed archival calls
Author: super-admin
Commit: r1072:6a99d09b (python3)
@@ -1,135 +1,136 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2020 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 import os
 import sys
 import traceback
 import logging
 import urllib.parse
 
 from vcsserver.lib.rc_cache import region_meta
 
 from vcsserver import exceptions
 from vcsserver.exceptions import NoContentException
-from vcsserver.hgcompat import (archival)
+from vcsserver.hgcompat import archival
 from vcsserver.str_utils import safe_bytes
 
 log = logging.getLogger(__name__)
 
 
 class RepoFactory(object):
     """
     Utility to create instances of repository
 
     It provides internal caching of the `repo` object based on
     the :term:`call context`.
     """
     repo_type = None
 
     def __init__(self):
         self._cache_region = region_meta.dogpile_cache_regions['repo_object']
 
     def _create_config(self, path, config):
         config = {}
         return config
 
     def _create_repo(self, wire, create):
         raise NotImplementedError()
 
     def repo(self, wire, create=False):
         raise NotImplementedError()
 
 
 def obfuscate_qs(query_string):
     if query_string is None:
         return None
 
     parsed = []
     for k, v in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
         if k in ['auth_token', 'api_key']:
             v = "*****"
         parsed.append((k, v))
 
     return '&'.join('{}{}'.format(
         k, '={}'.format(v) if v else '') for k, v in parsed)
 
 
 def raise_from_original(new_type, org_exc: Exception):
     """
     Raise a new exception type with original args and traceback.
     """
 
     exc_type, exc_value, exc_traceback = sys.exc_info()
     new_exc = new_type(*exc_value.args)
 
     # store the original traceback into the new exc
     new_exc._org_exc_tb = traceback.format_tb(exc_traceback)
 
     try:
         raise new_exc.with_traceback(exc_traceback)
     finally:
         del exc_traceback
 
 
 class ArchiveNode(object):
     def __init__(self, path, mode, is_link, raw_bytes):
         self.path = path
         self.mode = mode
         self.is_link = is_link
         self.raw_bytes = raw_bytes
 
 
 def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
                  archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
     """
     walker should be a file walker, for example:
     def walker():
         for file_info in files:
             yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
     """
     extra_metadata = extra_metadata or {}
+    archive_dest_path = safe_bytes(archive_dest_path)
 
     if kind == "tgz":
-        archiver = archival.tarit(archive_dest_path, mtime, "gz")
+        archiver = archival.tarit(archive_dest_path, mtime, b"gz")
     elif kind == "tbz2":
-        archiver = archival.tarit(archive_dest_path, mtime, "bz2")
+        archiver = archival.tarit(archive_dest_path, mtime, b"bz2")
     elif kind == 'zip':
         archiver = archival.zipit(archive_dest_path, mtime)
     else:
         raise exceptions.ArchiveException()(
-            'Remote does not support: "%s" archive type.' % kind)
+            f'Remote does not support: "{kind}" archive type.')
 
     for f in walker(commit_id, archive_at_path):
         f_path = os.path.join(safe_bytes(archive_dir_name), f.path.lstrip(b'/'))
         try:
             archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
         except NoContentException:
             # NOTE(marcink): this is a special case for SVN so we can create "empty"
             # directories which arent supported by archiver
             archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, '')
 
     if write_metadata:
         metadata = dict([
             ('commit_id', commit_id),
             ('mtime', mtime),
         ])
         metadata.update(extra_metadata)
 
         meta = [safe_bytes(f"{f_name}:{value}") for f_name, value in metadata.items()]
         f_path = os.path.join(safe_bytes(archive_dir_name), b'.archival.txt')
         archiver.addfile(f_path, 0o644, False, b'\n'.join(meta))
 
     return archiver.done()
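
The function touched by this commit is archive_repo(), together with the ArchiveNode helper: a walker yields one ArchiveNode per file and archive_repo() packs them into a tgz/tbz2/zip archive, with the destination path now normalised through safe_bytes() and the compression mode passed to archival.tarit() as bytes. The sketch below shows one way the updated code path could be driven. It is a minimal, illustrative sketch only: it assumes the diffed module is importable as vcsserver.base (the file name is not shown in this view), and the file contents, paths and commit id are made up for the example.

# Minimal usage sketch (assumptions noted above): drive archive_repo()
# with an in-memory walker.
import time

from vcsserver.base import ArchiveNode, archive_repo

files = {
    b'README.rst': b'hello world\n',
    b'docs/index.rst': b'index\n',
}


def walker(_commit_id, _archive_at_path):
    # archive_repo() calls walker(commit_id, archive_at_path) and later
    # f.raw_bytes() per node, so raw_bytes is passed as a callable here.
    for path, data in files.items():
        yield ArchiveNode(path, mode=0o644, is_link=False,
                          raw_bytes=lambda d=data: d)


archive_repo(
    walker,
    '/tmp/example-archive.tgz',    # run through safe_bytes() inside archive_repo()
    kind='tgz',
    mtime=time.time(),
    archive_at_path=b'/',
    archive_dir_name='example-repo',
    commit_id='0' * 40,
    extra_metadata={'branch': 'default'},
)

Note that raw_bytes is stored on ArchiveNode as a callable, matching the docstring example where a bound data method is passed without being called.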