##// END OF EJS Templates
caches: allow regional per repo caches, and invalidate caches via a remote call.
super-admin -
r961:da0b3be2 default
parent child Browse files
Show More
@@ -1,130 +1,130 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18 import sys
18 import sys
19 import traceback
19 import traceback
20 import logging
20 import logging
21 import urlparse
21 import urlparse
22
22
23 from vcsserver import exceptions
23 from vcsserver import exceptions
24 from vcsserver.exceptions import NoContentException
24 from vcsserver.exceptions import NoContentException
25 from vcsserver.hgcompat import (archival)
25 from vcsserver.hgcompat import (archival)
26 from vcsserver.lib.rc_cache import region_meta
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
class RepoFactory(object):
    """
    Base factory for building repository instances.

    Concrete subclasses set ``repo_type`` and implement the actual
    construction of the `repo` object for a given :term:`call context`.
    """
    # backend identifier, e.g. 'git'/'hg'/'svn'; set by subclasses
    repo_type = None

    def __init__(self):
        pass

    def _create_config(self, path, config):
        # NOTE(review): the incoming ``config`` is intentionally discarded;
        # subclasses override this to build a backend-specific config object.
        config = {}
        return config

    def _create_repo(self, wire, create):
        # subclasses must build and return the backend repository object
        raise NotImplementedError()

    def repo(self, wire, create=False):
        # subclasses must return a (possibly cached) repository instance
        raise NotImplementedError()
51
51
52
52
def obfuscate_qs(query_string):
    """
    Return *query_string* with sensitive parameter values masked.

    Values of ``auth_token`` and ``api_key`` are replaced by ``*****``;
    every other pair passes through untouched. ``None`` stays ``None``.
    Pairs with an empty value are rendered without the ``=`` sign.
    """
    if query_string is None:
        return None

    sensitive_keys = ('auth_token', 'api_key')
    masked = [
        (key, "*****" if key in sensitive_keys else val)
        for key, val in urlparse.parse_qsl(query_string, keep_blank_values=True)
    ]

    return '&'.join(
        '{}={}'.format(key, val) if val else key
        for key, val in masked)
65
65
66
66
67 def raise_from_original(new_type):
67 def raise_from_original(new_type):
68 """
68 """
69 Raise a new exception type with original args and traceback.
69 Raise a new exception type with original args and traceback.
70 """
70 """
71 exc_type, exc_value, exc_traceback = sys.exc_info()
71 exc_type, exc_value, exc_traceback = sys.exc_info()
72 new_exc = new_type(*exc_value.args)
72 new_exc = new_type(*exc_value.args)
73 # store the original traceback into the new exc
73 # store the original traceback into the new exc
74 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
74 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
75
75
76 try:
76 try:
77 raise new_exc, None, exc_traceback
77 raise new_exc, None, exc_traceback
78 finally:
78 finally:
79 del exc_traceback
79 del exc_traceback
80
80
81
81
class ArchiveNode(object):
    """A single entry (file or link) handed to the archiver by a walker."""

    def __init__(self, path, mode, is_link, raw_bytes):
        self.path = path            # path of the node inside the archive
        self.mode = mode            # file mode bits
        self.is_link = is_link      # True when the node is a symlink
        # content provider; callers invoke it as ``raw_bytes()`` — assumed
        # to be a callable returning the file data (see archive_repo usage)
        self.raw_bytes = raw_bytes
88
88
89
89
def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
                 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
    """
    Stream every node produced by *walker* into a new archive.

    walker should be a file walker, for example:
    def walker():
        for file_info in files:
            yield ArchiveNode(fn, mode, is_link, ctx[fn].data)

    When *write_metadata* is set, an ``.archival.txt`` file with commit id,
    mtime and *extra_metadata* entries is appended to the archive.
    """
    extra_metadata = extra_metadata or {}

    # pick the archiver implementation matching the requested format
    if kind == "tgz":
        archiver = archival.tarit(archive_dest_path, mtime, "gz")
    elif kind == "tbz2":
        archiver = archival.tarit(archive_dest_path, mtime, "bz2")
    elif kind == 'zip':
        archiver = archival.zipit(archive_dest_path, mtime)
    else:
        raise exceptions.ArchiveException()(
            'Remote does not support: "%s" archive type.' % kind)

    for node in walker(commit_id, archive_at_path):
        archive_path = os.path.join(archive_dir_name, node.path.lstrip('/'))
        try:
            archiver.addfile(
                archive_path, node.mode, node.is_link, node.raw_bytes())
        except NoContentException:
            # NOTE(marcink): this is a special case for SVN so we can create
            # "empty" directories which aren't supported by archiver
            archiver.addfile(
                os.path.join(archive_path, '.dir'), node.mode, node.is_link, '')

    if write_metadata:
        metadata = dict([
            ('commit_id', commit_id),
            ('mtime', mtime),
        ])
        metadata.update(extra_metadata)

        meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
        archiver.addfile(
            os.path.join(archive_dir_name, '.archival.txt'),
            0o644, False, '\n'.join(meta))

    return archiver.done()
@@ -1,1226 +1,1247 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
50 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = '^{}'
53 PEELED_REF_MARKER = '^{}'
54
54
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
def str_to_dulwich(value):
    """
    Decode *value* to unicode using the configured wire encoding.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    wire_encoding = settings.WIRE_ENCODING
    return value.decode(wire_encoding)
64
64
65
65
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def _wrapped(*args, **kwargs):
        # translate backend-specific errors into vcsserver exceptions so the
        # wire layer never has to know about dulwich internals
        lookup_errors = (
            ChecksumMismatch, WrongObjectException, MissingCommitError,
            ObjectMissing)
        try:
            return func(*args, **kwargs)
        except lookup_errors as e:
            raise exceptions.LookupException(org_exc=e)(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            raise exceptions.VcsException(org_exc=e)(safe_str(e))
        except Exception:
            # NOTE(marcink): becuase of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return _wrapped
88
88
89
89
class Repo(DulwichRepo):
    """
    Dulwich repository wrapper that releases descriptors on destruction.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads
    to "Too many open files" errors; closing in ``__del__`` makes sure all
    opened file descriptors are released once the repo object is destroyed.
    """

    def __del__(self):
        # ``object_store`` may be absent if __init__ failed part-way;
        # only close a fully-initialized repository.
        if hasattr(self, 'object_store'):
            self.close()
101
101
102
102
class Repository(LibGit2Repo):
    """
    Context-manager flavoured libgit2 repository.

    ``free()`` is invoked on exit so the underlying libgit2 handle is
    released deterministically instead of waiting for garbage collection.
    """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()
110
111
111
class GitFactory(RepoFactory):
    # factory producing git repository objects, dulwich- or libgit2-backed
    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        path = wire['path']
        if use_libgit2:
            return Repository(path)
        return Repo(str_to_dulwich(path))

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        # convenience shortcut forcing the libgit2 backend
        return self.repo(wire, use_libgit2=True)
130
130
131
131
132 class GitRemote(RemoteBase):
132 class GitRemote(RemoteBase):
133
133
134 def __init__(self, factory):
134 def __init__(self, factory):
135 self._factory = factory
135 self._factory = factory
136 self._bulk_methods = {
136 self._bulk_methods = {
137 "date": self.date,
137 "date": self.date,
138 "author": self.author,
138 "author": self.author,
139 "branch": self.branch,
139 "branch": self.branch,
140 "message": self.message,
140 "message": self.message,
141 "parents": self.parents,
141 "parents": self.parents,
142 "_commit": self.revision,
142 "_commit": self.revision,
143 }
143 }
144
144
145 def _wire_to_config(self, wire):
145 def _wire_to_config(self, wire):
146 if 'config' in wire:
146 if 'config' in wire:
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
148 return {}
148 return {}
149
149
150 def _remote_conf(self, config):
150 def _remote_conf(self, config):
151 params = [
151 params = [
152 '-c', 'core.askpass=""',
152 '-c', 'core.askpass=""',
153 ]
153 ]
154 ssl_cert_dir = config.get('vcs_ssl_dir')
154 ssl_cert_dir = config.get('vcs_ssl_dir')
155 if ssl_cert_dir:
155 if ssl_cert_dir:
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 return params
157 return params
158
158
159 @reraise_safe_exceptions
159 @reraise_safe_exceptions
160 def discover_git_version(self):
160 def discover_git_version(self):
161 stdout, _ = self.run_git_command(
161 stdout, _ = self.run_git_command(
162 {}, ['--version'], _bare=True, _safe=True)
162 {}, ['--version'], _bare=True, _safe=True)
163 prefix = 'git version'
163 prefix = 'git version'
164 if stdout.startswith(prefix):
164 if stdout.startswith(prefix):
165 stdout = stdout[len(prefix):]
165 stdout = stdout[len(prefix):]
166 return stdout.strip()
166 return stdout.strip()
167
167
168 @reraise_safe_exceptions
168 @reraise_safe_exceptions
169 def is_empty(self, wire):
169 def is_empty(self, wire):
170 repo_init = self._factory.repo_libgit2(wire)
170 repo_init = self._factory.repo_libgit2(wire)
171 with repo_init as repo:
171 with repo_init as repo:
172
172
173 try:
173 try:
174 has_head = repo.head.name
174 has_head = repo.head.name
175 if has_head:
175 if has_head:
176 return False
176 return False
177
177
178 # NOTE(marcink): check again using more expensive method
178 # NOTE(marcink): check again using more expensive method
179 return repo.is_empty
179 return repo.is_empty
180 except Exception:
180 except Exception:
181 pass
181 pass
182
182
183 return True
183 return True
184
184
185 @reraise_safe_exceptions
185 @reraise_safe_exceptions
186 def assert_correct_path(self, wire):
186 def assert_correct_path(self, wire):
187 cache_on, context_uid, repo_id = self._cache_on(wire)
187 cache_on, context_uid, repo_id = self._cache_on(wire)
188 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 region = self.region(wire)
189 @region.conditional_cache_on_arguments(condition=cache_on)
189 def _assert_correct_path(_context_uid, _repo_id):
190 def _assert_correct_path(_context_uid, _repo_id):
190 try:
191 try:
191 repo_init = self._factory.repo_libgit2(wire)
192 repo_init = self._factory.repo_libgit2(wire)
192 with repo_init as repo:
193 with repo_init as repo:
193 pass
194 pass
194 except pygit2.GitError:
195 except pygit2.GitError:
195 path = wire.get('path')
196 path = wire.get('path')
196 tb = traceback.format_exc()
197 tb = traceback.format_exc()
197 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
198 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
198 return False
199 return False
199
200
200 return True
201 return True
201 return _assert_correct_path(context_uid, repo_id)
202 return _assert_correct_path(context_uid, repo_id)
202
203
203 @reraise_safe_exceptions
204 @reraise_safe_exceptions
204 def bare(self, wire):
205 def bare(self, wire):
205 repo_init = self._factory.repo_libgit2(wire)
206 repo_init = self._factory.repo_libgit2(wire)
206 with repo_init as repo:
207 with repo_init as repo:
207 return repo.is_bare
208 return repo.is_bare
208
209
209 @reraise_safe_exceptions
210 @reraise_safe_exceptions
210 def blob_as_pretty_string(self, wire, sha):
211 def blob_as_pretty_string(self, wire, sha):
211 repo_init = self._factory.repo_libgit2(wire)
212 repo_init = self._factory.repo_libgit2(wire)
212 with repo_init as repo:
213 with repo_init as repo:
213 blob_obj = repo[sha]
214 blob_obj = repo[sha]
214 blob = blob_obj.data
215 blob = blob_obj.data
215 return blob
216 return blob
216
217
217 @reraise_safe_exceptions
218 @reraise_safe_exceptions
218 def blob_raw_length(self, wire, sha):
219 def blob_raw_length(self, wire, sha):
219 cache_on, context_uid, repo_id = self._cache_on(wire)
220 cache_on, context_uid, repo_id = self._cache_on(wire)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 region = self.region(wire)
222 @region.conditional_cache_on_arguments(condition=cache_on)
221 def _blob_raw_length(_repo_id, _sha):
223 def _blob_raw_length(_repo_id, _sha):
222
224
223 repo_init = self._factory.repo_libgit2(wire)
225 repo_init = self._factory.repo_libgit2(wire)
224 with repo_init as repo:
226 with repo_init as repo:
225 blob = repo[sha]
227 blob = repo[sha]
226 return blob.size
228 return blob.size
227
229
228 return _blob_raw_length(repo_id, sha)
230 return _blob_raw_length(repo_id, sha)
229
231
230 def _parse_lfs_pointer(self, raw_content):
232 def _parse_lfs_pointer(self, raw_content):
231
233
232 spec_string = 'version https://git-lfs.github.com/spec'
234 spec_string = 'version https://git-lfs.github.com/spec'
233 if raw_content and raw_content.startswith(spec_string):
235 if raw_content and raw_content.startswith(spec_string):
234 pattern = re.compile(r"""
236 pattern = re.compile(r"""
235 (?:\n)?
237 (?:\n)?
236 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
238 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
237 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
239 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
238 ^size[ ](?P<oid_size>[0-9]+)\n
240 ^size[ ](?P<oid_size>[0-9]+)\n
239 (?:\n)?
241 (?:\n)?
240 """, re.VERBOSE | re.MULTILINE)
242 """, re.VERBOSE | re.MULTILINE)
241 match = pattern.match(raw_content)
243 match = pattern.match(raw_content)
242 if match:
244 if match:
243 return match.groupdict()
245 return match.groupdict()
244
246
245 return {}
247 return {}
246
248
247 @reraise_safe_exceptions
249 @reraise_safe_exceptions
248 def is_large_file(self, wire, commit_id):
250 def is_large_file(self, wire, commit_id):
249 cache_on, context_uid, repo_id = self._cache_on(wire)
251 cache_on, context_uid, repo_id = self._cache_on(wire)
250
252
251 @self.region.conditional_cache_on_arguments(condition=cache_on)
253 region = self.region(wire)
254 @region.conditional_cache_on_arguments(condition=cache_on)
252 def _is_large_file(_repo_id, _sha):
255 def _is_large_file(_repo_id, _sha):
253 repo_init = self._factory.repo_libgit2(wire)
256 repo_init = self._factory.repo_libgit2(wire)
254 with repo_init as repo:
257 with repo_init as repo:
255 blob = repo[commit_id]
258 blob = repo[commit_id]
256 if blob.is_binary:
259 if blob.is_binary:
257 return {}
260 return {}
258
261
259 return self._parse_lfs_pointer(blob.data)
262 return self._parse_lfs_pointer(blob.data)
260
263
261 return _is_large_file(repo_id, commit_id)
264 return _is_large_file(repo_id, commit_id)
262
265
263 @reraise_safe_exceptions
266 @reraise_safe_exceptions
264 def is_binary(self, wire, tree_id):
267 def is_binary(self, wire, tree_id):
265 cache_on, context_uid, repo_id = self._cache_on(wire)
268 cache_on, context_uid, repo_id = self._cache_on(wire)
266
269
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
270 region = self.region(wire)
271 @region.conditional_cache_on_arguments(condition=cache_on)
268 def _is_binary(_repo_id, _tree_id):
272 def _is_binary(_repo_id, _tree_id):
269 repo_init = self._factory.repo_libgit2(wire)
273 repo_init = self._factory.repo_libgit2(wire)
270 with repo_init as repo:
274 with repo_init as repo:
271 blob_obj = repo[tree_id]
275 blob_obj = repo[tree_id]
272 return blob_obj.is_binary
276 return blob_obj.is_binary
273
277
274 return _is_binary(repo_id, tree_id)
278 return _is_binary(repo_id, tree_id)
275
279
276 @reraise_safe_exceptions
280 @reraise_safe_exceptions
277 def in_largefiles_store(self, wire, oid):
281 def in_largefiles_store(self, wire, oid):
278 conf = self._wire_to_config(wire)
282 conf = self._wire_to_config(wire)
279 repo_init = self._factory.repo_libgit2(wire)
283 repo_init = self._factory.repo_libgit2(wire)
280 with repo_init as repo:
284 with repo_init as repo:
281 repo_name = repo.path
285 repo_name = repo.path
282
286
283 store_location = conf.get('vcs_git_lfs_store_location')
287 store_location = conf.get('vcs_git_lfs_store_location')
284 if store_location:
288 if store_location:
285
289
286 store = LFSOidStore(
290 store = LFSOidStore(
287 oid=oid, repo=repo_name, store_location=store_location)
291 oid=oid, repo=repo_name, store_location=store_location)
288 return store.has_oid()
292 return store.has_oid()
289
293
290 return False
294 return False
291
295
292 @reraise_safe_exceptions
296 @reraise_safe_exceptions
293 def store_path(self, wire, oid):
297 def store_path(self, wire, oid):
294 conf = self._wire_to_config(wire)
298 conf = self._wire_to_config(wire)
295 repo_init = self._factory.repo_libgit2(wire)
299 repo_init = self._factory.repo_libgit2(wire)
296 with repo_init as repo:
300 with repo_init as repo:
297 repo_name = repo.path
301 repo_name = repo.path
298
302
299 store_location = conf.get('vcs_git_lfs_store_location')
303 store_location = conf.get('vcs_git_lfs_store_location')
300 if store_location:
304 if store_location:
301 store = LFSOidStore(
305 store = LFSOidStore(
302 oid=oid, repo=repo_name, store_location=store_location)
306 oid=oid, repo=repo_name, store_location=store_location)
303 return store.oid_path
307 return store.oid_path
304 raise ValueError('Unable to fetch oid with path {}'.format(oid))
308 raise ValueError('Unable to fetch oid with path {}'.format(oid))
305
309
306 @reraise_safe_exceptions
310 @reraise_safe_exceptions
307 def bulk_request(self, wire, rev, pre_load):
311 def bulk_request(self, wire, rev, pre_load):
308 cache_on, context_uid, repo_id = self._cache_on(wire)
312 cache_on, context_uid, repo_id = self._cache_on(wire)
309 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 region = self.region(wire)
314 @region.conditional_cache_on_arguments(condition=cache_on)
310 def _bulk_request(_repo_id, _rev, _pre_load):
315 def _bulk_request(_repo_id, _rev, _pre_load):
311 result = {}
316 result = {}
312 for attr in pre_load:
317 for attr in pre_load:
313 try:
318 try:
314 method = self._bulk_methods[attr]
319 method = self._bulk_methods[attr]
315 args = [wire, rev]
320 args = [wire, rev]
316 result[attr] = method(*args)
321 result[attr] = method(*args)
317 except KeyError as e:
322 except KeyError as e:
318 raise exceptions.VcsException(e)(
323 raise exceptions.VcsException(e)(
319 "Unknown bulk attribute: %s" % attr)
324 "Unknown bulk attribute: %s" % attr)
320 return result
325 return result
321
326
322 return _bulk_request(repo_id, rev, sorted(pre_load))
327 return _bulk_request(repo_id, rev, sorted(pre_load))
323
328
324 def _build_opener(self, url):
329 def _build_opener(self, url):
325 handlers = []
330 handlers = []
326 url_obj = url_parser(url)
331 url_obj = url_parser(url)
327 _, authinfo = url_obj.authinfo()
332 _, authinfo = url_obj.authinfo()
328
333
329 if authinfo:
334 if authinfo:
330 # create a password manager
335 # create a password manager
331 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
336 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
332 passmgr.add_password(*authinfo)
337 passmgr.add_password(*authinfo)
333
338
334 handlers.extend((httpbasicauthhandler(passmgr),
339 handlers.extend((httpbasicauthhandler(passmgr),
335 httpdigestauthhandler(passmgr)))
340 httpdigestauthhandler(passmgr)))
336
341
337 return urllib2.build_opener(*handlers)
342 return urllib2.build_opener(*handlers)
338
343
339 def _type_id_to_name(self, type_id):
344 def _type_id_to_name(self, type_id):
340 return {
345 return {
341 1: b'commit',
346 1: b'commit',
342 2: b'tree',
347 2: b'tree',
343 3: b'blob',
348 3: b'blob',
344 4: b'tag'
349 4: b'tag'
345 }[type_id]
350 }[type_id]
346
351
347 @reraise_safe_exceptions
352 @reraise_safe_exceptions
348 def check_url(self, url, config):
353 def check_url(self, url, config):
349 url_obj = url_parser(url)
354 url_obj = url_parser(url)
350 test_uri, _ = url_obj.authinfo()
355 test_uri, _ = url_obj.authinfo()
351 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
356 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
352 url_obj.query = obfuscate_qs(url_obj.query)
357 url_obj.query = obfuscate_qs(url_obj.query)
353 cleaned_uri = str(url_obj)
358 cleaned_uri = str(url_obj)
354 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
359 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
355
360
356 if not test_uri.endswith('info/refs'):
361 if not test_uri.endswith('info/refs'):
357 test_uri = test_uri.rstrip('/') + '/info/refs'
362 test_uri = test_uri.rstrip('/') + '/info/refs'
358
363
359 o = self._build_opener(url)
364 o = self._build_opener(url)
360 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
365 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
361
366
362 q = {"service": 'git-upload-pack'}
367 q = {"service": 'git-upload-pack'}
363 qs = '?%s' % urllib.urlencode(q)
368 qs = '?%s' % urllib.urlencode(q)
364 cu = "%s%s" % (test_uri, qs)
369 cu = "%s%s" % (test_uri, qs)
365 req = urllib2.Request(cu, None, {})
370 req = urllib2.Request(cu, None, {})
366
371
367 try:
372 try:
368 log.debug("Trying to open URL %s", cleaned_uri)
373 log.debug("Trying to open URL %s", cleaned_uri)
369 resp = o.open(req)
374 resp = o.open(req)
370 if resp.code != 200:
375 if resp.code != 200:
371 raise exceptions.URLError()('Return Code is not 200')
376 raise exceptions.URLError()('Return Code is not 200')
372 except Exception as e:
377 except Exception as e:
373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
378 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 # means it cannot be cloned
379 # means it cannot be cloned
375 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
380 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
376
381
377 # now detect if it's proper git repo
382 # now detect if it's proper git repo
378 gitdata = resp.read()
383 gitdata = resp.read()
379 if 'service=git-upload-pack' in gitdata:
384 if 'service=git-upload-pack' in gitdata:
380 pass
385 pass
381 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
386 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
382 # old style git can return some other format !
387 # old style git can return some other format !
383 pass
388 pass
384 else:
389 else:
385 raise exceptions.URLError()(
390 raise exceptions.URLError()(
386 "url [%s] does not look like an git" % (cleaned_uri,))
391 "url [%s] does not look like an git" % (cleaned_uri,))
387
392
388 return True
393 return True
389
394
390 @reraise_safe_exceptions
395 @reraise_safe_exceptions
391 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
396 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
392 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
397 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
393 remote_refs = self.pull(wire, url, apply_refs=False)
398 remote_refs = self.pull(wire, url, apply_refs=False)
394 repo = self._factory.repo(wire)
399 repo = self._factory.repo(wire)
395 if isinstance(valid_refs, list):
400 if isinstance(valid_refs, list):
396 valid_refs = tuple(valid_refs)
401 valid_refs = tuple(valid_refs)
397
402
398 for k in remote_refs:
403 for k in remote_refs:
399 # only parse heads/tags and skip so called deferred tags
404 # only parse heads/tags and skip so called deferred tags
400 if k.startswith(valid_refs) and not k.endswith(deferred):
405 if k.startswith(valid_refs) and not k.endswith(deferred):
401 repo[k] = remote_refs[k]
406 repo[k] = remote_refs[k]
402
407
403 if update_after_clone:
408 if update_after_clone:
404 # we want to checkout HEAD
409 # we want to checkout HEAD
405 repo["HEAD"] = remote_refs["HEAD"]
410 repo["HEAD"] = remote_refs["HEAD"]
406 index.build_index_from_tree(repo.path, repo.index_path(),
411 index.build_index_from_tree(repo.path, repo.index_path(),
407 repo.object_store, repo["HEAD"].tree)
412 repo.object_store, repo["HEAD"].tree)
408
413
409 @reraise_safe_exceptions
414 @reraise_safe_exceptions
410 def branch(self, wire, commit_id):
415 def branch(self, wire, commit_id):
411 cache_on, context_uid, repo_id = self._cache_on(wire)
416 cache_on, context_uid, repo_id = self._cache_on(wire)
412 @self.region.conditional_cache_on_arguments(condition=cache_on)
417 region = self.region(wire)
418 @region.conditional_cache_on_arguments(condition=cache_on)
413 def _branch(_context_uid, _repo_id, _commit_id):
419 def _branch(_context_uid, _repo_id, _commit_id):
414 regex = re.compile('^refs/heads')
420 regex = re.compile('^refs/heads')
415
421
416 def filter_with(ref):
422 def filter_with(ref):
417 return regex.match(ref[0]) and ref[1] == _commit_id
423 return regex.match(ref[0]) and ref[1] == _commit_id
418
424
419 branches = filter(filter_with, self.get_refs(wire).items())
425 branches = filter(filter_with, self.get_refs(wire).items())
420 return [x[0].split('refs/heads/')[-1] for x in branches]
426 return [x[0].split('refs/heads/')[-1] for x in branches]
421
427
422 return _branch(context_uid, repo_id, commit_id)
428 return _branch(context_uid, repo_id, commit_id)
423
429
424 @reraise_safe_exceptions
430 @reraise_safe_exceptions
425 def commit_branches(self, wire, commit_id):
431 def commit_branches(self, wire, commit_id):
426 cache_on, context_uid, repo_id = self._cache_on(wire)
432 cache_on, context_uid, repo_id = self._cache_on(wire)
427 @self.region.conditional_cache_on_arguments(condition=cache_on)
433 region = self.region(wire)
434 @region.conditional_cache_on_arguments(condition=cache_on)
428 def _commit_branches(_context_uid, _repo_id, _commit_id):
435 def _commit_branches(_context_uid, _repo_id, _commit_id):
429 repo_init = self._factory.repo_libgit2(wire)
436 repo_init = self._factory.repo_libgit2(wire)
430 with repo_init as repo:
437 with repo_init as repo:
431 branches = [x for x in repo.branches.with_commit(_commit_id)]
438 branches = [x for x in repo.branches.with_commit(_commit_id)]
432 return branches
439 return branches
433
440
434 return _commit_branches(context_uid, repo_id, commit_id)
441 return _commit_branches(context_uid, repo_id, commit_id)
435
442
436 @reraise_safe_exceptions
443 @reraise_safe_exceptions
437 def add_object(self, wire, content):
444 def add_object(self, wire, content):
438 repo_init = self._factory.repo_libgit2(wire)
445 repo_init = self._factory.repo_libgit2(wire)
439 with repo_init as repo:
446 with repo_init as repo:
440 blob = objects.Blob()
447 blob = objects.Blob()
441 blob.set_raw_string(content)
448 blob.set_raw_string(content)
442 repo.object_store.add_object(blob)
449 repo.object_store.add_object(blob)
443 return blob.id
450 return blob.id
444
451
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Create a new commit from in-memory node changes, using dulwich.

        :param commit_data: mapping of dulwich Commit attributes
            (author, message, ...) set verbatim on the new commit object
        :param branch: branch name to point at the new commit
        :param commit_tree: sha of the base tree, or falsy to start empty
        :param updated: list of dicts with 'path', 'node_path', 'content',
            'mode' keys describing added/changed files
        :param removed: list of path strings to delete
        :return: sha of the newly created commit
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            # NOTE: py2 map() returns a list here; .pop(0)/.insert(0) below
            # rely on that
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            # NOTE: py2 zip() returns a list; reversed() would fail on a
            # py3 zip iterator
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        # py2-only dict API; commit_data keys are assumed to be valid
        # dulwich Commit attribute names
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
546
553
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch from a remote (local path or http(s) URL) using dulwich.

        :param url: 'default' / path -> LocalGitClient; otherwise HttpGitClient
        :param apply_refs: when True, write fetched refs into the local repo
        :param refs: optional list of ref names to restrict the fetch to
        :param update_after: when True, set HEAD and rebuild the index
        :return: dict of remote refs (filtered by ``refs`` when given)
        :raises exceptions.AbortException: if the remote is not a git repo
        """
        # local paths have no scheme; anything with '://' goes over http(s)
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            # authinfo() strips/injects credentials into the URL
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
599
606
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """
        Fetch refs from a remote ``url`` using the git CLI.

        First lists remote refs with ``ls-remote`` (heads+tags unless
        ``all_refs``), then fetches the selected refspecs in chunks.

        :param refs: optional sha or list of shas; only refs pointing at
            these shas are fetched
        :param all_refs: when True, list every remote ref, not just
            heads and tags
        :return: OrderedDict of remote ref name -> sha (HEAD and peeled
            refs excluded)
        """
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        # ls-remote output format: "<sha>\t<ref-name>" per line
        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # chunk the refspecs to keep the git command line within limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
653
660
654 @reraise_safe_exceptions
661 @reraise_safe_exceptions
655 def sync_push(self, wire, url, refs=None):
662 def sync_push(self, wire, url, refs=None):
656 if not self.check_url(url, wire):
663 if not self.check_url(url, wire):
657 return
664 return
658 config = self._wire_to_config(wire)
665 config = self._wire_to_config(wire)
659 self._factory.repo(wire)
666 self._factory.repo(wire)
660 self.run_git_command(
667 self.run_git_command(
661 wire, ['push', url, '--mirror'], fail_on_stderr=False,
668 wire, ['push', url, '--mirror'], fail_on_stderr=False,
662 _copts=self._remote_conf(config),
669 _copts=self._remote_conf(config),
663 extra_env={'GIT_TERMINAL_PROMPT': '0'})
670 extra_env={'GIT_TERMINAL_PROMPT': '0'})
664
671
665 @reraise_safe_exceptions
672 @reraise_safe_exceptions
666 def get_remote_refs(self, wire, url):
673 def get_remote_refs(self, wire, url):
667 repo = Repo(url)
674 repo = Repo(url)
668 return repo.get_refs()
675 return repo.get_refs()
669
676
670 @reraise_safe_exceptions
677 @reraise_safe_exceptions
671 def get_description(self, wire):
678 def get_description(self, wire):
672 repo = self._factory.repo(wire)
679 repo = self._factory.repo(wire)
673 return repo.get_description()
680 return repo.get_description()
674
681
675 @reraise_safe_exceptions
682 @reraise_safe_exceptions
676 def get_missing_revs(self, wire, rev1, rev2, path2):
683 def get_missing_revs(self, wire, rev1, rev2, path2):
677 repo = self._factory.repo(wire)
684 repo = self._factory.repo(wire)
678 LocalGitClient(thin_packs=False).fetch(path2, repo)
685 LocalGitClient(thin_packs=False).fetch(path2, repo)
679
686
680 wire_remote = wire.copy()
687 wire_remote = wire.copy()
681 wire_remote['path'] = path2
688 wire_remote['path'] = path2
682 repo_remote = self._factory.repo(wire_remote)
689 repo_remote = self._factory.repo(wire_remote)
683 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
690 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
684
691
685 revs = [
692 revs = [
686 x.commit.id
693 x.commit.id
687 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
694 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
688 return revs
695 return revs
689
696
690 @reraise_safe_exceptions
697 @reraise_safe_exceptions
691 def get_object(self, wire, sha, maybe_unreachable=False):
698 def get_object(self, wire, sha, maybe_unreachable=False):
692 cache_on, context_uid, repo_id = self._cache_on(wire)
699 cache_on, context_uid, repo_id = self._cache_on(wire)
693 @self.region.conditional_cache_on_arguments(condition=cache_on)
700 region = self.region(wire)
701 @region.conditional_cache_on_arguments(condition=cache_on)
694 def _get_object(_context_uid, _repo_id, _sha):
702 def _get_object(_context_uid, _repo_id, _sha):
695 repo_init = self._factory.repo_libgit2(wire)
703 repo_init = self._factory.repo_libgit2(wire)
696 with repo_init as repo:
704 with repo_init as repo:
697
705
698 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
706 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
699 try:
707 try:
700 commit = repo.revparse_single(sha)
708 commit = repo.revparse_single(sha)
701 except KeyError:
709 except KeyError:
702 # NOTE(marcink): KeyError doesn't give us any meaningful information
710 # NOTE(marcink): KeyError doesn't give us any meaningful information
703 # here, we instead give something more explicit
711 # here, we instead give something more explicit
704 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
712 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
705 raise exceptions.LookupException(e)(missing_commit_err)
713 raise exceptions.LookupException(e)(missing_commit_err)
706 except ValueError as e:
714 except ValueError as e:
707 raise exceptions.LookupException(e)(missing_commit_err)
715 raise exceptions.LookupException(e)(missing_commit_err)
708
716
709 is_tag = False
717 is_tag = False
710 if isinstance(commit, pygit2.Tag):
718 if isinstance(commit, pygit2.Tag):
711 commit = repo.get(commit.target)
719 commit = repo.get(commit.target)
712 is_tag = True
720 is_tag = True
713
721
714 check_dangling = True
722 check_dangling = True
715 if is_tag:
723 if is_tag:
716 check_dangling = False
724 check_dangling = False
717
725
718 if check_dangling and maybe_unreachable:
726 if check_dangling and maybe_unreachable:
719 check_dangling = False
727 check_dangling = False
720
728
721 # we used a reference and it parsed means we're not having a dangling commit
729 # we used a reference and it parsed means we're not having a dangling commit
722 if sha != commit.hex:
730 if sha != commit.hex:
723 check_dangling = False
731 check_dangling = False
724
732
725 if check_dangling:
733 if check_dangling:
726 # check for dangling commit
734 # check for dangling commit
727 for branch in repo.branches.with_commit(commit.hex):
735 for branch in repo.branches.with_commit(commit.hex):
728 if branch:
736 if branch:
729 break
737 break
730 else:
738 else:
731 # NOTE(marcink): Empty error doesn't give us any meaningful information
739 # NOTE(marcink): Empty error doesn't give us any meaningful information
732 # here, we instead give something more explicit
740 # here, we instead give something more explicit
733 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
741 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
734 raise exceptions.LookupException(e)(missing_commit_err)
742 raise exceptions.LookupException(e)(missing_commit_err)
735
743
736 commit_id = commit.hex
744 commit_id = commit.hex
737 type_id = commit.type
745 type_id = commit.type
738
746
739 return {
747 return {
740 'id': commit_id,
748 'id': commit_id,
741 'type': self._type_id_to_name(type_id),
749 'type': self._type_id_to_name(type_id),
742 'commit_id': commit_id,
750 'commit_id': commit_id,
743 'idx': 0
751 'idx': 0
744 }
752 }
745
753
746 return _get_object(context_uid, repo_id, sha)
754 return _get_object(context_uid, repo_id, sha)
747
755
748 @reraise_safe_exceptions
756 @reraise_safe_exceptions
749 def get_refs(self, wire):
757 def get_refs(self, wire):
750 cache_on, context_uid, repo_id = self._cache_on(wire)
758 cache_on, context_uid, repo_id = self._cache_on(wire)
751 @self.region.conditional_cache_on_arguments(condition=cache_on)
759 region = self.region(wire)
760 @region.conditional_cache_on_arguments(condition=cache_on)
752 def _get_refs(_context_uid, _repo_id):
761 def _get_refs(_context_uid, _repo_id):
753
762
754 repo_init = self._factory.repo_libgit2(wire)
763 repo_init = self._factory.repo_libgit2(wire)
755 with repo_init as repo:
764 with repo_init as repo:
756 regex = re.compile('^refs/(heads|tags)/')
765 regex = re.compile('^refs/(heads|tags)/')
757 return {x.name: x.target.hex for x in
766 return {x.name: x.target.hex for x in
758 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
767 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
759
768
760 return _get_refs(context_uid, repo_id)
769 return _get_refs(context_uid, repo_id)
761
770
762 @reraise_safe_exceptions
771 @reraise_safe_exceptions
763 def get_branch_pointers(self, wire):
772 def get_branch_pointers(self, wire):
764 cache_on, context_uid, repo_id = self._cache_on(wire)
773 cache_on, context_uid, repo_id = self._cache_on(wire)
765 @self.region.conditional_cache_on_arguments(condition=cache_on)
774 region = self.region(wire)
775 @region.conditional_cache_on_arguments(condition=cache_on)
766 def _get_branch_pointers(_context_uid, _repo_id):
776 def _get_branch_pointers(_context_uid, _repo_id):
767
777
768 repo_init = self._factory.repo_libgit2(wire)
778 repo_init = self._factory.repo_libgit2(wire)
769 regex = re.compile('^refs/heads')
779 regex = re.compile('^refs/heads')
770 with repo_init as repo:
780 with repo_init as repo:
771 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
781 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
772 return {x.target.hex: x.shorthand for x in branches}
782 return {x.target.hex: x.shorthand for x in branches}
773
783
774 return _get_branch_pointers(context_uid, repo_id)
784 return _get_branch_pointers(context_uid, repo_id)
775
785
776 @reraise_safe_exceptions
786 @reraise_safe_exceptions
777 def head(self, wire, show_exc=True):
787 def head(self, wire, show_exc=True):
778 cache_on, context_uid, repo_id = self._cache_on(wire)
788 cache_on, context_uid, repo_id = self._cache_on(wire)
779 @self.region.conditional_cache_on_arguments(condition=cache_on)
789 region = self.region(wire)
790 @region.conditional_cache_on_arguments(condition=cache_on)
780 def _head(_context_uid, _repo_id, _show_exc):
791 def _head(_context_uid, _repo_id, _show_exc):
781 repo_init = self._factory.repo_libgit2(wire)
792 repo_init = self._factory.repo_libgit2(wire)
782 with repo_init as repo:
793 with repo_init as repo:
783 try:
794 try:
784 return repo.head.peel().hex
795 return repo.head.peel().hex
785 except Exception:
796 except Exception:
786 if show_exc:
797 if show_exc:
787 raise
798 raise
788 return _head(context_uid, repo_id, show_exc)
799 return _head(context_uid, repo_id, show_exc)
789
800
790 @reraise_safe_exceptions
801 @reraise_safe_exceptions
791 def init(self, wire):
802 def init(self, wire):
792 repo_path = str_to_dulwich(wire['path'])
803 repo_path = str_to_dulwich(wire['path'])
793 self.repo = Repo.init(repo_path)
804 self.repo = Repo.init(repo_path)
794
805
795 @reraise_safe_exceptions
806 @reraise_safe_exceptions
796 def init_bare(self, wire):
807 def init_bare(self, wire):
797 repo_path = str_to_dulwich(wire['path'])
808 repo_path = str_to_dulwich(wire['path'])
798 self.repo = Repo.init_bare(repo_path)
809 self.repo = Repo.init_bare(repo_path)
799
810
800 @reraise_safe_exceptions
811 @reraise_safe_exceptions
801 def revision(self, wire, rev):
812 def revision(self, wire, rev):
802
813
803 cache_on, context_uid, repo_id = self._cache_on(wire)
814 cache_on, context_uid, repo_id = self._cache_on(wire)
804 @self.region.conditional_cache_on_arguments(condition=cache_on)
815 region = self.region(wire)
816 @region.conditional_cache_on_arguments(condition=cache_on)
805 def _revision(_context_uid, _repo_id, _rev):
817 def _revision(_context_uid, _repo_id, _rev):
806 repo_init = self._factory.repo_libgit2(wire)
818 repo_init = self._factory.repo_libgit2(wire)
807 with repo_init as repo:
819 with repo_init as repo:
808 commit = repo[rev]
820 commit = repo[rev]
809 obj_data = {
821 obj_data = {
810 'id': commit.id.hex,
822 'id': commit.id.hex,
811 }
823 }
812 # tree objects itself don't have tree_id attribute
824 # tree objects itself don't have tree_id attribute
813 if hasattr(commit, 'tree_id'):
825 if hasattr(commit, 'tree_id'):
814 obj_data['tree'] = commit.tree_id.hex
826 obj_data['tree'] = commit.tree_id.hex
815
827
816 return obj_data
828 return obj_data
817 return _revision(context_uid, repo_id, rev)
829 return _revision(context_uid, repo_id, rev)
818
830
819 @reraise_safe_exceptions
831 @reraise_safe_exceptions
820 def date(self, wire, commit_id):
832 def date(self, wire, commit_id):
821 cache_on, context_uid, repo_id = self._cache_on(wire)
833 cache_on, context_uid, repo_id = self._cache_on(wire)
822 @self.region.conditional_cache_on_arguments(condition=cache_on)
834 region = self.region(wire)
835 @region.conditional_cache_on_arguments(condition=cache_on)
823 def _date(_repo_id, _commit_id):
836 def _date(_repo_id, _commit_id):
824 repo_init = self._factory.repo_libgit2(wire)
837 repo_init = self._factory.repo_libgit2(wire)
825 with repo_init as repo:
838 with repo_init as repo:
826 commit = repo[commit_id]
839 commit = repo[commit_id]
827
840
828 if hasattr(commit, 'commit_time'):
841 if hasattr(commit, 'commit_time'):
829 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
842 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
830 else:
843 else:
831 commit = commit.get_object()
844 commit = commit.get_object()
832 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
845 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
833
846
834 # TODO(marcink): check dulwich difference of offset vs timezone
847 # TODO(marcink): check dulwich difference of offset vs timezone
835 return [commit_time, commit_time_offset]
848 return [commit_time, commit_time_offset]
836 return _date(repo_id, commit_id)
849 return _date(repo_id, commit_id)
837
850
838 @reraise_safe_exceptions
851 @reraise_safe_exceptions
839 def author(self, wire, commit_id):
852 def author(self, wire, commit_id):
840 cache_on, context_uid, repo_id = self._cache_on(wire)
853 cache_on, context_uid, repo_id = self._cache_on(wire)
841 @self.region.conditional_cache_on_arguments(condition=cache_on)
854 region = self.region(wire)
855 @region.conditional_cache_on_arguments(condition=cache_on)
842 def _author(_repo_id, _commit_id):
856 def _author(_repo_id, _commit_id):
843 repo_init = self._factory.repo_libgit2(wire)
857 repo_init = self._factory.repo_libgit2(wire)
844 with repo_init as repo:
858 with repo_init as repo:
845 commit = repo[commit_id]
859 commit = repo[commit_id]
846
860
847 if hasattr(commit, 'author'):
861 if hasattr(commit, 'author'):
848 author = commit.author
862 author = commit.author
849 else:
863 else:
850 author = commit.get_object().author
864 author = commit.get_object().author
851
865
852 if author.email:
866 if author.email:
853 return u"{} <{}>".format(author.name, author.email)
867 return u"{} <{}>".format(author.name, author.email)
854
868
855 try:
869 try:
856 return u"{}".format(author.name)
870 return u"{}".format(author.name)
857 except Exception:
871 except Exception:
858 return u"{}".format(safe_unicode(author.raw_name))
872 return u"{}".format(safe_unicode(author.raw_name))
859
873
860 return _author(repo_id, commit_id)
874 return _author(repo_id, commit_id)
861
875
862 @reraise_safe_exceptions
876 @reraise_safe_exceptions
863 def message(self, wire, commit_id):
877 def message(self, wire, commit_id):
864 cache_on, context_uid, repo_id = self._cache_on(wire)
878 cache_on, context_uid, repo_id = self._cache_on(wire)
865 @self.region.conditional_cache_on_arguments(condition=cache_on)
879 region = self.region(wire)
880 @region.conditional_cache_on_arguments(condition=cache_on)
866 def _message(_repo_id, _commit_id):
881 def _message(_repo_id, _commit_id):
867 repo_init = self._factory.repo_libgit2(wire)
882 repo_init = self._factory.repo_libgit2(wire)
868 with repo_init as repo:
883 with repo_init as repo:
869 commit = repo[commit_id]
884 commit = repo[commit_id]
870 return commit.message
885 return commit.message
871 return _message(repo_id, commit_id)
886 return _message(repo_id, commit_id)
872
887
873 @reraise_safe_exceptions
888 @reraise_safe_exceptions
874 def parents(self, wire, commit_id):
889 def parents(self, wire, commit_id):
875 cache_on, context_uid, repo_id = self._cache_on(wire)
890 cache_on, context_uid, repo_id = self._cache_on(wire)
876 @self.region.conditional_cache_on_arguments(condition=cache_on)
891 region = self.region(wire)
892 @region.conditional_cache_on_arguments(condition=cache_on)
877 def _parents(_repo_id, _commit_id):
893 def _parents(_repo_id, _commit_id):
878 repo_init = self._factory.repo_libgit2(wire)
894 repo_init = self._factory.repo_libgit2(wire)
879 with repo_init as repo:
895 with repo_init as repo:
880 commit = repo[commit_id]
896 commit = repo[commit_id]
881 if hasattr(commit, 'parent_ids'):
897 if hasattr(commit, 'parent_ids'):
882 parent_ids = commit.parent_ids
898 parent_ids = commit.parent_ids
883 else:
899 else:
884 parent_ids = commit.get_object().parent_ids
900 parent_ids = commit.get_object().parent_ids
885
901
886 return [x.hex for x in parent_ids]
902 return [x.hex for x in parent_ids]
887 return _parents(repo_id, commit_id)
903 return _parents(repo_id, commit_id)
888
904
889 @reraise_safe_exceptions
905 @reraise_safe_exceptions
890 def children(self, wire, commit_id):
906 def children(self, wire, commit_id):
891 cache_on, context_uid, repo_id = self._cache_on(wire)
907 cache_on, context_uid, repo_id = self._cache_on(wire)
892 @self.region.conditional_cache_on_arguments(condition=cache_on)
908 region = self.region(wire)
909 @region.conditional_cache_on_arguments(condition=cache_on)
893 def _children(_repo_id, _commit_id):
910 def _children(_repo_id, _commit_id):
894 output, __ = self.run_git_command(
911 output, __ = self.run_git_command(
895 wire, ['rev-list', '--all', '--children'])
912 wire, ['rev-list', '--all', '--children'])
896
913
897 child_ids = []
914 child_ids = []
898 pat = re.compile(r'^%s' % commit_id)
915 pat = re.compile(r'^%s' % commit_id)
899 for l in output.splitlines():
916 for l in output.splitlines():
900 if pat.match(l):
917 if pat.match(l):
901 found_ids = l.split(' ')[1:]
918 found_ids = l.split(' ')[1:]
902 child_ids.extend(found_ids)
919 child_ids.extend(found_ids)
903
920
904 return child_ids
921 return child_ids
905 return _children(repo_id, commit_id)
922 return _children(repo_id, commit_id)
906
923
907 @reraise_safe_exceptions
924 @reraise_safe_exceptions
908 def set_refs(self, wire, key, value):
925 def set_refs(self, wire, key, value):
909 repo_init = self._factory.repo_libgit2(wire)
926 repo_init = self._factory.repo_libgit2(wire)
910 with repo_init as repo:
927 with repo_init as repo:
911 repo.references.create(key, value, force=True)
928 repo.references.create(key, value, force=True)
912
929
913 @reraise_safe_exceptions
930 @reraise_safe_exceptions
914 def create_branch(self, wire, branch_name, commit_id, force=False):
931 def create_branch(self, wire, branch_name, commit_id, force=False):
915 repo_init = self._factory.repo_libgit2(wire)
932 repo_init = self._factory.repo_libgit2(wire)
916 with repo_init as repo:
933 with repo_init as repo:
917 commit = repo[commit_id]
934 commit = repo[commit_id]
918
935
919 if force:
936 if force:
920 repo.branches.local.create(branch_name, commit, force=force)
937 repo.branches.local.create(branch_name, commit, force=force)
921 elif not repo.branches.get(branch_name):
938 elif not repo.branches.get(branch_name):
922 # create only if that branch isn't existing
939 # create only if that branch isn't existing
923 repo.branches.local.create(branch_name, commit, force=force)
940 repo.branches.local.create(branch_name, commit, force=force)
924
941
925 @reraise_safe_exceptions
942 @reraise_safe_exceptions
926 def remove_ref(self, wire, key):
943 def remove_ref(self, wire, key):
927 repo_init = self._factory.repo_libgit2(wire)
944 repo_init = self._factory.repo_libgit2(wire)
928 with repo_init as repo:
945 with repo_init as repo:
929 repo.references.delete(key)
946 repo.references.delete(key)
930
947
931 @reraise_safe_exceptions
948 @reraise_safe_exceptions
932 def tag_remove(self, wire, tag_name):
949 def tag_remove(self, wire, tag_name):
933 repo_init = self._factory.repo_libgit2(wire)
950 repo_init = self._factory.repo_libgit2(wire)
934 with repo_init as repo:
951 with repo_init as repo:
935 key = 'refs/tags/{}'.format(tag_name)
952 key = 'refs/tags/{}'.format(tag_name)
936 repo.references.delete(key)
953 repo.references.delete(key)
937
954
938 @reraise_safe_exceptions
955 @reraise_safe_exceptions
939 def tree_changes(self, wire, source_id, target_id):
956 def tree_changes(self, wire, source_id, target_id):
940 # TODO(marcink): remove this seems it's only used by tests
957 # TODO(marcink): remove this seems it's only used by tests
941 repo = self._factory.repo(wire)
958 repo = self._factory.repo(wire)
942 source = repo[source_id].tree if source_id else None
959 source = repo[source_id].tree if source_id else None
943 target = repo[target_id].tree
960 target = repo[target_id].tree
944 result = repo.object_store.tree_changes(source, target)
961 result = repo.object_store.tree_changes(source, target)
945 return list(result)
962 return list(result)
946
963
947 @reraise_safe_exceptions
964 @reraise_safe_exceptions
948 def tree_and_type_for_path(self, wire, commit_id, path):
965 def tree_and_type_for_path(self, wire, commit_id, path):
949
966
950 cache_on, context_uid, repo_id = self._cache_on(wire)
967 cache_on, context_uid, repo_id = self._cache_on(wire)
951 @self.region.conditional_cache_on_arguments(condition=cache_on)
968 region = self.region(wire)
969 @region.conditional_cache_on_arguments(condition=cache_on)
952 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
970 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
953 repo_init = self._factory.repo_libgit2(wire)
971 repo_init = self._factory.repo_libgit2(wire)
954
972
955 with repo_init as repo:
973 with repo_init as repo:
956 commit = repo[commit_id]
974 commit = repo[commit_id]
957 try:
975 try:
958 tree = commit.tree[path]
976 tree = commit.tree[path]
959 except KeyError:
977 except KeyError:
960 return None, None, None
978 return None, None, None
961
979
962 return tree.id.hex, tree.type, tree.filemode
980 return tree.id.hex, tree.type, tree.filemode
963 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
981 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
964
982
965 @reraise_safe_exceptions
983 @reraise_safe_exceptions
966 def tree_items(self, wire, tree_id):
984 def tree_items(self, wire, tree_id):
967 cache_on, context_uid, repo_id = self._cache_on(wire)
985 cache_on, context_uid, repo_id = self._cache_on(wire)
968 @self.region.conditional_cache_on_arguments(condition=cache_on)
986 region = self.region(wire)
987 @region.conditional_cache_on_arguments(condition=cache_on)
969 def _tree_items(_repo_id, _tree_id):
988 def _tree_items(_repo_id, _tree_id):
970
989
971 repo_init = self._factory.repo_libgit2(wire)
990 repo_init = self._factory.repo_libgit2(wire)
972 with repo_init as repo:
991 with repo_init as repo:
973 try:
992 try:
974 tree = repo[tree_id]
993 tree = repo[tree_id]
975 except KeyError:
994 except KeyError:
976 raise ObjectMissing('No tree with id: {}'.format(tree_id))
995 raise ObjectMissing('No tree with id: {}'.format(tree_id))
977
996
978 result = []
997 result = []
979 for item in tree:
998 for item in tree:
980 item_sha = item.hex
999 item_sha = item.hex
981 item_mode = item.filemode
1000 item_mode = item.filemode
982 item_type = item.type
1001 item_type = item.type
983
1002
984 if item_type == 'commit':
1003 if item_type == 'commit':
985 # NOTE(marcink): submodules we translate to 'link' for backward compat
1004 # NOTE(marcink): submodules we translate to 'link' for backward compat
986 item_type = 'link'
1005 item_type = 'link'
987
1006
988 result.append((item.name, item_mode, item_sha, item_type))
1007 result.append((item.name, item_mode, item_sha, item_type))
989 return result
1008 return result
990 return _tree_items(repo_id, tree_id)
1009 return _tree_items(repo_id, tree_id)
991
1010
992 @reraise_safe_exceptions
1011 @reraise_safe_exceptions
993 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1012 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
994 """
1013 """
995 Old version that uses subprocess to call diff
1014 Old version that uses subprocess to call diff
996 """
1015 """
997
1016
998 flags = [
1017 flags = [
999 '-U%s' % context, '--patch',
1018 '-U%s' % context, '--patch',
1000 '--binary',
1019 '--binary',
1001 '--find-renames',
1020 '--find-renames',
1002 '--no-indent-heuristic',
1021 '--no-indent-heuristic',
1003 # '--indent-heuristic',
1022 # '--indent-heuristic',
1004 #'--full-index',
1023 #'--full-index',
1005 #'--abbrev=40'
1024 #'--abbrev=40'
1006 ]
1025 ]
1007
1026
1008 if opt_ignorews:
1027 if opt_ignorews:
1009 flags.append('--ignore-all-space')
1028 flags.append('--ignore-all-space')
1010
1029
1011 if commit_id_1 == self.EMPTY_COMMIT:
1030 if commit_id_1 == self.EMPTY_COMMIT:
1012 cmd = ['show'] + flags + [commit_id_2]
1031 cmd = ['show'] + flags + [commit_id_2]
1013 else:
1032 else:
1014 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1033 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1015
1034
1016 if file_filter:
1035 if file_filter:
1017 cmd.extend(['--', file_filter])
1036 cmd.extend(['--', file_filter])
1018
1037
1019 diff, __ = self.run_git_command(wire, cmd)
1038 diff, __ = self.run_git_command(wire, cmd)
1020 # If we used 'show' command, strip first few lines (until actual diff
1039 # If we used 'show' command, strip first few lines (until actual diff
1021 # starts)
1040 # starts)
1022 if commit_id_1 == self.EMPTY_COMMIT:
1041 if commit_id_1 == self.EMPTY_COMMIT:
1023 lines = diff.splitlines()
1042 lines = diff.splitlines()
1024 x = 0
1043 x = 0
1025 for line in lines:
1044 for line in lines:
1026 if line.startswith('diff'):
1045 if line.startswith('diff'):
1027 break
1046 break
1028 x += 1
1047 x += 1
1029 # Append new line just like 'diff' command do
1048 # Append new line just like 'diff' command do
1030 diff = '\n'.join(lines[x:]) + '\n'
1049 diff = '\n'.join(lines[x:]) + '\n'
1031 return diff
1050 return diff
1032
1051
1033 @reraise_safe_exceptions
1052 @reraise_safe_exceptions
1034 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1053 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1035 repo_init = self._factory.repo_libgit2(wire)
1054 repo_init = self._factory.repo_libgit2(wire)
1036 with repo_init as repo:
1055 with repo_init as repo:
1037 swap = True
1056 swap = True
1038 flags = 0
1057 flags = 0
1039 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1058 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1040
1059
1041 if opt_ignorews:
1060 if opt_ignorews:
1042 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1061 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1043
1062
1044 if commit_id_1 == self.EMPTY_COMMIT:
1063 if commit_id_1 == self.EMPTY_COMMIT:
1045 comm1 = repo[commit_id_2]
1064 comm1 = repo[commit_id_2]
1046 diff_obj = comm1.tree.diff_to_tree(
1065 diff_obj = comm1.tree.diff_to_tree(
1047 flags=flags, context_lines=context, swap=swap)
1066 flags=flags, context_lines=context, swap=swap)
1048
1067
1049 else:
1068 else:
1050 comm1 = repo[commit_id_2]
1069 comm1 = repo[commit_id_2]
1051 comm2 = repo[commit_id_1]
1070 comm2 = repo[commit_id_1]
1052 diff_obj = comm1.tree.diff_to_tree(
1071 diff_obj = comm1.tree.diff_to_tree(
1053 comm2.tree, flags=flags, context_lines=context, swap=swap)
1072 comm2.tree, flags=flags, context_lines=context, swap=swap)
1054 similar_flags = 0
1073 similar_flags = 0
1055 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1074 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1056 diff_obj.find_similar(flags=similar_flags)
1075 diff_obj.find_similar(flags=similar_flags)
1057
1076
1058 if file_filter:
1077 if file_filter:
1059 for p in diff_obj:
1078 for p in diff_obj:
1060 if p.delta.old_file.path == file_filter:
1079 if p.delta.old_file.path == file_filter:
1061 return p.patch or ''
1080 return p.patch or ''
1062 # fo matching path == no diff
1081 # fo matching path == no diff
1063 return ''
1082 return ''
1064 return diff_obj.patch or ''
1083 return diff_obj.patch or ''
1065
1084
1066 @reraise_safe_exceptions
1085 @reraise_safe_exceptions
1067 def node_history(self, wire, commit_id, path, limit):
1086 def node_history(self, wire, commit_id, path, limit):
1068 cache_on, context_uid, repo_id = self._cache_on(wire)
1087 cache_on, context_uid, repo_id = self._cache_on(wire)
1069 @self.region.conditional_cache_on_arguments(condition=cache_on)
1088 region = self.region(wire)
1089 @region.conditional_cache_on_arguments(condition=cache_on)
1070 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1090 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1071 # optimize for n==1, rev-list is much faster for that use-case
1091 # optimize for n==1, rev-list is much faster for that use-case
1072 if limit == 1:
1092 if limit == 1:
1073 cmd = ['rev-list', '-1', commit_id, '--', path]
1093 cmd = ['rev-list', '-1', commit_id, '--', path]
1074 else:
1094 else:
1075 cmd = ['log']
1095 cmd = ['log']
1076 if limit:
1096 if limit:
1077 cmd.extend(['-n', str(safe_int(limit, 0))])
1097 cmd.extend(['-n', str(safe_int(limit, 0))])
1078 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1098 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1079
1099
1080 output, __ = self.run_git_command(wire, cmd)
1100 output, __ = self.run_git_command(wire, cmd)
1081 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1101 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1082
1102
1083 return [x for x in commit_ids]
1103 return [x for x in commit_ids]
1084 return _node_history(context_uid, repo_id, commit_id, path, limit)
1104 return _node_history(context_uid, repo_id, commit_id, path, limit)
1085
1105
1086 @reraise_safe_exceptions
1106 @reraise_safe_exceptions
1087 def node_annotate(self, wire, commit_id, path):
1107 def node_annotate(self, wire, commit_id, path):
1088
1108
1089 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1109 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1090 # -l ==> outputs long shas (and we need all 40 characters)
1110 # -l ==> outputs long shas (and we need all 40 characters)
1091 # --root ==> doesn't put '^' character for boundaries
1111 # --root ==> doesn't put '^' character for boundaries
1092 # -r commit_id ==> blames for the given commit
1112 # -r commit_id ==> blames for the given commit
1093 output, __ = self.run_git_command(wire, cmd)
1113 output, __ = self.run_git_command(wire, cmd)
1094
1114
1095 result = []
1115 result = []
1096 for i, blame_line in enumerate(output.split('\n')[:-1]):
1116 for i, blame_line in enumerate(output.split('\n')[:-1]):
1097 line_no = i + 1
1117 line_no = i + 1
1098 commit_id, line = re.split(r' ', blame_line, 1)
1118 commit_id, line = re.split(r' ', blame_line, 1)
1099 result.append((line_no, commit_id, line))
1119 result.append((line_no, commit_id, line))
1100 return result
1120 return result
1101
1121
1102 @reraise_safe_exceptions
1122 @reraise_safe_exceptions
1103 def update_server_info(self, wire):
1123 def update_server_info(self, wire):
1104 repo = self._factory.repo(wire)
1124 repo = self._factory.repo(wire)
1105 update_server_info(repo)
1125 update_server_info(repo)
1106
1126
1107 @reraise_safe_exceptions
1127 @reraise_safe_exceptions
1108 def get_all_commit_ids(self, wire):
1128 def get_all_commit_ids(self, wire):
1109
1129
1110 cache_on, context_uid, repo_id = self._cache_on(wire)
1130 cache_on, context_uid, repo_id = self._cache_on(wire)
1111 @self.region.conditional_cache_on_arguments(condition=cache_on)
1131 region = self.region(wire)
1132 @region.conditional_cache_on_arguments(condition=cache_on)
1112 def _get_all_commit_ids(_context_uid, _repo_id):
1133 def _get_all_commit_ids(_context_uid, _repo_id):
1113
1134
1114 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1135 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1115 try:
1136 try:
1116 output, __ = self.run_git_command(wire, cmd)
1137 output, __ = self.run_git_command(wire, cmd)
1117 return output.splitlines()
1138 return output.splitlines()
1118 except Exception:
1139 except Exception:
1119 # Can be raised for empty repositories
1140 # Can be raised for empty repositories
1120 return []
1141 return []
1121 return _get_all_commit_ids(context_uid, repo_id)
1142 return _get_all_commit_ids(context_uid, repo_id)
1122
1143
1123 @reraise_safe_exceptions
1144 @reraise_safe_exceptions
1124 def run_git_command(self, wire, cmd, **opts):
1145 def run_git_command(self, wire, cmd, **opts):
1125 path = wire.get('path', None)
1146 path = wire.get('path', None)
1126
1147
1127 if path and os.path.isdir(path):
1148 if path and os.path.isdir(path):
1128 opts['cwd'] = path
1149 opts['cwd'] = path
1129
1150
1130 if '_bare' in opts:
1151 if '_bare' in opts:
1131 _copts = []
1152 _copts = []
1132 del opts['_bare']
1153 del opts['_bare']
1133 else:
1154 else:
1134 _copts = ['-c', 'core.quotepath=false', ]
1155 _copts = ['-c', 'core.quotepath=false', ]
1135 safe_call = False
1156 safe_call = False
1136 if '_safe' in opts:
1157 if '_safe' in opts:
1137 # no exc on failure
1158 # no exc on failure
1138 del opts['_safe']
1159 del opts['_safe']
1139 safe_call = True
1160 safe_call = True
1140
1161
1141 if '_copts' in opts:
1162 if '_copts' in opts:
1142 _copts.extend(opts['_copts'] or [])
1163 _copts.extend(opts['_copts'] or [])
1143 del opts['_copts']
1164 del opts['_copts']
1144
1165
1145 gitenv = os.environ.copy()
1166 gitenv = os.environ.copy()
1146 gitenv.update(opts.pop('extra_env', {}))
1167 gitenv.update(opts.pop('extra_env', {}))
1147 # need to clean fix GIT_DIR !
1168 # need to clean fix GIT_DIR !
1148 if 'GIT_DIR' in gitenv:
1169 if 'GIT_DIR' in gitenv:
1149 del gitenv['GIT_DIR']
1170 del gitenv['GIT_DIR']
1150 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1171 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1151 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1172 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1152
1173
1153 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1174 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1154 _opts = {'env': gitenv, 'shell': False}
1175 _opts = {'env': gitenv, 'shell': False}
1155
1176
1156 proc = None
1177 proc = None
1157 try:
1178 try:
1158 _opts.update(opts)
1179 _opts.update(opts)
1159 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1180 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1160
1181
1161 return ''.join(proc), ''.join(proc.error)
1182 return ''.join(proc), ''.join(proc.error)
1162 except (EnvironmentError, OSError) as err:
1183 except (EnvironmentError, OSError) as err:
1163 cmd = ' '.join(cmd) # human friendly CMD
1184 cmd = ' '.join(cmd) # human friendly CMD
1164 tb_err = ("Couldn't run git command (%s).\n"
1185 tb_err = ("Couldn't run git command (%s).\n"
1165 "Original error was:%s\n"
1186 "Original error was:%s\n"
1166 "Call options:%s\n"
1187 "Call options:%s\n"
1167 % (cmd, err, _opts))
1188 % (cmd, err, _opts))
1168 log.exception(tb_err)
1189 log.exception(tb_err)
1169 if safe_call:
1190 if safe_call:
1170 return '', err
1191 return '', err
1171 else:
1192 else:
1172 raise exceptions.VcsException()(tb_err)
1193 raise exceptions.VcsException()(tb_err)
1173 finally:
1194 finally:
1174 if proc:
1195 if proc:
1175 proc.close()
1196 proc.close()
1176
1197
1177 @reraise_safe_exceptions
1198 @reraise_safe_exceptions
1178 def install_hooks(self, wire, force=False):
1199 def install_hooks(self, wire, force=False):
1179 from vcsserver.hook_utils import install_git_hooks
1200 from vcsserver.hook_utils import install_git_hooks
1180 bare = self.bare(wire)
1201 bare = self.bare(wire)
1181 path = wire['path']
1202 path = wire['path']
1182 return install_git_hooks(path, bare, force_create=force)
1203 return install_git_hooks(path, bare, force_create=force)
1183
1204
1184 @reraise_safe_exceptions
1205 @reraise_safe_exceptions
1185 def get_hooks_info(self, wire):
1206 def get_hooks_info(self, wire):
1186 from vcsserver.hook_utils import (
1207 from vcsserver.hook_utils import (
1187 get_git_pre_hook_version, get_git_post_hook_version)
1208 get_git_pre_hook_version, get_git_post_hook_version)
1188 bare = self.bare(wire)
1209 bare = self.bare(wire)
1189 path = wire['path']
1210 path = wire['path']
1190 return {
1211 return {
1191 'pre_version': get_git_pre_hook_version(path, bare),
1212 'pre_version': get_git_pre_hook_version(path, bare),
1192 'post_version': get_git_post_hook_version(path, bare),
1213 'post_version': get_git_post_hook_version(path, bare),
1193 }
1214 }
1194
1215
1195 @reraise_safe_exceptions
1216 @reraise_safe_exceptions
1196 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1217 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1197 archive_dir_name, commit_id):
1218 archive_dir_name, commit_id):
1198
1219
1199 def file_walker(_commit_id, path):
1220 def file_walker(_commit_id, path):
1200 repo_init = self._factory.repo_libgit2(wire)
1221 repo_init = self._factory.repo_libgit2(wire)
1201
1222
1202 with repo_init as repo:
1223 with repo_init as repo:
1203 commit = repo[commit_id]
1224 commit = repo[commit_id]
1204
1225
1205 if path in ['', '/']:
1226 if path in ['', '/']:
1206 tree = commit.tree
1227 tree = commit.tree
1207 else:
1228 else:
1208 tree = commit.tree[path.rstrip('/')]
1229 tree = commit.tree[path.rstrip('/')]
1209 tree_id = tree.id.hex
1230 tree_id = tree.id.hex
1210 try:
1231 try:
1211 tree = repo[tree_id]
1232 tree = repo[tree_id]
1212 except KeyError:
1233 except KeyError:
1213 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1234 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1214
1235
1215 index = LibGit2Index.Index()
1236 index = LibGit2Index.Index()
1216 index.read_tree(tree)
1237 index.read_tree(tree)
1217 file_iter = index
1238 file_iter = index
1218
1239
1219 for fn in file_iter:
1240 for fn in file_iter:
1220 file_path = fn.path
1241 file_path = fn.path
1221 mode = fn.mode
1242 mode = fn.mode
1222 is_link = stat.S_ISLNK(mode)
1243 is_link = stat.S_ISLNK(mode)
1223 yield ArchiveNode(file_path, mode, is_link, repo[fn.id].read_raw)
1244 yield ArchiveNode(file_path, mode, is_link, repo[fn.id].read_raw)
1224
1245
1225 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1246 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1226 archive_dir_name, commit_id)
1247 archive_dir_name, commit_id)
@@ -1,1022 +1,1043 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import functools
17 import functools
18 import io
18 import io
19 import logging
19 import logging
20 import os
20 import os
21 import stat
21 import stat
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24 import traceback
24 import traceback
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27 from hgext.strip import strip as hgext_strip
27 from hgext.strip import strip as hgext_strip
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 from vcsserver.hgcompat import (
36 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil)
42 alwaysmatcher, patternmatcher, hgutil)
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
def make_ui_from_config(repo_config):
    """
    Build a mercurial ``ui`` object from ``repo_config`` (an iterable of
    ``(section, option, value)`` tuples), routing mercurial output through
    our logger and applying safe server-side defaults.
    """

    class LoggingUI(ui.ui):
        # mirror every ui channel into the python logging module

        def status(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            log.warn(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    baseui.setconfig('ui', 'paginate', 'never')
    # for better Error reporting of Mercurial
    baseui.setconfig('ui', 'message-output', 'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
101
101
102
102
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e))
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e))
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e))
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except Exception as e:
            # exceptions already translated by vcsserver carry _vcs_kind;
            # anything else is unexpected and gets logged + wrapped
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e))

            raise
    return wrapper
126
126
127
127
128 class MercurialFactory(RepoFactory):
128 class MercurialFactory(RepoFactory):
129 repo_type = 'hg'
129 repo_type = 'hg'
130
130
131 def _create_config(self, config, hooks=True):
131 def _create_config(self, config, hooks=True):
132 if not hooks:
132 if not hooks:
133 hooks_to_clean = frozenset((
133 hooks_to_clean = frozenset((
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
136 new_config = []
136 new_config = []
137 for section, option, value in config:
137 for section, option, value in config:
138 if section == 'hooks' and option in hooks_to_clean:
138 if section == 'hooks' and option in hooks_to_clean:
139 continue
139 continue
140 new_config.append((section, option, value))
140 new_config.append((section, option, value))
141 config = new_config
141 config = new_config
142
142
143 baseui = make_ui_from_config(config)
143 baseui = make_ui_from_config(config)
144 return baseui
144 return baseui
145
145
146 def _create_repo(self, wire, create):
146 def _create_repo(self, wire, create):
147 baseui = self._create_config(wire["config"])
147 baseui = self._create_config(wire["config"])
148 return instance(baseui, wire["path"], create)
148 return instance(baseui, wire["path"], create)
149
149
150 def repo(self, wire, create=False):
150 def repo(self, wire, create=False):
151 """
151 """
152 Get a repository instance for the given path.
152 Get a repository instance for the given path.
153 """
153 """
154 return self._create_repo(wire, create)
154 return self._create_repo(wire, create)
155
155
156
156
157 def patch_ui_message_output(baseui):
157 def patch_ui_message_output(baseui):
158 baseui.setconfig('ui', 'quiet', 'false')
158 baseui.setconfig('ui', 'quiet', 'false')
159 output = io.BytesIO()
159 output = io.BytesIO()
160
160
161 def write(data, **unused_kwargs):
161 def write(data, **unused_kwargs):
162 output.write(data)
162 output.write(data)
163
163
164 baseui.status = write
164 baseui.status = write
165 baseui.write = write
165 baseui.write = write
166 baseui.warn = write
166 baseui.warn = write
167 baseui.debug = write
167 baseui.debug = write
168
168
169 return baseui, output
169 return baseui, output
170
170
171
171
172 class HgRemote(RemoteBase):
172 class HgRemote(RemoteBase):
173
173
174 def __init__(self, factory):
174 def __init__(self, factory):
175 self._factory = factory
175 self._factory = factory
176 self._bulk_methods = {
176 self._bulk_methods = {
177 "affected_files": self.ctx_files,
177 "affected_files": self.ctx_files,
178 "author": self.ctx_user,
178 "author": self.ctx_user,
179 "branch": self.ctx_branch,
179 "branch": self.ctx_branch,
180 "children": self.ctx_children,
180 "children": self.ctx_children,
181 "date": self.ctx_date,
181 "date": self.ctx_date,
182 "message": self.ctx_description,
182 "message": self.ctx_description,
183 "parents": self.ctx_parents,
183 "parents": self.ctx_parents,
184 "status": self.ctx_status,
184 "status": self.ctx_status,
185 "obsolete": self.ctx_obsolete,
185 "obsolete": self.ctx_obsolete,
186 "phase": self.ctx_phase,
186 "phase": self.ctx_phase,
187 "hidden": self.ctx_hidden,
187 "hidden": self.ctx_hidden,
188 "_file_paths": self.ctx_list,
188 "_file_paths": self.ctx_list,
189 }
189 }
190
190
191 def _get_ctx(self, repo, ref):
191 def _get_ctx(self, repo, ref):
192 return get_ctx(repo, ref)
192 return get_ctx(repo, ref)
193
193
194 @reraise_safe_exceptions
194 @reraise_safe_exceptions
195 def discover_hg_version(self):
195 def discover_hg_version(self):
196 from mercurial import util
196 from mercurial import util
197 return util.version()
197 return util.version()
198
198
199 @reraise_safe_exceptions
199 @reraise_safe_exceptions
200 def is_empty(self, wire):
200 def is_empty(self, wire):
201 repo = self._factory.repo(wire)
201 repo = self._factory.repo(wire)
202
202
203 try:
203 try:
204 return len(repo) == 0
204 return len(repo) == 0
205 except Exception:
205 except Exception:
206 log.exception("failed to read object_store")
206 log.exception("failed to read object_store")
207 return False
207 return False
208
208
209 @reraise_safe_exceptions
209 @reraise_safe_exceptions
210 def bookmarks(self, wire):
210 def bookmarks(self, wire):
211 cache_on, context_uid, repo_id = self._cache_on(wire)
211 cache_on, context_uid, repo_id = self._cache_on(wire)
212 @self.region.conditional_cache_on_arguments(condition=cache_on)
212 region = self.region(wire)
213 @region.conditional_cache_on_arguments(condition=cache_on)
213 def _bookmarks(_context_uid, _repo_id):
214 def _bookmarks(_context_uid, _repo_id):
214 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
215 return dict(repo._bookmarks)
216 return dict(repo._bookmarks)
216
217
217 return _bookmarks(context_uid, repo_id)
218 return _bookmarks(context_uid, repo_id)
218
219
219 @reraise_safe_exceptions
220 @reraise_safe_exceptions
220 def branches(self, wire, normal, closed):
221 def branches(self, wire, normal, closed):
221 cache_on, context_uid, repo_id = self._cache_on(wire)
222 cache_on, context_uid, repo_id = self._cache_on(wire)
222 @self.region.conditional_cache_on_arguments(condition=cache_on)
223 region = self.region(wire)
224 @region.conditional_cache_on_arguments(condition=cache_on)
223 def _branches(_context_uid, _repo_id, _normal, _closed):
225 def _branches(_context_uid, _repo_id, _normal, _closed):
224 repo = self._factory.repo(wire)
226 repo = self._factory.repo(wire)
225 iter_branches = repo.branchmap().iterbranches()
227 iter_branches = repo.branchmap().iterbranches()
226 bt = {}
228 bt = {}
227 for branch_name, _heads, tip, is_closed in iter_branches:
229 for branch_name, _heads, tip, is_closed in iter_branches:
228 if normal and not is_closed:
230 if normal and not is_closed:
229 bt[branch_name] = tip
231 bt[branch_name] = tip
230 if closed and is_closed:
232 if closed and is_closed:
231 bt[branch_name] = tip
233 bt[branch_name] = tip
232
234
233 return bt
235 return bt
234
236
235 return _branches(context_uid, repo_id, normal, closed)
237 return _branches(context_uid, repo_id, normal, closed)
236
238
237 @reraise_safe_exceptions
239 @reraise_safe_exceptions
238 def bulk_request(self, wire, commit_id, pre_load):
240 def bulk_request(self, wire, commit_id, pre_load):
239 cache_on, context_uid, repo_id = self._cache_on(wire)
241 cache_on, context_uid, repo_id = self._cache_on(wire)
240 @self.region.conditional_cache_on_arguments(condition=cache_on)
242 region = self.region(wire)
243 @region.conditional_cache_on_arguments(condition=cache_on)
241 def _bulk_request(_repo_id, _commit_id, _pre_load):
244 def _bulk_request(_repo_id, _commit_id, _pre_load):
242 result = {}
245 result = {}
243 for attr in pre_load:
246 for attr in pre_load:
244 try:
247 try:
245 method = self._bulk_methods[attr]
248 method = self._bulk_methods[attr]
246 result[attr] = method(wire, commit_id)
249 result[attr] = method(wire, commit_id)
247 except KeyError as e:
250 except KeyError as e:
248 raise exceptions.VcsException(e)(
251 raise exceptions.VcsException(e)(
249 'Unknown bulk attribute: "%s"' % attr)
252 'Unknown bulk attribute: "%s"' % attr)
250 return result
253 return result
251
254
252 return _bulk_request(repo_id, commit_id, sorted(pre_load))
255 return _bulk_request(repo_id, commit_id, sorted(pre_load))
253
256
254 @reraise_safe_exceptions
257 @reraise_safe_exceptions
255 def ctx_branch(self, wire, commit_id):
258 def ctx_branch(self, wire, commit_id):
256 cache_on, context_uid, repo_id = self._cache_on(wire)
259 cache_on, context_uid, repo_id = self._cache_on(wire)
257 @self.region.conditional_cache_on_arguments(condition=cache_on)
260 region = self.region(wire)
261 @region.conditional_cache_on_arguments(condition=cache_on)
258 def _ctx_branch(_repo_id, _commit_id):
262 def _ctx_branch(_repo_id, _commit_id):
259 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
260 ctx = self._get_ctx(repo, commit_id)
264 ctx = self._get_ctx(repo, commit_id)
261 return ctx.branch()
265 return ctx.branch()
262 return _ctx_branch(repo_id, commit_id)
266 return _ctx_branch(repo_id, commit_id)
263
267
264 @reraise_safe_exceptions
268 @reraise_safe_exceptions
265 def ctx_date(self, wire, commit_id):
269 def ctx_date(self, wire, commit_id):
266 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
271 region = self.region(wire)
272 @region.conditional_cache_on_arguments(condition=cache_on)
268 def _ctx_date(_repo_id, _commit_id):
273 def _ctx_date(_repo_id, _commit_id):
269 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
270 ctx = self._get_ctx(repo, commit_id)
275 ctx = self._get_ctx(repo, commit_id)
271 return ctx.date()
276 return ctx.date()
272 return _ctx_date(repo_id, commit_id)
277 return _ctx_date(repo_id, commit_id)
273
278
274 @reraise_safe_exceptions
279 @reraise_safe_exceptions
275 def ctx_description(self, wire, revision):
280 def ctx_description(self, wire, revision):
276 repo = self._factory.repo(wire)
281 repo = self._factory.repo(wire)
277 ctx = self._get_ctx(repo, revision)
282 ctx = self._get_ctx(repo, revision)
278 return ctx.description()
283 return ctx.description()
279
284
280 @reraise_safe_exceptions
285 @reraise_safe_exceptions
281 def ctx_files(self, wire, commit_id):
286 def ctx_files(self, wire, commit_id):
282 cache_on, context_uid, repo_id = self._cache_on(wire)
287 cache_on, context_uid, repo_id = self._cache_on(wire)
283 @self.region.conditional_cache_on_arguments(condition=cache_on)
288 region = self.region(wire)
289 @region.conditional_cache_on_arguments(condition=cache_on)
284 def _ctx_files(_repo_id, _commit_id):
290 def _ctx_files(_repo_id, _commit_id):
285 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
286 ctx = self._get_ctx(repo, commit_id)
292 ctx = self._get_ctx(repo, commit_id)
287 return ctx.files()
293 return ctx.files()
288
294
289 return _ctx_files(repo_id, commit_id)
295 return _ctx_files(repo_id, commit_id)
290
296
291 @reraise_safe_exceptions
297 @reraise_safe_exceptions
292 def ctx_list(self, path, revision):
298 def ctx_list(self, path, revision):
293 repo = self._factory.repo(path)
299 repo = self._factory.repo(path)
294 ctx = self._get_ctx(repo, revision)
300 ctx = self._get_ctx(repo, revision)
295 return list(ctx)
301 return list(ctx)
296
302
297 @reraise_safe_exceptions
303 @reraise_safe_exceptions
298 def ctx_parents(self, wire, commit_id):
304 def ctx_parents(self, wire, commit_id):
299 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
300 @self.region.conditional_cache_on_arguments(condition=cache_on)
306 region = self.region(wire)
307 @region.conditional_cache_on_arguments(condition=cache_on)
301 def _ctx_parents(_repo_id, _commit_id):
308 def _ctx_parents(_repo_id, _commit_id):
302 repo = self._factory.repo(wire)
309 repo = self._factory.repo(wire)
303 ctx = self._get_ctx(repo, commit_id)
310 ctx = self._get_ctx(repo, commit_id)
304 return [parent.hex() for parent in ctx.parents()
311 return [parent.hex() for parent in ctx.parents()
305 if not (parent.hidden() or parent.obsolete())]
312 if not (parent.hidden() or parent.obsolete())]
306
313
307 return _ctx_parents(repo_id, commit_id)
314 return _ctx_parents(repo_id, commit_id)
308
315
309 @reraise_safe_exceptions
316 @reraise_safe_exceptions
310 def ctx_children(self, wire, commit_id):
317 def ctx_children(self, wire, commit_id):
311 cache_on, context_uid, repo_id = self._cache_on(wire)
318 cache_on, context_uid, repo_id = self._cache_on(wire)
312 @self.region.conditional_cache_on_arguments(condition=cache_on)
319 region = self.region(wire)
320 @region.conditional_cache_on_arguments(condition=cache_on)
313 def _ctx_children(_repo_id, _commit_id):
321 def _ctx_children(_repo_id, _commit_id):
314 repo = self._factory.repo(wire)
322 repo = self._factory.repo(wire)
315 ctx = self._get_ctx(repo, commit_id)
323 ctx = self._get_ctx(repo, commit_id)
316 return [child.hex() for child in ctx.children()
324 return [child.hex() for child in ctx.children()
317 if not (child.hidden() or child.obsolete())]
325 if not (child.hidden() or child.obsolete())]
318
326
319 return _ctx_children(repo_id, commit_id)
327 return _ctx_children(repo_id, commit_id)
320
328
321 @reraise_safe_exceptions
329 @reraise_safe_exceptions
322 def ctx_phase(self, wire, commit_id):
330 def ctx_phase(self, wire, commit_id):
323 cache_on, context_uid, repo_id = self._cache_on(wire)
331 cache_on, context_uid, repo_id = self._cache_on(wire)
324 @self.region.conditional_cache_on_arguments(condition=cache_on)
332 region = self.region(wire)
333 @region.conditional_cache_on_arguments(condition=cache_on)
325 def _ctx_phase(_context_uid, _repo_id, _commit_id):
334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
326 repo = self._factory.repo(wire)
335 repo = self._factory.repo(wire)
327 ctx = self._get_ctx(repo, commit_id)
336 ctx = self._get_ctx(repo, commit_id)
328 # public=0, draft=1, secret=3
337 # public=0, draft=1, secret=3
329 return ctx.phase()
338 return ctx.phase()
330 return _ctx_phase(context_uid, repo_id, commit_id)
339 return _ctx_phase(context_uid, repo_id, commit_id)
331
340
332 @reraise_safe_exceptions
341 @reraise_safe_exceptions
333 def ctx_obsolete(self, wire, commit_id):
342 def ctx_obsolete(self, wire, commit_id):
334 cache_on, context_uid, repo_id = self._cache_on(wire)
343 cache_on, context_uid, repo_id = self._cache_on(wire)
335 @self.region.conditional_cache_on_arguments(condition=cache_on)
344 region = self.region(wire)
345 @region.conditional_cache_on_arguments(condition=cache_on)
336 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
337 repo = self._factory.repo(wire)
347 repo = self._factory.repo(wire)
338 ctx = self._get_ctx(repo, commit_id)
348 ctx = self._get_ctx(repo, commit_id)
339 return ctx.obsolete()
349 return ctx.obsolete()
340 return _ctx_obsolete(context_uid, repo_id, commit_id)
350 return _ctx_obsolete(context_uid, repo_id, commit_id)
341
351
342 @reraise_safe_exceptions
352 @reraise_safe_exceptions
343 def ctx_hidden(self, wire, commit_id):
353 def ctx_hidden(self, wire, commit_id):
344 cache_on, context_uid, repo_id = self._cache_on(wire)
354 cache_on, context_uid, repo_id = self._cache_on(wire)
345 @self.region.conditional_cache_on_arguments(condition=cache_on)
355 region = self.region(wire)
356 @region.conditional_cache_on_arguments(condition=cache_on)
346 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
347 repo = self._factory.repo(wire)
358 repo = self._factory.repo(wire)
348 ctx = self._get_ctx(repo, commit_id)
359 ctx = self._get_ctx(repo, commit_id)
349 return ctx.hidden()
360 return ctx.hidden()
350 return _ctx_hidden(context_uid, repo_id, commit_id)
361 return _ctx_hidden(context_uid, repo_id, commit_id)
351
362
352 @reraise_safe_exceptions
363 @reraise_safe_exceptions
353 def ctx_substate(self, wire, revision):
364 def ctx_substate(self, wire, revision):
354 repo = self._factory.repo(wire)
365 repo = self._factory.repo(wire)
355 ctx = self._get_ctx(repo, revision)
366 ctx = self._get_ctx(repo, revision)
356 return ctx.substate
367 return ctx.substate
357
368
358 @reraise_safe_exceptions
369 @reraise_safe_exceptions
359 def ctx_status(self, wire, revision):
370 def ctx_status(self, wire, revision):
360 repo = self._factory.repo(wire)
371 repo = self._factory.repo(wire)
361 ctx = self._get_ctx(repo, revision)
372 ctx = self._get_ctx(repo, revision)
362 status = repo[ctx.p1().node()].status(other=ctx.node())
373 status = repo[ctx.p1().node()].status(other=ctx.node())
363 # object of status (odd, custom named tuple in mercurial) is not
374 # object of status (odd, custom named tuple in mercurial) is not
364 # correctly serializable, we make it a list, as the underling
375 # correctly serializable, we make it a list, as the underling
365 # API expects this to be a list
376 # API expects this to be a list
366 return list(status)
377 return list(status)
367
378
368 @reraise_safe_exceptions
379 @reraise_safe_exceptions
369 def ctx_user(self, wire, revision):
380 def ctx_user(self, wire, revision):
370 repo = self._factory.repo(wire)
381 repo = self._factory.repo(wire)
371 ctx = self._get_ctx(repo, revision)
382 ctx = self._get_ctx(repo, revision)
372 return ctx.user()
383 return ctx.user()
373
384
374 @reraise_safe_exceptions
385 @reraise_safe_exceptions
375 def check_url(self, url, config):
386 def check_url(self, url, config):
376 _proto = None
387 _proto = None
377 if '+' in url[:url.find('://')]:
388 if '+' in url[:url.find('://')]:
378 _proto = url[0:url.find('+')]
389 _proto = url[0:url.find('+')]
379 url = url[url.find('+') + 1:]
390 url = url[url.find('+') + 1:]
380 handlers = []
391 handlers = []
381 url_obj = url_parser(url)
392 url_obj = url_parser(url)
382 test_uri, authinfo = url_obj.authinfo()
393 test_uri, authinfo = url_obj.authinfo()
383 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
384 url_obj.query = obfuscate_qs(url_obj.query)
395 url_obj.query = obfuscate_qs(url_obj.query)
385
396
386 cleaned_uri = str(url_obj)
397 cleaned_uri = str(url_obj)
387 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
388
399
389 if authinfo:
400 if authinfo:
390 # create a password manager
401 # create a password manager
391 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
392 passmgr.add_password(*authinfo)
403 passmgr.add_password(*authinfo)
393
404
394 handlers.extend((httpbasicauthhandler(passmgr),
405 handlers.extend((httpbasicauthhandler(passmgr),
395 httpdigestauthhandler(passmgr)))
406 httpdigestauthhandler(passmgr)))
396
407
397 o = urllib2.build_opener(*handlers)
408 o = urllib2.build_opener(*handlers)
398 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
399 ('Accept', 'application/mercurial-0.1')]
410 ('Accept', 'application/mercurial-0.1')]
400
411
401 q = {"cmd": 'between'}
412 q = {"cmd": 'between'}
402 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
403 qs = '?%s' % urllib.urlencode(q)
414 qs = '?%s' % urllib.urlencode(q)
404 cu = "%s%s" % (test_uri, qs)
415 cu = "%s%s" % (test_uri, qs)
405 req = urllib2.Request(cu, None, {})
416 req = urllib2.Request(cu, None, {})
406
417
407 try:
418 try:
408 log.debug("Trying to open URL %s", cleaned_uri)
419 log.debug("Trying to open URL %s", cleaned_uri)
409 resp = o.open(req)
420 resp = o.open(req)
410 if resp.code != 200:
421 if resp.code != 200:
411 raise exceptions.URLError()('Return Code is not 200')
422 raise exceptions.URLError()('Return Code is not 200')
412 except Exception as e:
423 except Exception as e:
413 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
414 # means it cannot be cloned
425 # means it cannot be cloned
415 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
416
427
417 # now check if it's a proper hg repo, but don't do it for svn
428 # now check if it's a proper hg repo, but don't do it for svn
418 try:
429 try:
419 if _proto == 'svn':
430 if _proto == 'svn':
420 pass
431 pass
421 else:
432 else:
422 # check for pure hg repos
433 # check for pure hg repos
423 log.debug(
434 log.debug(
424 "Verifying if URL is a Mercurial repository: %s",
435 "Verifying if URL is a Mercurial repository: %s",
425 cleaned_uri)
436 cleaned_uri)
426 ui = make_ui_from_config(config)
437 ui = make_ui_from_config(config)
427 peer_checker = makepeer(ui, url)
438 peer_checker = makepeer(ui, url)
428 peer_checker.lookup('tip')
439 peer_checker.lookup('tip')
429 except Exception as e:
440 except Exception as e:
430 log.warning("URL is not a valid Mercurial repository: %s",
441 log.warning("URL is not a valid Mercurial repository: %s",
431 cleaned_uri)
442 cleaned_uri)
432 raise exceptions.URLError(e)(
443 raise exceptions.URLError(e)(
433 "url [%s] does not look like an hg repo org_exc: %s"
444 "url [%s] does not look like an hg repo org_exc: %s"
434 % (cleaned_uri, e))
445 % (cleaned_uri, e))
435
446
436 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
437 return True
448 return True
438
449
439 @reraise_safe_exceptions
450 @reraise_safe_exceptions
440 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
451 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
441 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
442
453
443 if file_filter:
454 if file_filter:
444 match_filter = match(file_filter[0], '', [file_filter[1]])
455 match_filter = match(file_filter[0], '', [file_filter[1]])
445 else:
456 else:
446 match_filter = file_filter
457 match_filter = file_filter
447 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
448
459
449 try:
460 try:
450 return "".join(patch.diff(
461 return "".join(patch.diff(
451 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
452 except RepoLookupError as e:
463 except RepoLookupError as e:
453 raise exceptions.LookupException(e)()
464 raise exceptions.LookupException(e)()
454
465
455 @reraise_safe_exceptions
466 @reraise_safe_exceptions
456 def node_history(self, wire, revision, path, limit):
467 def node_history(self, wire, revision, path, limit):
457 cache_on, context_uid, repo_id = self._cache_on(wire)
468 cache_on, context_uid, repo_id = self._cache_on(wire)
458 @self.region.conditional_cache_on_arguments(condition=cache_on)
469 region = self.region(wire)
470 @region.conditional_cache_on_arguments(condition=cache_on)
459 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
460 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
461
473
462 ctx = self._get_ctx(repo, revision)
474 ctx = self._get_ctx(repo, revision)
463 fctx = ctx.filectx(path)
475 fctx = ctx.filectx(path)
464
476
465 def history_iter():
477 def history_iter():
466 limit_rev = fctx.rev()
478 limit_rev = fctx.rev()
467 for obj in reversed(list(fctx.filelog())):
479 for obj in reversed(list(fctx.filelog())):
468 obj = fctx.filectx(obj)
480 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
481 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
482 if ctx.hidden() or ctx.obsolete():
471 continue
483 continue
472
484
473 if limit_rev >= obj.rev():
485 if limit_rev >= obj.rev():
474 yield obj
486 yield obj
475
487
476 history = []
488 history = []
477 for cnt, obj in enumerate(history_iter()):
489 for cnt, obj in enumerate(history_iter()):
478 if limit and cnt >= limit:
490 if limit and cnt >= limit:
479 break
491 break
480 history.append(hex(obj.node()))
492 history.append(hex(obj.node()))
481
493
482 return [x for x in history]
494 return [x for x in history]
483 return _node_history(context_uid, repo_id, revision, path, limit)
495 return _node_history(context_uid, repo_id, revision, path, limit)
484
496
485 @reraise_safe_exceptions
497 @reraise_safe_exceptions
486 def node_history_untill(self, wire, revision, path, limit):
498 def node_history_untill(self, wire, revision, path, limit):
487 cache_on, context_uid, repo_id = self._cache_on(wire)
499 cache_on, context_uid, repo_id = self._cache_on(wire)
488 @self.region.conditional_cache_on_arguments(condition=cache_on)
500 region = self.region(wire)
501 @region.conditional_cache_on_arguments(condition=cache_on)
489 def _node_history_until(_context_uid, _repo_id):
502 def _node_history_until(_context_uid, _repo_id):
490 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
491 ctx = self._get_ctx(repo, revision)
504 ctx = self._get_ctx(repo, revision)
492 fctx = ctx.filectx(path)
505 fctx = ctx.filectx(path)
493
506
494 file_log = list(fctx.filelog())
507 file_log = list(fctx.filelog())
495 if limit:
508 if limit:
496 # Limit to the last n items
509 # Limit to the last n items
497 file_log = file_log[-limit:]
510 file_log = file_log[-limit:]
498
511
499 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
512 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
500 return _node_history_until(context_uid, repo_id, revision, path, limit)
513 return _node_history_until(context_uid, repo_id, revision, path, limit)
501
514
502 @reraise_safe_exceptions
515 @reraise_safe_exceptions
503 def fctx_annotate(self, wire, revision, path):
516 def fctx_annotate(self, wire, revision, path):
504 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
505 ctx = self._get_ctx(repo, revision)
518 ctx = self._get_ctx(repo, revision)
506 fctx = ctx.filectx(path)
519 fctx = ctx.filectx(path)
507
520
508 result = []
521 result = []
509 for i, annotate_obj in enumerate(fctx.annotate(), 1):
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
510 ln_no = i
523 ln_no = i
511 sha = hex(annotate_obj.fctx.node())
524 sha = hex(annotate_obj.fctx.node())
512 content = annotate_obj.text
525 content = annotate_obj.text
513 result.append((ln_no, sha, content))
526 result.append((ln_no, sha, content))
514 return result
527 return result
515
528
516 @reraise_safe_exceptions
529 @reraise_safe_exceptions
517 def fctx_node_data(self, wire, revision, path):
530 def fctx_node_data(self, wire, revision, path):
518 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
519 ctx = self._get_ctx(repo, revision)
532 ctx = self._get_ctx(repo, revision)
520 fctx = ctx.filectx(path)
533 fctx = ctx.filectx(path)
521 return fctx.data()
534 return fctx.data()
522
535
523 @reraise_safe_exceptions
536 @reraise_safe_exceptions
524 def fctx_flags(self, wire, commit_id, path):
537 def fctx_flags(self, wire, commit_id, path):
525 cache_on, context_uid, repo_id = self._cache_on(wire)
538 cache_on, context_uid, repo_id = self._cache_on(wire)
526 @self.region.conditional_cache_on_arguments(condition=cache_on)
539 region = self.region(wire)
540 @region.conditional_cache_on_arguments(condition=cache_on)
527 def _fctx_flags(_repo_id, _commit_id, _path):
541 def _fctx_flags(_repo_id, _commit_id, _path):
528 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
529 ctx = self._get_ctx(repo, commit_id)
543 ctx = self._get_ctx(repo, commit_id)
530 fctx = ctx.filectx(path)
544 fctx = ctx.filectx(path)
531 return fctx.flags()
545 return fctx.flags()
532
546
533 return _fctx_flags(repo_id, commit_id, path)
547 return _fctx_flags(repo_id, commit_id, path)
534
548
535 @reraise_safe_exceptions
549 @reraise_safe_exceptions
536 def fctx_size(self, wire, commit_id, path):
550 def fctx_size(self, wire, commit_id, path):
537 cache_on, context_uid, repo_id = self._cache_on(wire)
551 cache_on, context_uid, repo_id = self._cache_on(wire)
538 @self.region.conditional_cache_on_arguments(condition=cache_on)
552 region = self.region(wire)
553 @region.conditional_cache_on_arguments(condition=cache_on)
539 def _fctx_size(_repo_id, _revision, _path):
554 def _fctx_size(_repo_id, _revision, _path):
540 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
541 ctx = self._get_ctx(repo, commit_id)
556 ctx = self._get_ctx(repo, commit_id)
542 fctx = ctx.filectx(path)
557 fctx = ctx.filectx(path)
543 return fctx.size()
558 return fctx.size()
544 return _fctx_size(repo_id, commit_id, path)
559 return _fctx_size(repo_id, commit_id, path)
545
560
546 @reraise_safe_exceptions
561 @reraise_safe_exceptions
547 def get_all_commit_ids(self, wire, name):
562 def get_all_commit_ids(self, wire, name):
548 cache_on, context_uid, repo_id = self._cache_on(wire)
563 cache_on, context_uid, repo_id = self._cache_on(wire)
549 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 region = self.region(wire)
565 @region.conditional_cache_on_arguments(condition=cache_on)
550 def _get_all_commit_ids(_context_uid, _repo_id, _name):
566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
551 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
552 repo = repo.filtered(name)
568 repo = repo.filtered(name)
553 revs = map(lambda x: hex(x[7]), repo.changelog.index)
569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
554 return revs
570 return revs
555 return _get_all_commit_ids(context_uid, repo_id, name)
571 return _get_all_commit_ids(context_uid, repo_id, name)
556
572
557 @reraise_safe_exceptions
573 @reraise_safe_exceptions
558 def get_config_value(self, wire, section, name, untrusted=False):
574 def get_config_value(self, wire, section, name, untrusted=False):
559 repo = self._factory.repo(wire)
575 repo = self._factory.repo(wire)
560 return repo.ui.config(section, name, untrusted=untrusted)
576 return repo.ui.config(section, name, untrusted=untrusted)
561
577
562 @reraise_safe_exceptions
578 @reraise_safe_exceptions
563 def is_large_file(self, wire, commit_id, path):
579 def is_large_file(self, wire, commit_id, path):
564 cache_on, context_uid, repo_id = self._cache_on(wire)
580 cache_on, context_uid, repo_id = self._cache_on(wire)
565 @self.region.conditional_cache_on_arguments(condition=cache_on)
581 region = self.region(wire)
582 @region.conditional_cache_on_arguments(condition=cache_on)
566 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
567 return largefiles.lfutil.isstandin(path)
584 return largefiles.lfutil.isstandin(path)
568
585
569 return _is_large_file(context_uid, repo_id, commit_id, path)
586 return _is_large_file(context_uid, repo_id, commit_id, path)
570
587
571 @reraise_safe_exceptions
588 @reraise_safe_exceptions
572 def is_binary(self, wire, revision, path):
589 def is_binary(self, wire, revision, path):
573 cache_on, context_uid, repo_id = self._cache_on(wire)
590 cache_on, context_uid, repo_id = self._cache_on(wire)
574
591
575 @self.region.conditional_cache_on_arguments(condition=cache_on)
592 region = self.region(wire)
593 @region.conditional_cache_on_arguments(condition=cache_on)
576 def _is_binary(_repo_id, _sha, _path):
594 def _is_binary(_repo_id, _sha, _path):
577 repo = self._factory.repo(wire)
595 repo = self._factory.repo(wire)
578 ctx = self._get_ctx(repo, revision)
596 ctx = self._get_ctx(repo, revision)
579 fctx = ctx.filectx(path)
597 fctx = ctx.filectx(path)
580 return fctx.isbinary()
598 return fctx.isbinary()
581
599
582 return _is_binary(repo_id, revision, path)
600 return _is_binary(repo_id, revision, path)
583
601
584 @reraise_safe_exceptions
602 @reraise_safe_exceptions
585 def in_largefiles_store(self, wire, sha):
603 def in_largefiles_store(self, wire, sha):
586 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
587 return largefiles.lfutil.instore(repo, sha)
605 return largefiles.lfutil.instore(repo, sha)
588
606
589 @reraise_safe_exceptions
607 @reraise_safe_exceptions
590 def in_user_cache(self, wire, sha):
608 def in_user_cache(self, wire, sha):
591 repo = self._factory.repo(wire)
609 repo = self._factory.repo(wire)
592 return largefiles.lfutil.inusercache(repo.ui, sha)
610 return largefiles.lfutil.inusercache(repo.ui, sha)
593
611
594 @reraise_safe_exceptions
612 @reraise_safe_exceptions
595 def store_path(self, wire, sha):
613 def store_path(self, wire, sha):
596 repo = self._factory.repo(wire)
614 repo = self._factory.repo(wire)
597 return largefiles.lfutil.storepath(repo, sha)
615 return largefiles.lfutil.storepath(repo, sha)
598
616
599 @reraise_safe_exceptions
617 @reraise_safe_exceptions
600 def link(self, wire, sha, path):
618 def link(self, wire, sha, path):
601 repo = self._factory.repo(wire)
619 repo = self._factory.repo(wire)
602 largefiles.lfutil.link(
620 largefiles.lfutil.link(
603 largefiles.lfutil.usercachepath(repo.ui, sha), path)
621 largefiles.lfutil.usercachepath(repo.ui, sha), path)
604
622
605 @reraise_safe_exceptions
623 @reraise_safe_exceptions
606 def localrepository(self, wire, create=False):
624 def localrepository(self, wire, create=False):
607 self._factory.repo(wire, create=create)
625 self._factory.repo(wire, create=create)
608
626
609 @reraise_safe_exceptions
627 @reraise_safe_exceptions
610 def lookup(self, wire, revision, both):
628 def lookup(self, wire, revision, both):
611 cache_on, context_uid, repo_id = self._cache_on(wire)
629 cache_on, context_uid, repo_id = self._cache_on(wire)
612
630
613 @self.region.conditional_cache_on_arguments(condition=cache_on)
631 region = self.region(wire)
632 @region.conditional_cache_on_arguments(condition=cache_on)
614 def _lookup(_context_uid, _repo_id, _revision, _both):
633 def _lookup(_context_uid, _repo_id, _revision, _both):
615
634
616 repo = self._factory.repo(wire)
635 repo = self._factory.repo(wire)
617 rev = _revision
636 rev = _revision
618 if isinstance(rev, int):
637 if isinstance(rev, int):
619 # NOTE(marcink):
638 # NOTE(marcink):
620 # since Mercurial doesn't support negative indexes properly
639 # since Mercurial doesn't support negative indexes properly
621 # we need to shift accordingly by one to get proper index, e.g
640 # we need to shift accordingly by one to get proper index, e.g
622 # repo[-1] => repo[-2]
641 # repo[-1] => repo[-2]
623 # repo[0] => repo[-1]
642 # repo[0] => repo[-1]
624 if rev <= 0:
643 if rev <= 0:
625 rev = rev + -1
644 rev = rev + -1
626 try:
645 try:
627 ctx = self._get_ctx(repo, rev)
646 ctx = self._get_ctx(repo, rev)
628 except (TypeError, RepoLookupError) as e:
647 except (TypeError, RepoLookupError) as e:
629 e._org_exc_tb = traceback.format_exc()
648 e._org_exc_tb = traceback.format_exc()
630 raise exceptions.LookupException(e)(rev)
649 raise exceptions.LookupException(e)(rev)
631 except LookupError as e:
650 except LookupError as e:
632 e._org_exc_tb = traceback.format_exc()
651 e._org_exc_tb = traceback.format_exc()
633 raise exceptions.LookupException(e)(e.name)
652 raise exceptions.LookupException(e)(e.name)
634
653
635 if not both:
654 if not both:
636 return ctx.hex()
655 return ctx.hex()
637
656
638 ctx = repo[ctx.hex()]
657 ctx = repo[ctx.hex()]
639 return ctx.hex(), ctx.rev()
658 return ctx.hex(), ctx.rev()
640
659
641 return _lookup(context_uid, repo_id, revision, both)
660 return _lookup(context_uid, repo_id, revision, both)
642
661
643 @reraise_safe_exceptions
662 @reraise_safe_exceptions
644 def sync_push(self, wire, url):
663 def sync_push(self, wire, url):
645 if not self.check_url(url, wire['config']):
664 if not self.check_url(url, wire['config']):
646 return
665 return
647
666
648 repo = self._factory.repo(wire)
667 repo = self._factory.repo(wire)
649
668
650 # Disable any prompts for this repo
669 # Disable any prompts for this repo
651 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
652
671
653 bookmarks = dict(repo._bookmarks).keys()
672 bookmarks = dict(repo._bookmarks).keys()
654 remote = peer(repo, {}, url)
673 remote = peer(repo, {}, url)
655 # Disable any prompts for this remote
674 # Disable any prompts for this remote
656 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
657
676
658 return exchange.push(
677 return exchange.push(
659 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
660
679
661 @reraise_safe_exceptions
680 @reraise_safe_exceptions
662 def revision(self, wire, rev):
681 def revision(self, wire, rev):
663 repo = self._factory.repo(wire)
682 repo = self._factory.repo(wire)
664 ctx = self._get_ctx(repo, rev)
683 ctx = self._get_ctx(repo, rev)
665 return ctx.rev()
684 return ctx.rev()
666
685
667 @reraise_safe_exceptions
686 @reraise_safe_exceptions
668 def rev_range(self, wire, commit_filter):
687 def rev_range(self, wire, commit_filter):
669 cache_on, context_uid, repo_id = self._cache_on(wire)
688 cache_on, context_uid, repo_id = self._cache_on(wire)
670
689
671 @self.region.conditional_cache_on_arguments(condition=cache_on)
690 region = self.region(wire)
691 @region.conditional_cache_on_arguments(condition=cache_on)
672 def _rev_range(_context_uid, _repo_id, _filter):
692 def _rev_range(_context_uid, _repo_id, _filter):
673 repo = self._factory.repo(wire)
693 repo = self._factory.repo(wire)
674 revisions = [rev for rev in revrange(repo, commit_filter)]
694 revisions = [rev for rev in revrange(repo, commit_filter)]
675 return revisions
695 return revisions
676
696
677 return _rev_range(context_uid, repo_id, sorted(commit_filter))
697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
678
698
679 @reraise_safe_exceptions
699 @reraise_safe_exceptions
680 def rev_range_hash(self, wire, node):
700 def rev_range_hash(self, wire, node):
681 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
682
702
683 def get_revs(repo, rev_opt):
703 def get_revs(repo, rev_opt):
684 if rev_opt:
704 if rev_opt:
685 revs = revrange(repo, rev_opt)
705 revs = revrange(repo, rev_opt)
686 if len(revs) == 0:
706 if len(revs) == 0:
687 return (nullrev, nullrev)
707 return (nullrev, nullrev)
688 return max(revs), min(revs)
708 return max(revs), min(revs)
689 else:
709 else:
690 return len(repo) - 1, 0
710 return len(repo) - 1, 0
691
711
692 stop, start = get_revs(repo, [node + ':'])
712 stop, start = get_revs(repo, [node + ':'])
693 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
694 return revs
714 return revs
695
715
696 @reraise_safe_exceptions
716 @reraise_safe_exceptions
697 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
698 other_path = kwargs.pop('other_path', None)
718 other_path = kwargs.pop('other_path', None)
699
719
700 # case when we want to compare two independent repositories
720 # case when we want to compare two independent repositories
701 if other_path and other_path != wire["path"]:
721 if other_path and other_path != wire["path"]:
702 baseui = self._factory._create_config(wire["config"])
722 baseui = self._factory._create_config(wire["config"])
703 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
704 else:
724 else:
705 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
706 return list(repo.revs(rev_spec, *args))
726 return list(repo.revs(rev_spec, *args))
707
727
708 @reraise_safe_exceptions
728 @reraise_safe_exceptions
709 def verify(self, wire,):
729 def verify(self, wire,):
710 repo = self._factory.repo(wire)
730 repo = self._factory.repo(wire)
711 baseui = self._factory._create_config(wire['config'])
731 baseui = self._factory._create_config(wire['config'])
712
732
713 baseui, output = patch_ui_message_output(baseui)
733 baseui, output = patch_ui_message_output(baseui)
714
734
715 repo.ui = baseui
735 repo.ui = baseui
716 verify.verify(repo)
736 verify.verify(repo)
717 return output.getvalue()
737 return output.getvalue()
718
738
719 @reraise_safe_exceptions
739 @reraise_safe_exceptions
720 def hg_update_cache(self, wire,):
740 def hg_update_cache(self, wire,):
721 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
722 baseui = self._factory._create_config(wire['config'])
742 baseui = self._factory._create_config(wire['config'])
723 baseui, output = patch_ui_message_output(baseui)
743 baseui, output = patch_ui_message_output(baseui)
724
744
725 repo.ui = baseui
745 repo.ui = baseui
726 with repo.wlock(), repo.lock():
746 with repo.wlock(), repo.lock():
727 repo.updatecaches(full=True)
747 repo.updatecaches(full=True)
728
748
729 return output.getvalue()
749 return output.getvalue()
730
750
731 @reraise_safe_exceptions
751 @reraise_safe_exceptions
732 def hg_rebuild_fn_cache(self, wire,):
752 def hg_rebuild_fn_cache(self, wire,):
733 repo = self._factory.repo(wire)
753 repo = self._factory.repo(wire)
734 baseui = self._factory._create_config(wire['config'])
754 baseui = self._factory._create_config(wire['config'])
735 baseui, output = patch_ui_message_output(baseui)
755 baseui, output = patch_ui_message_output(baseui)
736
756
737 repo.ui = baseui
757 repo.ui = baseui
738
758
739 repair.rebuildfncache(baseui, repo)
759 repair.rebuildfncache(baseui, repo)
740
760
741 return output.getvalue()
761 return output.getvalue()
742
762
743 @reraise_safe_exceptions
763 @reraise_safe_exceptions
744 def tags(self, wire):
764 def tags(self, wire):
745 cache_on, context_uid, repo_id = self._cache_on(wire)
765 cache_on, context_uid, repo_id = self._cache_on(wire)
746 @self.region.conditional_cache_on_arguments(condition=cache_on)
766 region = self.region(wire)
767 @region.conditional_cache_on_arguments(condition=cache_on)
747 def _tags(_context_uid, _repo_id):
768 def _tags(_context_uid, _repo_id):
748 repo = self._factory.repo(wire)
769 repo = self._factory.repo(wire)
749 return repo.tags()
770 return repo.tags()
750
771
751 return _tags(context_uid, repo_id)
772 return _tags(context_uid, repo_id)
752
773
753 @reraise_safe_exceptions
774 @reraise_safe_exceptions
754 def update(self, wire, node=None, clean=False):
775 def update(self, wire, node=None, clean=False):
755 repo = self._factory.repo(wire)
776 repo = self._factory.repo(wire)
756 baseui = self._factory._create_config(wire['config'])
777 baseui = self._factory._create_config(wire['config'])
757 commands.update(baseui, repo, node=node, clean=clean)
778 commands.update(baseui, repo, node=node, clean=clean)
758
779
759 @reraise_safe_exceptions
780 @reraise_safe_exceptions
760 def identify(self, wire):
781 def identify(self, wire):
761 repo = self._factory.repo(wire)
782 repo = self._factory.repo(wire)
762 baseui = self._factory._create_config(wire['config'])
783 baseui = self._factory._create_config(wire['config'])
763 output = io.BytesIO()
784 output = io.BytesIO()
764 baseui.write = output.write
785 baseui.write = output.write
765 # This is required to get a full node id
786 # This is required to get a full node id
766 baseui.debugflag = True
787 baseui.debugflag = True
767 commands.identify(baseui, repo, id=True)
788 commands.identify(baseui, repo, id=True)
768
789
769 return output.getvalue()
790 return output.getvalue()
770
791
771 @reraise_safe_exceptions
792 @reraise_safe_exceptions
772 def heads(self, wire, branch=None):
793 def heads(self, wire, branch=None):
773 repo = self._factory.repo(wire)
794 repo = self._factory.repo(wire)
774 baseui = self._factory._create_config(wire['config'])
795 baseui = self._factory._create_config(wire['config'])
775 output = io.BytesIO()
796 output = io.BytesIO()
776
797
777 def write(data, **unused_kwargs):
798 def write(data, **unused_kwargs):
778 output.write(data)
799 output.write(data)
779
800
780 baseui.write = write
801 baseui.write = write
781 if branch:
802 if branch:
782 args = [branch]
803 args = [branch]
783 else:
804 else:
784 args = []
805 args = []
785 commands.heads(baseui, repo, template='{node} ', *args)
806 commands.heads(baseui, repo, template='{node} ', *args)
786
807
787 return output.getvalue()
808 return output.getvalue()
788
809
789 @reraise_safe_exceptions
810 @reraise_safe_exceptions
790 def ancestor(self, wire, revision1, revision2):
811 def ancestor(self, wire, revision1, revision2):
791 repo = self._factory.repo(wire)
812 repo = self._factory.repo(wire)
792 changelog = repo.changelog
813 changelog = repo.changelog
793 lookup = repo.lookup
814 lookup = repo.lookup
794 a = changelog.ancestor(lookup(revision1), lookup(revision2))
815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
795 return hex(a)
816 return hex(a)
796
817
797 @reraise_safe_exceptions
818 @reraise_safe_exceptions
798 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
799 baseui = self._factory._create_config(wire["config"], hooks=hooks)
820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
800 clone(baseui, source, dest, noupdate=not update_after_clone)
821 clone(baseui, source, dest, noupdate=not update_after_clone)
801
822
802 @reraise_safe_exceptions
823 @reraise_safe_exceptions
803 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
804
825
805 repo = self._factory.repo(wire)
826 repo = self._factory.repo(wire)
806 baseui = self._factory._create_config(wire['config'])
827 baseui = self._factory._create_config(wire['config'])
807 publishing = baseui.configbool('phases', 'publish')
828 publishing = baseui.configbool('phases', 'publish')
808 if publishing:
829 if publishing:
809 new_commit = 'public'
830 new_commit = 'public'
810 else:
831 else:
811 new_commit = 'draft'
832 new_commit = 'draft'
812
833
813 def _filectxfn(_repo, ctx, path):
834 def _filectxfn(_repo, ctx, path):
814 """
835 """
815 Marks given path as added/changed/removed in a given _repo. This is
836 Marks given path as added/changed/removed in a given _repo. This is
816 for internal mercurial commit function.
837 for internal mercurial commit function.
817 """
838 """
818
839
819 # check if this path is removed
840 # check if this path is removed
820 if path in removed:
841 if path in removed:
821 # returning None is a way to mark node for removal
842 # returning None is a way to mark node for removal
822 return None
843 return None
823
844
824 # check if this path is added
845 # check if this path is added
825 for node in updated:
846 for node in updated:
826 if node['path'] == path:
847 if node['path'] == path:
827 return memfilectx(
848 return memfilectx(
828 _repo,
849 _repo,
829 changectx=ctx,
850 changectx=ctx,
830 path=node['path'],
851 path=node['path'],
831 data=node['content'],
852 data=node['content'],
832 islink=False,
853 islink=False,
833 isexec=bool(node['mode'] & stat.S_IXUSR),
854 isexec=bool(node['mode'] & stat.S_IXUSR),
834 copysource=False)
855 copysource=False)
835
856
836 raise exceptions.AbortException()(
857 raise exceptions.AbortException()(
837 "Given path haven't been marked as added, "
858 "Given path haven't been marked as added, "
838 "changed or removed (%s)" % path)
859 "changed or removed (%s)" % path)
839
860
840 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
841
862
842 commit_ctx = memctx(
863 commit_ctx = memctx(
843 repo=repo,
864 repo=repo,
844 parents=parents,
865 parents=parents,
845 text=message,
866 text=message,
846 files=files,
867 files=files,
847 filectxfn=_filectxfn,
868 filectxfn=_filectxfn,
848 user=user,
869 user=user,
849 date=(commit_time, commit_timezone),
870 date=(commit_time, commit_timezone),
850 extra=extra)
871 extra=extra)
851
872
852 n = repo.commitctx(commit_ctx)
873 n = repo.commitctx(commit_ctx)
853 new_id = hex(n)
874 new_id = hex(n)
854
875
855 return new_id
876 return new_id
856
877
857 @reraise_safe_exceptions
878 @reraise_safe_exceptions
858 def pull(self, wire, url, commit_ids=None):
879 def pull(self, wire, url, commit_ids=None):
859 repo = self._factory.repo(wire)
880 repo = self._factory.repo(wire)
860 # Disable any prompts for this repo
881 # Disable any prompts for this repo
861 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
862
883
863 remote = peer(repo, {}, url)
884 remote = peer(repo, {}, url)
864 # Disable any prompts for this remote
885 # Disable any prompts for this remote
865 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
866
887
867 if commit_ids:
888 if commit_ids:
868 commit_ids = [bin(commit_id) for commit_id in commit_ids]
889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
869
890
870 return exchange.pull(
891 return exchange.pull(
871 repo, remote, heads=commit_ids, force=None).cgresult
892 repo, remote, heads=commit_ids, force=None).cgresult
872
893
873 @reraise_safe_exceptions
894 @reraise_safe_exceptions
874 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
875 repo = self._factory.repo(wire)
896 repo = self._factory.repo(wire)
876 baseui = self._factory._create_config(wire['config'], hooks=hooks)
897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
877
898
878 # Mercurial internally has a lot of logic that checks ONLY if
899 # Mercurial internally has a lot of logic that checks ONLY if
879 # option is defined, we just pass those if they are defined then
900 # option is defined, we just pass those if they are defined then
880 opts = {}
901 opts = {}
881 if bookmark:
902 if bookmark:
882 opts['bookmark'] = bookmark
903 opts['bookmark'] = bookmark
883 if branch:
904 if branch:
884 opts['branch'] = branch
905 opts['branch'] = branch
885 if revision:
906 if revision:
886 opts['rev'] = revision
907 opts['rev'] = revision
887
908
888 commands.pull(baseui, repo, source, **opts)
909 commands.pull(baseui, repo, source, **opts)
889
910
890 @reraise_safe_exceptions
911 @reraise_safe_exceptions
891 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
892 repo = self._factory.repo(wire)
913 repo = self._factory.repo(wire)
893 baseui = self._factory._create_config(wire['config'], hooks=hooks)
914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
894 commands.push(baseui, repo, dest=dest_path, rev=revisions,
915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
895 new_branch=push_branches)
916 new_branch=push_branches)
896
917
897 @reraise_safe_exceptions
918 @reraise_safe_exceptions
898 def strip(self, wire, revision, update, backup):
919 def strip(self, wire, revision, update, backup):
899 repo = self._factory.repo(wire)
920 repo = self._factory.repo(wire)
900 ctx = self._get_ctx(repo, revision)
921 ctx = self._get_ctx(repo, revision)
901 hgext_strip(
922 hgext_strip(
902 repo.baseui, repo, ctx.node(), update=update, backup=backup)
923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
903
924
904 @reraise_safe_exceptions
925 @reraise_safe_exceptions
905 def get_unresolved_files(self, wire):
926 def get_unresolved_files(self, wire):
906 repo = self._factory.repo(wire)
927 repo = self._factory.repo(wire)
907
928
908 log.debug('Calculating unresolved files for repo: %s', repo)
929 log.debug('Calculating unresolved files for repo: %s', repo)
909 output = io.BytesIO()
930 output = io.BytesIO()
910
931
911 def write(data, **unused_kwargs):
932 def write(data, **unused_kwargs):
912 output.write(data)
933 output.write(data)
913
934
914 baseui = self._factory._create_config(wire['config'])
935 baseui = self._factory._create_config(wire['config'])
915 baseui.write = write
936 baseui.write = write
916
937
917 commands.resolve(baseui, repo, list=True)
938 commands.resolve(baseui, repo, list=True)
918 unresolved = output.getvalue().splitlines(0)
939 unresolved = output.getvalue().splitlines(0)
919 return unresolved
940 return unresolved
920
941
921 @reraise_safe_exceptions
942 @reraise_safe_exceptions
922 def merge(self, wire, revision):
943 def merge(self, wire, revision):
923 repo = self._factory.repo(wire)
944 repo = self._factory.repo(wire)
924 baseui = self._factory._create_config(wire['config'])
945 baseui = self._factory._create_config(wire['config'])
925 repo.ui.setconfig('ui', 'merge', 'internal:dump')
946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
926
947
927 # In case of sub repositories are used mercurial prompts the user in
948 # In case of sub repositories are used mercurial prompts the user in
928 # case of merge conflicts or different sub repository sources. By
949 # case of merge conflicts or different sub repository sources. By
929 # setting the interactive flag to `False` mercurial doesn't prompt the
950 # setting the interactive flag to `False` mercurial doesn't prompt the
930 # used but instead uses a default value.
951 # used but instead uses a default value.
931 repo.ui.setconfig('ui', 'interactive', False)
952 repo.ui.setconfig('ui', 'interactive', False)
932 commands.merge(baseui, repo, rev=revision)
953 commands.merge(baseui, repo, rev=revision)
933
954
934 @reraise_safe_exceptions
955 @reraise_safe_exceptions
935 def merge_state(self, wire):
956 def merge_state(self, wire):
936 repo = self._factory.repo(wire)
957 repo = self._factory.repo(wire)
937 repo.ui.setconfig('ui', 'merge', 'internal:dump')
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
938
959
939 # In case of sub repositories are used mercurial prompts the user in
960 # In case of sub repositories are used mercurial prompts the user in
940 # case of merge conflicts or different sub repository sources. By
961 # case of merge conflicts or different sub repository sources. By
941 # setting the interactive flag to `False` mercurial doesn't prompt the
962 # setting the interactive flag to `False` mercurial doesn't prompt the
942 # used but instead uses a default value.
963 # used but instead uses a default value.
943 repo.ui.setconfig('ui', 'interactive', False)
964 repo.ui.setconfig('ui', 'interactive', False)
944 ms = hg_merge.mergestate(repo)
965 ms = hg_merge.mergestate(repo)
945 return [x for x in ms.unresolved()]
966 return [x for x in ms.unresolved()]
946
967
947 @reraise_safe_exceptions
968 @reraise_safe_exceptions
948 def commit(self, wire, message, username, close_branch=False):
969 def commit(self, wire, message, username, close_branch=False):
949 repo = self._factory.repo(wire)
970 repo = self._factory.repo(wire)
950 baseui = self._factory._create_config(wire['config'])
971 baseui = self._factory._create_config(wire['config'])
951 repo.ui.setconfig('ui', 'username', username)
972 repo.ui.setconfig('ui', 'username', username)
952 commands.commit(baseui, repo, message=message, close_branch=close_branch)
973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
953
974
954 @reraise_safe_exceptions
975 @reraise_safe_exceptions
955 def rebase(self, wire, source=None, dest=None, abort=False):
976 def rebase(self, wire, source=None, dest=None, abort=False):
956 repo = self._factory.repo(wire)
977 repo = self._factory.repo(wire)
957 baseui = self._factory._create_config(wire['config'])
978 baseui = self._factory._create_config(wire['config'])
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
959 # In case of sub repositories are used mercurial prompts the user in
980 # In case of sub repositories are used mercurial prompts the user in
960 # case of merge conflicts or different sub repository sources. By
981 # case of merge conflicts or different sub repository sources. By
961 # setting the interactive flag to `False` mercurial doesn't prompt the
982 # setting the interactive flag to `False` mercurial doesn't prompt the
962 # used but instead uses a default value.
983 # used but instead uses a default value.
963 repo.ui.setconfig('ui', 'interactive', False)
984 repo.ui.setconfig('ui', 'interactive', False)
964 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
965
986
966 @reraise_safe_exceptions
987 @reraise_safe_exceptions
967 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
968 repo = self._factory.repo(wire)
989 repo = self._factory.repo(wire)
969 ctx = self._get_ctx(repo, revision)
990 ctx = self._get_ctx(repo, revision)
970 node = ctx.node()
991 node = ctx.node()
971
992
972 date = (tag_time, tag_timezone)
993 date = (tag_time, tag_timezone)
973 try:
994 try:
974 hg_tag.tag(repo, name, node, message, local, user, date)
995 hg_tag.tag(repo, name, node, message, local, user, date)
975 except Abort as e:
996 except Abort as e:
976 log.exception("Tag operation aborted")
997 log.exception("Tag operation aborted")
977 # Exception can contain unicode which we convert
998 # Exception can contain unicode which we convert
978 raise exceptions.AbortException(e)(repr(e))
999 raise exceptions.AbortException(e)(repr(e))
979
1000
980 @reraise_safe_exceptions
1001 @reraise_safe_exceptions
981 def bookmark(self, wire, bookmark, revision=None):
1002 def bookmark(self, wire, bookmark, revision=None):
982 repo = self._factory.repo(wire)
1003 repo = self._factory.repo(wire)
983 baseui = self._factory._create_config(wire['config'])
1004 baseui = self._factory._create_config(wire['config'])
984 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
985
1006
986 @reraise_safe_exceptions
1007 @reraise_safe_exceptions
987 def install_hooks(self, wire, force=False):
1008 def install_hooks(self, wire, force=False):
988 # we don't need any special hooks for Mercurial
1009 # we don't need any special hooks for Mercurial
989 pass
1010 pass
990
1011
991 @reraise_safe_exceptions
1012 @reraise_safe_exceptions
992 def get_hooks_info(self, wire):
1013 def get_hooks_info(self, wire):
993 return {
1014 return {
994 'pre_version': vcsserver.__version__,
1015 'pre_version': vcsserver.__version__,
995 'post_version': vcsserver.__version__,
1016 'post_version': vcsserver.__version__,
996 }
1017 }
997
1018
998 @reraise_safe_exceptions
1019 @reraise_safe_exceptions
999 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1020 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1000 archive_dir_name, commit_id):
1021 archive_dir_name, commit_id):
1001
1022
1002 def file_walker(_commit_id, path):
1023 def file_walker(_commit_id, path):
1003 repo = self._factory.repo(wire)
1024 repo = self._factory.repo(wire)
1004 ctx = repo[_commit_id]
1025 ctx = repo[_commit_id]
1005 is_root = path in ['', '/']
1026 is_root = path in ['', '/']
1006 if is_root:
1027 if is_root:
1007 matcher = alwaysmatcher(badfn=None)
1028 matcher = alwaysmatcher(badfn=None)
1008 else:
1029 else:
1009 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1030 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1010 file_iter = ctx.manifest().walk(matcher)
1031 file_iter = ctx.manifest().walk(matcher)
1011
1032
1012 for fn in file_iter:
1033 for fn in file_iter:
1013 file_path = fn
1034 file_path = fn
1014 flags = ctx.flags(fn)
1035 flags = ctx.flags(fn)
1015 mode = b'x' in flags and 0o755 or 0o644
1036 mode = b'x' in flags and 0o755 or 0o644
1016 is_link = b'l' in flags
1037 is_link = b'l' in flags
1017
1038
1018 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1039 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1019
1040
1020 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1041 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1021 archive_dir_name, commit_id)
1042 archive_dir_name, commit_id)
1022
1043
@@ -1,705 +1,705 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import base64
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import wsgiref.util
24 import wsgiref.util
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 import psutil
27 import psutil
28 from itertools import chain
28 from itertools import chain
29 from cStringIO import StringIO
29 from cStringIO import StringIO
30
30
31 import simplejson as json
31 import simplejson as json
32 import msgpack
32 import msgpack
33 from pyramid.config import Configurator
33 from pyramid.config import Configurator
34 from pyramid.settings import asbool, aslist
34 from pyramid.settings import asbool, aslist
35 from pyramid.wsgi import wsgiapp
35 from pyramid.wsgi import wsgiapp
36 from pyramid.compat import configparser
36 from pyramid.compat import configparser
37 from pyramid.response import Response
37 from pyramid.response import Response
38
38
39 from vcsserver.utils import safe_int
39 from vcsserver.utils import safe_int
40
40
log = logging.getLogger(__name__)

# due to Mercurial/glibc2.27 problems we need to detect if locale settings are
# causing problems and "fix" it in case they do and fallback to LC_ALL = C
try:
    locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
    log.error(
        'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
    os.environ['LC_ALL'] = 'C'
52
52
53 import vcsserver
53 import vcsserver
54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
57 from vcsserver.echo_stub.echo_app import EchoApp
57 from vcsserver.echo_stub.echo_app import EchoApp
58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
59 from vcsserver.lib.exc_tracking import store_exception
59 from vcsserver.lib.exc_tracking import store_exception
60 from vcsserver.server import VcsServer
60 from vcsserver.server import VcsServer
61
61
62 try:
62 try:
63 from vcsserver.git import GitFactory, GitRemote
63 from vcsserver.git import GitFactory, GitRemote
64 except ImportError:
64 except ImportError:
65 GitFactory = None
65 GitFactory = None
66 GitRemote = None
66 GitRemote = None
67
67
68 try:
68 try:
69 from vcsserver.hg import MercurialFactory, HgRemote
69 from vcsserver.hg import MercurialFactory, HgRemote
70 except ImportError:
70 except ImportError:
71 MercurialFactory = None
71 MercurialFactory = None
72 HgRemote = None
72 HgRemote = None
73
73
74 try:
74 try:
75 from vcsserver.svn import SubversionFactory, SvnRemote
75 from vcsserver.svn import SubversionFactory, SvnRemote
76 except ImportError:
76 except ImportError:
77 SubversionFactory = None
77 SubversionFactory = None
78 SvnRemote = None
78 SvnRemote = None
79
79
80
80
81 def _is_request_chunked(environ):
81 def _is_request_chunked(environ):
82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
83 return stream
83 return stream
84
84
85
85
86 def _int_setting(settings, name, default):
86 def _int_setting(settings, name, default):
87 settings[name] = int(settings.get(name, default))
87 settings[name] = int(settings.get(name, default))
88 return settings[name]
88 return settings[name]
89
89
90
90
def _bool_setting(settings, name, default):
    """Coerce ``settings[name]`` (or *default*) to bool via pyramid's asbool."""
    raw_value = settings.get(name, default)
    # py2: normalize unicode to utf8 bytes before asbool sees it
    if isinstance(raw_value, unicode):
        raw_value = raw_value.encode('utf8')
    settings[name] = asbool(raw_value)
    return settings[name]
97
97
98
98
def _list_setting(settings, name, default):
    """Parse ``settings[name]`` (or *default*) using pyramid's space/newline list syntax."""
    raw_value = settings.get(name, default)
    settings[name] = aslist(raw_value)
    return settings[name]
105
105
106
106
107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
108 value = settings.get(name, default)
108 value = settings.get(name, default)
109
109
110 if default_when_empty and not value:
110 if default_when_empty and not value:
111 # use default value when value is empty
111 # use default value when value is empty
112 value = default
112 value = default
113
113
114 if lower:
114 if lower:
115 value = value.lower()
115 value = value.lower()
116 settings[name] = value
116 settings[name] = value
117 return settings[name]
117 return settings[name]
118
118
119
119
def log_max_fd():
    """Best-effort: log the hard limit on open file descriptors for this process."""
    try:
        hard_limit = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
        log.info('Max file descriptors value: %s', hard_limit)
    except Exception:
        # purely informational; rlimit API may be unavailable on this platform
        pass
126
126
127
127
class VCS(object):
    """Container wiring up the per-backend remote objects (git/hg/svn)
    plus the generic VcsServer remote, configuring the locale first."""

    def __init__(self, locale_conf=None, cache_config=None):
        self.locale = locale_conf
        self.cache_config = cache_config
        self._configure_locale()

        log_max_fd()

        if GitFactory and GitRemote:
            self._git_remote = GitRemote(GitFactory())
        else:
            log.info("Git client import failed")

        if MercurialFactory and HgRemote:
            self._hg_remote = HgRemote(MercurialFactory())
        else:
            log.info("Mercurial client import failed")

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()
            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.info("Subversion client import failed")

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Apply self.locale (or the environment) to LC_ALL; log failures."""
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info(
                'Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception(
                'Cannot set locale, not configuring the locale system')
178
178
179
179
class WsgiProxy(object):
    """WSGI wrapper that unpacks a msgpack-encoded request, forwards it to
    the wrapped remote-wsgi handler and streams the msgpack-encoded reply."""

    def __init__(self, wsgi):
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        payload = msgpack.unpackb(environ['wsgi.input'].read())

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                payload['environment'], payload['input_data'],
                *payload['args'], **payload['kwargs'])
        except Exception as e:
            # ship the failure to the caller instead of raising here
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        """Yield the (error, status, headers) preamble, then the body chunks,
        each packed as an individual msgpack frame."""
        preamble = [error, status, headers]
        for item in chain(preamble, data):
            yield msgpack.packb(item)
212
212
213
213
def not_found(request):
    """Pyramid notfound view: JSON body carrying a 404 status marker."""
    return {'status': '404 NOT FOUND'}
216
216
217
217
class VCSViewPredicate(object):
    """Pyramid view predicate matching the ``{backend}`` route element
    against the configured remote backends."""

    def __init__(self, val, config):
        self.remotes = val

    def text(self):
        return 'vcs view method = %s' % (self.remotes.keys(),)

    # pyramid uses phash for predicate uniqueness; same string is fine here
    phash = text

    def __call__(self, context, request):
        """
        View predicate that returns true if given backend is supported by
        defined remotes.
        """
        return request.matchdict.get('backend') in self.remotes
234
234
235
235
class HTTPApplication(object):
    # exception class names whose type is passed back to the client verbatim
    ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')

    # default remote-wsgi implementation; swapped for the echo stub in dev mode
    remote_wsgi = remote_wsgi
    _use_echo_app = False
241
241
242 def __init__(self, settings=None, global_config=None):
242 def __init__(self, settings=None, global_config=None):
243 self._sanitize_settings_and_apply_defaults(settings)
243 self._sanitize_settings_and_apply_defaults(settings)
244
244
245 self.config = Configurator(settings=settings)
245 self.config = Configurator(settings=settings)
246 self.global_config = global_config
246 self.global_config = global_config
247 self.config.include('vcsserver.lib.rc_cache')
247 self.config.include('vcsserver.lib.rc_cache')
248
248
249 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
249 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
250 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
250 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
251 self._remotes = {
251 self._remotes = {
252 'hg': vcs._hg_remote,
252 'hg': vcs._hg_remote,
253 'git': vcs._git_remote,
253 'git': vcs._git_remote,
254 'svn': vcs._svn_remote,
254 'svn': vcs._svn_remote,
255 'server': vcs._vcsserver,
255 'server': vcs._vcsserver,
256 }
256 }
257 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
257 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
258 self._use_echo_app = True
258 self._use_echo_app = True
259 log.warning("Using EchoApp for VCS operations.")
259 log.warning("Using EchoApp for VCS operations.")
260 self.remote_wsgi = remote_wsgi_stub
260 self.remote_wsgi = remote_wsgi_stub
261
261
262 self._configure_settings(global_config, settings)
262 self._configure_settings(global_config, settings)
263
263
264 self._configure()
264 self._configure()
265
265
266 def _configure_settings(self, global_config, app_settings):
266 def _configure_settings(self, global_config, app_settings):
267 """
267 """
268 Configure the settings module.
268 Configure the settings module.
269 """
269 """
270 settings_merged = global_config.copy()
270 settings_merged = global_config.copy()
271 settings_merged.update(app_settings)
271 settings_merged.update(app_settings)
272
272
273 git_path = app_settings.get('git_path', None)
273 git_path = app_settings.get('git_path', None)
274 if git_path:
274 if git_path:
275 settings.GIT_EXECUTABLE = git_path
275 settings.GIT_EXECUTABLE = git_path
276 binary_dir = app_settings.get('core.binary_dir', None)
276 binary_dir = app_settings.get('core.binary_dir', None)
277 if binary_dir:
277 if binary_dir:
278 settings.BINARY_DIR = binary_dir
278 settings.BINARY_DIR = binary_dir
279
279
280 # Store the settings to make them available to other modules.
280 # Store the settings to make them available to other modules.
281 vcsserver.PYRAMID_SETTINGS = settings_merged
281 vcsserver.PYRAMID_SETTINGS = settings_merged
282 vcsserver.CONFIG = settings_merged
282 vcsserver.CONFIG = settings_merged
283
283
284 def _sanitize_settings_and_apply_defaults(self, settings):
284 def _sanitize_settings_and_apply_defaults(self, settings):
285 temp_store = tempfile.gettempdir()
285 temp_store = tempfile.gettempdir()
286 default_cache_dir = os.path.join(temp_store, 'rc_cache')
286 default_cache_dir = os.path.join(temp_store, 'rc_cache')
287
287
288 # save default, cache dir, and use it for all backends later.
288 # save default, cache dir, and use it for all backends later.
289 default_cache_dir = _string_setting(
289 default_cache_dir = _string_setting(
290 settings,
290 settings,
291 'cache_dir',
291 'cache_dir',
292 default_cache_dir, lower=False, default_when_empty=True)
292 default_cache_dir, lower=False, default_when_empty=True)
293
293
294 # ensure we have our dir created
294 # ensure we have our dir created
295 if not os.path.isdir(default_cache_dir):
295 if not os.path.isdir(default_cache_dir):
296 os.makedirs(default_cache_dir, mode=0o755)
296 os.makedirs(default_cache_dir, mode=0o755)
297
297
298 # exception store cache
298 # exception store cache
299 _string_setting(
299 _string_setting(
300 settings,
300 settings,
301 'exception_tracker.store_path',
301 'exception_tracker.store_path',
302 temp_store, lower=False, default_when_empty=True)
302 temp_store, lower=False, default_when_empty=True)
303
303
304 # repo_object cache
304 # repo_object cache
305 _string_setting(
305 _string_setting(
306 settings,
306 settings,
307 'rc_cache.repo_object.backend',
307 'rc_cache.repo_object.backend',
308 'dogpile.cache.rc.file_namespace', lower=False)
308 'dogpile.cache.rc.file_namespace', lower=False)
309 _int_setting(
309 _int_setting(
310 settings,
310 settings,
311 'rc_cache.repo_object.expiration_time',
311 'rc_cache.repo_object.expiration_time',
312 30 * 24 * 60 * 60)
312 30 * 24 * 60 * 60)
313 _string_setting(
313 _string_setting(
314 settings,
314 settings,
315 'rc_cache.repo_object.arguments.filename',
315 'rc_cache.repo_object.arguments.filename',
316 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
316 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
317
317
318 def _configure(self):
318 def _configure(self):
319 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
319 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
320
320
321 self.config.add_route('service', '/_service')
321 self.config.add_route('service', '/_service')
322 self.config.add_route('status', '/status')
322 self.config.add_route('status', '/status')
323 self.config.add_route('hg_proxy', '/proxy/hg')
323 self.config.add_route('hg_proxy', '/proxy/hg')
324 self.config.add_route('git_proxy', '/proxy/git')
324 self.config.add_route('git_proxy', '/proxy/git')
325
325
326 # rpc methods
326 # rpc methods
327 self.config.add_route('vcs', '/{backend}')
327 self.config.add_route('vcs', '/{backend}')
328
328
329 # streaming rpc remote methods
329 # streaming rpc remote methods
330 self.config.add_route('vcs_stream', '/{backend}/stream')
330 self.config.add_route('vcs_stream', '/{backend}/stream')
331
331
332 # vcs operations clone/push as streaming
332 # vcs operations clone/push as streaming
333 self.config.add_route('stream_git', '/stream/git/*repo_name')
333 self.config.add_route('stream_git', '/stream/git/*repo_name')
334 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
334 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
335
335
336 self.config.add_view(self.status_view, route_name='status', renderer='json')
336 self.config.add_view(self.status_view, route_name='status', renderer='json')
337 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
337 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
338
338
339 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
339 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
340 self.config.add_view(self.git_proxy(), route_name='git_proxy')
340 self.config.add_view(self.git_proxy(), route_name='git_proxy')
341 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
341 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
342 vcs_view=self._remotes)
342 vcs_view=self._remotes)
343 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
343 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
344 vcs_view=self._remotes)
344 vcs_view=self._remotes)
345
345
346 self.config.add_view(self.hg_stream(), route_name='stream_hg')
346 self.config.add_view(self.hg_stream(), route_name='stream_hg')
347 self.config.add_view(self.git_stream(), route_name='stream_git')
347 self.config.add_view(self.git_stream(), route_name='stream_git')
348
348
349 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
349 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
350
350
351 self.config.add_notfound_view(not_found, renderer='json')
351 self.config.add_notfound_view(not_found, renderer='json')
352
352
353 self.config.add_view(self.handle_vcs_exception, context=Exception)
353 self.config.add_view(self.handle_vcs_exception, context=Exception)
354
354
355 self.config.add_tween(
355 self.config.add_tween(
356 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
356 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
357 )
357 )
358 self.config.add_request_method(
358 self.config.add_request_method(
359 'vcsserver.lib.request_counter.get_request_counter',
359 'vcsserver.lib.request_counter.get_request_counter',
360 'request_count')
360 'request_count')
361
361
362 self.config.add_request_method(
362 self.config.add_request_method(
363 'vcsserver.lib._vendor.statsd.get_statsd_client',
363 'vcsserver.lib._vendor.statsd.get_statsd_client',
364 'statsd', reify=True)
364 'statsd', reify=True)
365
365
366 def wsgi_app(self):
366 def wsgi_app(self):
367 return self.config.make_wsgi_app()
367 return self.config.make_wsgi_app()
368
368
369 def _vcs_view_params(self, request):
369 def _vcs_view_params(self, request):
370 remote = self._remotes[request.matchdict['backend']]
370 remote = self._remotes[request.matchdict['backend']]
371 payload = msgpack.unpackb(request.body, use_list=True)
371 payload = msgpack.unpackb(request.body, use_list=True)
372 method = payload.get('method')
372 method = payload.get('method')
373 params = payload['params']
373 params = payload['params']
374 wire = params.get('wire')
374 wire = params.get('wire')
375 args = params.get('args')
375 args = params.get('args')
376 kwargs = params.get('kwargs')
376 kwargs = params.get('kwargs')
377 context_uid = None
377 context_uid = None
378
378
379 if wire:
379 if wire:
380 try:
380 try:
381 wire['context'] = context_uid = uuid.UUID(wire['context'])
381 wire['context'] = context_uid = uuid.UUID(wire['context'])
382 except KeyError:
382 except KeyError:
383 pass
383 pass
384 args.insert(0, wire)
384 args.insert(0, wire)
385 repo_state_uid = wire.get('repo_state_uid') if wire else None
385 repo_state_uid = wire.get('repo_state_uid') if wire else None
386
386
387 # NOTE(marcink): trading complexity for slight performance
387 # NOTE(marcink): trading complexity for slight performance
388 if log.isEnabledFor(logging.DEBUG):
388 if log.isEnabledFor(logging.DEBUG):
389 no_args_methods = [
389 no_args_methods = [
390
390
391 ]
391 ]
392 if method in no_args_methods:
392 if method in no_args_methods:
393 call_args = ''
393 call_args = ''
394 else:
394 else:
395 call_args = args[1:]
395 call_args = args[1:]
396
396
397 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
397 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
398 method, call_args, kwargs, context_uid, repo_state_uid)
398 method, call_args, kwargs, context_uid, repo_state_uid)
399
399
400 return payload, remote, method, args, kwargs
400 return payload, remote, method, args, kwargs
401
401
402 def vcs_view(self, request):
402 def vcs_view(self, request):
403
403
404 payload, remote, method, args, kwargs = self._vcs_view_params(request)
404 payload, remote, method, args, kwargs = self._vcs_view_params(request)
405 payload_id = payload.get('id')
405 payload_id = payload.get('id')
406
406
407 try:
407 try:
408 resp = getattr(remote, method)(*args, **kwargs)
408 resp = getattr(remote, method)(*args, **kwargs)
409 except Exception as e:
409 except Exception as e:
410 exc_info = list(sys.exc_info())
410 exc_info = list(sys.exc_info())
411 exc_type, exc_value, exc_traceback = exc_info
411 exc_type, exc_value, exc_traceback = exc_info
412
412
413 org_exc = getattr(e, '_org_exc', None)
413 org_exc = getattr(e, '_org_exc', None)
414 org_exc_name = None
414 org_exc_name = None
415 org_exc_tb = ''
415 org_exc_tb = ''
416 if org_exc:
416 if org_exc:
417 org_exc_name = org_exc.__class__.__name__
417 org_exc_name = org_exc.__class__.__name__
418 org_exc_tb = getattr(e, '_org_exc_tb', '')
418 org_exc_tb = getattr(e, '_org_exc_tb', '')
419 # replace our "faked" exception with our org
419 # replace our "faked" exception with our org
420 exc_info[0] = org_exc.__class__
420 exc_info[0] = org_exc.__class__
421 exc_info[1] = org_exc
421 exc_info[1] = org_exc
422
422
423 should_store_exc = True
423 should_store_exc = True
424 if org_exc:
424 if org_exc:
425 def get_exc_fqn(_exc_obj):
425 def get_exc_fqn(_exc_obj):
426 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
426 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
427 return module_name + '.' + org_exc_name
427 return module_name + '.' + org_exc_name
428
428
429 exc_fqn = get_exc_fqn(org_exc)
429 exc_fqn = get_exc_fqn(org_exc)
430
430
431 if exc_fqn in ['mercurial.error.RepoLookupError',
431 if exc_fqn in ['mercurial.error.RepoLookupError',
432 'vcsserver.exceptions.RefNotFoundException']:
432 'vcsserver.exceptions.RefNotFoundException']:
433 should_store_exc = False
433 should_store_exc = False
434
434
435 if should_store_exc:
435 if should_store_exc:
436 store_exception(id(exc_info), exc_info, request_path=request.path)
436 store_exception(id(exc_info), exc_info, request_path=request.path)
437
437
438 tb_info = ''.join(
438 tb_info = ''.join(
439 traceback.format_exception(exc_type, exc_value, exc_traceback))
439 traceback.format_exception(exc_type, exc_value, exc_traceback))
440
440
441 type_ = e.__class__.__name__
441 type_ = e.__class__.__name__
442 if type_ not in self.ALLOWED_EXCEPTIONS:
442 if type_ not in self.ALLOWED_EXCEPTIONS:
443 type_ = None
443 type_ = None
444
444
445 resp = {
445 resp = {
446 'id': payload_id,
446 'id': payload_id,
447 'error': {
447 'error': {
448 'message': e.message,
448 'message': e.message,
449 'traceback': tb_info,
449 'traceback': tb_info,
450 'org_exc': org_exc_name,
450 'org_exc': org_exc_name,
451 'org_exc_tb': org_exc_tb,
451 'org_exc_tb': org_exc_tb,
452 'type': type_
452 'type': type_
453 }
453 }
454 }
454 }
455
455
456 try:
456 try:
457 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
457 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
458 except AttributeError:
458 except AttributeError:
459 pass
459 pass
460 else:
460 else:
461 resp = {
461 resp = {
462 'id': payload_id,
462 'id': payload_id,
463 'result': resp
463 'result': resp
464 }
464 }
465
465
466 return resp
466 return resp
467
467
468 def vcs_stream_view(self, request):
468 def vcs_stream_view(self, request):
469 payload, remote, method, args, kwargs = self._vcs_view_params(request)
469 payload, remote, method, args, kwargs = self._vcs_view_params(request)
470 # this method has a stream: marker we remove it here
470 # this method has a stream: marker we remove it here
471 method = method.split('stream:')[-1]
471 method = method.split('stream:')[-1]
472 chunk_size = safe_int(payload.get('chunk_size')) or 4096
472 chunk_size = safe_int(payload.get('chunk_size')) or 4096
473
473
474 try:
474 try:
475 resp = getattr(remote, method)(*args, **kwargs)
475 resp = getattr(remote, method)(*args, **kwargs)
476 except Exception as e:
476 except Exception as e:
477 raise
477 raise
478
478
479 def get_chunked_data(method_resp):
479 def get_chunked_data(method_resp):
480 stream = StringIO(method_resp)
480 stream = StringIO(method_resp)
481 while 1:
481 while 1:
482 chunk = stream.read(chunk_size)
482 chunk = stream.read(chunk_size)
483 if not chunk:
483 if not chunk:
484 break
484 break
485 yield chunk
485 yield chunk
486
486
487 response = Response(app_iter=get_chunked_data(resp))
487 response = Response(app_iter=get_chunked_data(resp))
488 response.content_type = 'application/octet-stream'
488 response.content_type = 'application/octet-stream'
489
489
490 return response
490 return response
491
491
492 def status_view(self, request):
492 def status_view(self, request):
493 import vcsserver
493 import vcsserver
494 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
494 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
495 'pid': os.getpid()}
495 'pid': os.getpid()}
496
496
497 def service_view(self, request):
497 def service_view(self, request):
498 import vcsserver
498 import vcsserver
499
499
500 payload = msgpack.unpackb(request.body, use_list=True)
500 payload = msgpack.unpackb(request.body, use_list=True)
501 server_config, app_config = {}, {}
501 server_config, app_config = {}, {}
502
502
503 try:
503 try:
504 path = self.global_config['__file__']
504 path = self.global_config['__file__']
505 config = configparser.RawConfigParser()
505 config = configparser.RawConfigParser()
506
506
507 config.read(path)
507 config.read(path)
508
508
509 if config.has_section('server:main'):
509 if config.has_section('server:main'):
510 server_config = dict(config.items('server:main'))
510 server_config = dict(config.items('server:main'))
511 if config.has_section('app:main'):
511 if config.has_section('app:main'):
512 app_config = dict(config.items('app:main'))
512 app_config = dict(config.items('app:main'))
513
513
514 except Exception:
514 except Exception:
515 log.exception('Failed to read .ini file for display')
515 log.exception('Failed to read .ini file for display')
516
516
517 environ = os.environ.items()
517 environ = os.environ.items()
518
518
519 resp = {
519 resp = {
520 'id': payload.get('id'),
520 'id': payload.get('id'),
521 'result': dict(
521 'result': dict(
522 version=vcsserver.__version__,
522 version=vcsserver.__version__,
523 config=server_config,
523 config=server_config,
524 app_config=app_config,
524 app_config=app_config,
525 environ=environ,
525 environ=environ,
526 payload=payload,
526 payload=payload,
527 )
527 )
528 }
528 }
529 return resp
529 return resp
530
530
531 def _msgpack_renderer_factory(self, info):
531 def _msgpack_renderer_factory(self, info):
532 def _render(value, system):
532 def _render(value, system):
533 request = system.get('request')
533 request = system.get('request')
534 if request is not None:
534 if request is not None:
535 response = request.response
535 response = request.response
536 ct = response.content_type
536 ct = response.content_type
537 if ct == response.default_content_type:
537 if ct == response.default_content_type:
538 response.content_type = 'application/x-msgpack'
538 response.content_type = 'application/x-msgpack'
539 return msgpack.packb(value)
539 return msgpack.packb(value)
540 return _render
540 return _render
541
541
542 def set_env_from_config(self, environ, config):
542 def set_env_from_config(self, environ, config):
543 dict_conf = {}
543 dict_conf = {}
544 try:
544 try:
545 for elem in config:
545 for elem in config:
546 if elem[0] == 'rhodecode':
546 if elem[0] == 'rhodecode':
547 dict_conf = json.loads(elem[2])
547 dict_conf = json.loads(elem[2])
548 break
548 break
549 except Exception:
549 except Exception:
550 log.exception('Failed to fetch SCM CONFIG')
550 log.exception('Failed to fetch SCM CONFIG')
551 return
551 return
552
552
553 username = dict_conf.get('username')
553 username = dict_conf.get('username')
554 if username:
554 if username:
555 environ['REMOTE_USER'] = username
555 environ['REMOTE_USER'] = username
556 # mercurial specific, some extension api rely on this
556 # mercurial specific, some extension api rely on this
557 environ['HGUSER'] = username
557 environ['HGUSER'] = username
558
558
559 ip = dict_conf.get('ip')
559 ip = dict_conf.get('ip')
560 if ip:
560 if ip:
561 environ['REMOTE_HOST'] = ip
561 environ['REMOTE_HOST'] = ip
562
562
563 if _is_request_chunked(environ):
563 if _is_request_chunked(environ):
564 # set the compatibility flag for webob
564 # set the compatibility flag for webob
565 environ['wsgi.input_terminated'] = True
565 environ['wsgi.input_terminated'] = True
566
566
567 def hg_proxy(self):
567 def hg_proxy(self):
568 @wsgiapp
568 @wsgiapp
569 def _hg_proxy(environ, start_response):
569 def _hg_proxy(environ, start_response):
570 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
570 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
571 return app(environ, start_response)
571 return app(environ, start_response)
572 return _hg_proxy
572 return _hg_proxy
573
573
574 def git_proxy(self):
574 def git_proxy(self):
575 @wsgiapp
575 @wsgiapp
576 def _git_proxy(environ, start_response):
576 def _git_proxy(environ, start_response):
577 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
577 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
578 return app(environ, start_response)
578 return app(environ, start_response)
579 return _git_proxy
579 return _git_proxy
580
580
581 def hg_stream(self):
581 def hg_stream(self):
582 if self._use_echo_app:
582 if self._use_echo_app:
583 @wsgiapp
583 @wsgiapp
584 def _hg_stream(environ, start_response):
584 def _hg_stream(environ, start_response):
585 app = EchoApp('fake_path', 'fake_name', None)
585 app = EchoApp('fake_path', 'fake_name', None)
586 return app(environ, start_response)
586 return app(environ, start_response)
587 return _hg_stream
587 return _hg_stream
588 else:
588 else:
589 @wsgiapp
589 @wsgiapp
590 def _hg_stream(environ, start_response):
590 def _hg_stream(environ, start_response):
591 log.debug('http-app: handling hg stream')
591 log.debug('http-app: handling hg stream')
592 repo_path = environ['HTTP_X_RC_REPO_PATH']
592 repo_path = environ['HTTP_X_RC_REPO_PATH']
593 repo_name = environ['HTTP_X_RC_REPO_NAME']
593 repo_name = environ['HTTP_X_RC_REPO_NAME']
594 packed_config = base64.b64decode(
594 packed_config = base64.b64decode(
595 environ['HTTP_X_RC_REPO_CONFIG'])
595 environ['HTTP_X_RC_REPO_CONFIG'])
596 config = msgpack.unpackb(packed_config)
596 config = msgpack.unpackb(packed_config)
597 app = scm_app.create_hg_wsgi_app(
597 app = scm_app.create_hg_wsgi_app(
598 repo_path, repo_name, config)
598 repo_path, repo_name, config)
599
599
600 # Consistent path information for hgweb
600 # Consistent path information for hgweb
601 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
601 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
602 environ['REPO_NAME'] = repo_name
602 environ['REPO_NAME'] = repo_name
603 self.set_env_from_config(environ, config)
603 self.set_env_from_config(environ, config)
604
604
605 log.debug('http-app: starting app handler '
605 log.debug('http-app: starting app handler '
606 'with %s and process request', app)
606 'with %s and process request', app)
607 return app(environ, ResponseFilter(start_response))
607 return app(environ, ResponseFilter(start_response))
608 return _hg_stream
608 return _hg_stream
609
609
610 def git_stream(self):
610 def git_stream(self):
611 if self._use_echo_app:
611 if self._use_echo_app:
612 @wsgiapp
612 @wsgiapp
613 def _git_stream(environ, start_response):
613 def _git_stream(environ, start_response):
614 app = EchoApp('fake_path', 'fake_name', None)
614 app = EchoApp('fake_path', 'fake_name', None)
615 return app(environ, start_response)
615 return app(environ, start_response)
616 return _git_stream
616 return _git_stream
617 else:
617 else:
618 @wsgiapp
618 @wsgiapp
619 def _git_stream(environ, start_response):
619 def _git_stream(environ, start_response):
620 log.debug('http-app: handling git stream')
620 log.debug('http-app: handling git stream')
621 repo_path = environ['HTTP_X_RC_REPO_PATH']
621 repo_path = environ['HTTP_X_RC_REPO_PATH']
622 repo_name = environ['HTTP_X_RC_REPO_NAME']
622 repo_name = environ['HTTP_X_RC_REPO_NAME']
623 packed_config = base64.b64decode(
623 packed_config = base64.b64decode(
624 environ['HTTP_X_RC_REPO_CONFIG'])
624 environ['HTTP_X_RC_REPO_CONFIG'])
625 config = msgpack.unpackb(packed_config)
625 config = msgpack.unpackb(packed_config)
626
626
627 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
627 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
628 self.set_env_from_config(environ, config)
628 self.set_env_from_config(environ, config)
629
629
630 content_type = environ.get('CONTENT_TYPE', '')
630 content_type = environ.get('CONTENT_TYPE', '')
631
631
632 path = environ['PATH_INFO']
632 path = environ['PATH_INFO']
633 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
633 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
634 log.debug(
634 log.debug(
635 'LFS: Detecting if request `%s` is LFS server path based '
635 'LFS: Detecting if request `%s` is LFS server path based '
636 'on content type:`%s`, is_lfs:%s',
636 'on content type:`%s`, is_lfs:%s',
637 path, content_type, is_lfs_request)
637 path, content_type, is_lfs_request)
638
638
639 if not is_lfs_request:
639 if not is_lfs_request:
640 # fallback detection by path
640 # fallback detection by path
641 if GIT_LFS_PROTO_PAT.match(path):
641 if GIT_LFS_PROTO_PAT.match(path):
642 is_lfs_request = True
642 is_lfs_request = True
643 log.debug(
643 log.debug(
644 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
644 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
645 path, is_lfs_request)
645 path, is_lfs_request)
646
646
647 if is_lfs_request:
647 if is_lfs_request:
648 app = scm_app.create_git_lfs_wsgi_app(
648 app = scm_app.create_git_lfs_wsgi_app(
649 repo_path, repo_name, config)
649 repo_path, repo_name, config)
650 else:
650 else:
651 app = scm_app.create_git_wsgi_app(
651 app = scm_app.create_git_wsgi_app(
652 repo_path, repo_name, config)
652 repo_path, repo_name, config)
653
653
654 log.debug('http-app: starting app handler '
654 log.debug('http-app: starting app handler '
655 'with %s and process request', app)
655 'with %s and process request', app)
656
656
657 return app(environ, start_response)
657 return app(environ, start_response)
658
658
659 return _git_stream
659 return _git_stream
660
660
661 def handle_vcs_exception(self, exception, request):
661 def handle_vcs_exception(self, exception, request):
662 _vcs_kind = getattr(exception, '_vcs_kind', '')
662 _vcs_kind = getattr(exception, '_vcs_kind', '')
663 if _vcs_kind == 'repo_locked':
663 if _vcs_kind == 'repo_locked':
664 # Get custom repo-locked status code if present.
664 # Get custom repo-locked status code if present.
665 status_code = request.headers.get('X-RC-Locked-Status-Code')
665 status_code = request.headers.get('X-RC-Locked-Status-Code')
666 return HTTPRepoLocked(
666 return HTTPRepoLocked(
667 title=exception.message, status_code=status_code)
667 title=exception.message, status_code=status_code)
668
668
669 elif _vcs_kind == 'repo_branch_protected':
669 elif _vcs_kind == 'repo_branch_protected':
670 # Get custom repo-branch-protected status code if present.
670 # Get custom repo-branch-protected status code if present.
671 return HTTPRepoBranchProtected(title=exception.message)
671 return HTTPRepoBranchProtected(title=exception.message)
672
672
673 exc_info = request.exc_info
673 exc_info = request.exc_info
674 store_exception(id(exc_info), exc_info)
674 store_exception(id(exc_info), exc_info)
675
675
676 traceback_info = 'unavailable'
676 traceback_info = 'unavailable'
677 if request.exc_info:
677 if request.exc_info:
678 exc_type, exc_value, exc_tb = request.exc_info
678 exc_type, exc_value, exc_tb = request.exc_info
679 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
679 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
680
680
681 log.error(
681 log.error(
682 'error occurred handling this request for path: %s, \n tb: %s',
682 'error occurred handling this request for path: %s, \n tb: %s',
683 request.path, traceback_info)
683 request.path, traceback_info)
684 raise exception
684 raise exception
685
685
686
686
687 class ResponseFilter(object):
687 class ResponseFilter(object):
688
688
689 def __init__(self, start_response):
689 def __init__(self, start_response):
690 self._start_response = start_response
690 self._start_response = start_response
691
691
692 def __call__(self, status, response_headers, exc_info=None):
692 def __call__(self, status, response_headers, exc_info=None):
693 headers = tuple(
693 headers = tuple(
694 (h, v) for h, v in response_headers
694 (h, v) for h, v in response_headers
695 if not wsgiref.util.is_hop_by_hop(h))
695 if not wsgiref.util.is_hop_by_hop(h))
696 return self._start_response(status, headers, exc_info)
696 return self._start_response(status, headers, exc_info)
697
697
698
698
699 def main(global_config, **settings):
699 def main(global_config, **settings):
700 if MercurialFactory:
700 if MercurialFactory:
701 hgpatches.patch_largefiles_capabilities()
701 hgpatches.patch_largefiles_capabilities()
702 hgpatches.patch_subrepo_type_mapping()
702 hgpatches.patch_subrepo_type_mapping()
703
703
704 app = HTTPApplication(settings=settings, global_config=global_config)
704 app = HTTPApplication(settings=settings, global_config=global_config)
705 return app.wsgi_app()
705 return app.wsgi_app()
@@ -1,77 +1,79 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 from dogpile.cache import register_backend
19 from dogpile.cache import register_backend
20
20
21 register_backend(
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
23 "LRUMemoryBackend")
24
24
25 register_backend(
25 register_backend(
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
27 "FileNamespaceBackend")
27 "FileNamespaceBackend")
28
28
29 register_backend(
29 register_backend(
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
31 "RedisPickleBackend")
31 "RedisPickleBackend")
32
32
33 register_backend(
33 register_backend(
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
35 "RedisMsgPackBackend")
35 "RedisMsgPackBackend")
36
36
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40 from . import region_meta
40 from . import region_meta
41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
41 from .utils import (
42 get_default_cache_settings, backend_key_generator, get_or_create_region,
43 clear_cache_namespace, make_region)
42
44
43
45
44 def configure_dogpile_cache(settings):
46 def configure_dogpile_cache(settings):
45 cache_dir = settings.get('cache_dir')
47 cache_dir = settings.get('cache_dir')
46 if cache_dir:
48 if cache_dir:
47 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
49 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
48
50
49 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
51 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
50
52
51 # inspect available namespaces
53 # inspect available namespaces
52 avail_regions = set()
54 avail_regions = set()
53 for key in rc_cache_data.keys():
55 for key in rc_cache_data.keys():
54 namespace_name = key.split('.', 1)[0]
56 namespace_name = key.split('.', 1)[0]
55 if namespace_name in avail_regions:
57 if namespace_name in avail_regions:
56 continue
58 continue
57
59
58 avail_regions.add(namespace_name)
60 avail_regions.add(namespace_name)
59 log.debug('dogpile: found following cache regions: %s', namespace_name)
61 log.debug('dogpile: found following cache regions: %s', namespace_name)
60
62
61 new_region = make_region(
63 new_region = make_region(
62 name=namespace_name,
64 name=namespace_name,
63 function_key_generator=None
65 function_key_generator=None
64 )
66 )
65
67
66 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(namespace_name))
68 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(namespace_name))
67 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
69 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
68 if log.isEnabledFor(logging.DEBUG):
70 if log.isEnabledFor(logging.DEBUG):
69 region_args = dict(backend=new_region.actual_backend.__class__,
71 region_args = dict(backend=new_region.actual_backend.__class__,
70 region_invalidator=new_region.region_invalidator.__class__)
72 region_invalidator=new_region.region_invalidator.__class__)
71 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
73 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
72
74
73 region_meta.dogpile_cache_regions[namespace_name] = new_region
75 region_meta.dogpile_cache_regions[namespace_name] = new_region
74
76
75
77
76 def includeme(config):
78 def includeme(config):
77 configure_dogpile_cache(config.registry.settings)
79 configure_dogpile_cache(config.registry.settings)
@@ -1,158 +1,263 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import time
19 import logging
20 import logging
20 import functools
21 import functools
21 import time
22
23 from decorator import decorate
24
22
25 from dogpile.cache import CacheRegion
23 from dogpile.cache import CacheRegion
26 from dogpile.cache.util import compat
24 from dogpile.cache.util import compat
27
25
28 from vcsserver.utils import safe_str, sha1
26 from vcsserver.utils import safe_str, sha1
29
27
28 from vcsserver.lib.rc_cache import region_meta
30
29
31 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
32
31
33
32
34 class RhodeCodeCacheRegion(CacheRegion):
33 class RhodeCodeCacheRegion(CacheRegion):
35
34
36 def conditional_cache_on_arguments(
35 def conditional_cache_on_arguments(
37 self, namespace=None,
36 self, namespace=None,
38 expiration_time=None,
37 expiration_time=None,
39 should_cache_fn=None,
38 should_cache_fn=None,
40 to_str=compat.string_type,
39 to_str=compat.string_type,
41 function_key_generator=None,
40 function_key_generator=None,
42 condition=True):
41 condition=True):
43 """
42 """
44 Custom conditional decorator, that will not touch any dogpile internals if
43 Custom conditional decorator, that will not touch any dogpile internals if
45 condition isn't meet. This works a bit different than should_cache_fn
44 condition isn't meet. This works a bit different than should_cache_fn
46 And it's faster in cases we don't ever want to compute cached values
45 And it's faster in cases we don't ever want to compute cached values
47 """
46 """
48 expiration_time_is_callable = compat.callable(expiration_time)
47 expiration_time_is_callable = compat.callable(expiration_time)
49
48
50 if function_key_generator is None:
49 if function_key_generator is None:
51 function_key_generator = self.function_key_generator
50 function_key_generator = self.function_key_generator
52
51
52 # workaround for py2 and cython problems, this block should be removed
53 # once we've migrated to py3
54 if 'cython' == 'cython':
55 def decorator(fn):
56 if to_str is compat.string_type:
57 # backwards compatible
58 key_generator = function_key_generator(namespace, fn)
59 else:
60 key_generator = function_key_generator(namespace, fn, to_str=to_str)
61
62 @functools.wraps(fn)
63 def decorate(*arg, **kw):
64 key = key_generator(*arg, **kw)
65
66 @functools.wraps(fn)
67 def creator():
68 return fn(*arg, **kw)
69
70 if not condition:
71 return creator()
72
73 timeout = expiration_time() if expiration_time_is_callable \
74 else expiration_time
75
76 return self.get_or_create(key, creator, timeout, should_cache_fn)
77
78 def invalidate(*arg, **kw):
79 key = key_generator(*arg, **kw)
80 self.delete(key)
81
82 def set_(value, *arg, **kw):
83 key = key_generator(*arg, **kw)
84 self.set(key, value)
85
86 def get(*arg, **kw):
87 key = key_generator(*arg, **kw)
88 return self.get(key)
89
90 def refresh(*arg, **kw):
91 key = key_generator(*arg, **kw)
92 value = fn(*arg, **kw)
93 self.set(key, value)
94 return value
95
96 decorate.set = set_
97 decorate.invalidate = invalidate
98 decorate.refresh = refresh
99 decorate.get = get
100 decorate.original = fn
101 decorate.key_generator = key_generator
102 decorate.__wrapped__ = fn
103
104 return decorate
105 return decorator
106
53 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
107 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
54
108
55 if not condition:
109 if not condition:
56 log.debug('Calling un-cached func:%s', user_func.func_name)
110 log.debug('Calling un-cached func:%s', user_func.func_name)
57 start = time.time()
111 start = time.time()
58 result = user_func(*arg, **kw)
112 result = user_func(*arg, **kw)
59 total = time.time() - start
113 total = time.time() - start
60 log.debug('un-cached func:%s took %.4fs', user_func.func_name, total)
114 log.debug('un-cached func:%s took %.4fs', user_func.func_name, total)
61 return result
115 return result
62
116
63 key = key_generator(*arg, **kw)
117 key = key_generator(*arg, **kw)
64
118
65 timeout = expiration_time() if expiration_time_is_callable \
119 timeout = expiration_time() if expiration_time_is_callable \
66 else expiration_time
120 else expiration_time
67
121
68 log.debug('Calling cached fn:%s', user_func.func_name)
122 log.debug('Calling cached fn:%s', user_func.func_name)
69 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
123 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
70
124
71 def cache_decorator(user_func):
125 def cache_decorator(user_func):
72 if to_str is compat.string_type:
126 if to_str is compat.string_type:
73 # backwards compatible
127 # backwards compatible
74 key_generator = function_key_generator(namespace, user_func)
128 key_generator = function_key_generator(namespace, user_func)
75 else:
129 else:
76 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
130 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
77
131
78 def refresh(*arg, **kw):
132 def refresh(*arg, **kw):
79 """
133 """
80 Like invalidate, but regenerates the value instead
134 Like invalidate, but regenerates the value instead
81 """
135 """
82 key = key_generator(*arg, **kw)
136 key = key_generator(*arg, **kw)
83 value = user_func(*arg, **kw)
137 value = user_func(*arg, **kw)
84 self.set(key, value)
138 self.set(key, value)
85 return value
139 return value
86
140
87 def invalidate(*arg, **kw):
141 def invalidate(*arg, **kw):
88 key = key_generator(*arg, **kw)
142 key = key_generator(*arg, **kw)
89 self.delete(key)
143 self.delete(key)
90
144
91 def set_(value, *arg, **kw):
145 def set_(value, *arg, **kw):
92 key = key_generator(*arg, **kw)
146 key = key_generator(*arg, **kw)
93 self.set(key, value)
147 self.set(key, value)
94
148
95 def get(*arg, **kw):
149 def get(*arg, **kw):
96 key = key_generator(*arg, **kw)
150 key = key_generator(*arg, **kw)
97 return self.get(key)
151 return self.get(key)
98
152
99 user_func.set = set_
153 user_func.set = set_
100 user_func.invalidate = invalidate
154 user_func.invalidate = invalidate
101 user_func.get = get
155 user_func.get = get
102 user_func.refresh = refresh
156 user_func.refresh = refresh
103 user_func.key_generator = key_generator
157 user_func.key_generator = key_generator
104 user_func.original = user_func
158 user_func.original = user_func
105
159
106 # Use `decorate` to preserve the signature of :param:`user_func`.
160 # Use `decorate` to preserve the signature of :param:`user_func`.
107 return decorate(user_func, functools.partial(
161 return decorator.decorate(user_func, functools.partial(
108 get_or_create_for_user_func, key_generator))
162 get_or_create_for_user_func, key_generator))
109
163
110 return cache_decorator
164 return cache_decorator
111
165
112
166
113 def make_region(*arg, **kw):
167 def make_region(*arg, **kw):
114 return RhodeCodeCacheRegion(*arg, **kw)
168 return RhodeCodeCacheRegion(*arg, **kw)
115
169
116
170
117 def get_default_cache_settings(settings, prefixes=None):
171 def get_default_cache_settings(settings, prefixes=None):
118 prefixes = prefixes or []
172 prefixes = prefixes or []
119 cache_settings = {}
173 cache_settings = {}
120 for key in settings.keys():
174 for key in settings.keys():
121 for prefix in prefixes:
175 for prefix in prefixes:
122 if key.startswith(prefix):
176 if key.startswith(prefix):
123 name = key.split(prefix)[1].strip()
177 name = key.split(prefix)[1].strip()
124 val = settings[key]
178 val = settings[key]
125 if isinstance(val, compat.string_types):
179 if isinstance(val, compat.string_types):
126 val = val.strip()
180 val = val.strip()
127 cache_settings[name] = val
181 cache_settings[name] = val
128 return cache_settings
182 return cache_settings
129
183
130
184
131 def compute_key_from_params(*args):
185 def compute_key_from_params(*args):
132 """
186 """
133 Helper to compute key from given params to be used in cache manager
187 Helper to compute key from given params to be used in cache manager
134 """
188 """
135 return sha1("_".join(map(safe_str, args)))
189 return sha1("_".join(map(safe_str, args)))
136
190
137
191
138 def backend_key_generator(backend):
192 def backend_key_generator(backend):
139 """
193 """
140 Special wrapper that also sends over the backend to the key generator
194 Special wrapper that also sends over the backend to the key generator
141 """
195 """
142 def wrapper(namespace, fn):
196 def wrapper(namespace, fn):
143 return key_generator(backend, namespace, fn)
197 return key_generator(backend, namespace, fn)
144 return wrapper
198 return wrapper
145
199
146
200
147 def key_generator(backend, namespace, fn):
201 def key_generator(backend, namespace, fn):
148 fname = fn.__name__
202 fname = fn.__name__
149
203
150 def generate_key(*args):
204 def generate_key(*args):
151 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
205 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
152 namespace_pref = namespace or 'default_namespace'
206 namespace_pref = namespace or 'default_namespace'
153 arg_key = compute_key_from_params(*args)
207 arg_key = compute_key_from_params(*args)
154 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
208 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
155
209
156 return final_key
210 return final_key
157
211
158 return generate_key
212 return generate_key
213
214
215 def get_or_create_region(region_name, region_namespace=None):
216 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
217 region_obj = region_meta.dogpile_cache_regions.get(region_name)
218 if not region_obj:
219 raise EnvironmentError(
220 'Region `{}` not in configured: {}.'.format(
221 region_name, region_meta.dogpile_cache_regions.keys()))
222
223 region_uid_name = '{}:{}'.format(region_name, region_namespace)
224 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
225 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
226 if region_exist:
227 log.debug('Using already configured region: %s', region_namespace)
228 return region_exist
229 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
230 expiration_time = region_obj.expiration_time
231
232 if not os.path.isdir(cache_dir):
233 os.makedirs(cache_dir)
234 new_region = make_region(
235 name=region_uid_name,
236 function_key_generator=backend_key_generator(region_obj.actual_backend)
237 )
238 namespace_filename = os.path.join(
239 cache_dir, "{}.cache.dbm".format(region_namespace))
240 # special type that allows 1db per namespace
241 new_region.configure(
242 backend='dogpile.cache.rc.file_namespace',
243 expiration_time=expiration_time,
244 arguments={"filename": namespace_filename}
245 )
246
247 # create and save in region caches
248 log.debug('configuring new region: %s', region_uid_name)
249 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
250
251 return region_obj
252
253
254 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
255 region = get_or_create_region(cache_region, cache_namespace_uid)
256 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
257 num_delete_keys = len(cache_keys)
258 if invalidate:
259 region.invalidate(hard=False)
260 else:
261 if num_delete_keys:
262 region.delete_multi(cache_keys)
263 return num_delete_keys
@@ -1,855 +1,862 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 import time
22 import time
23 from urllib2 import URLError
23 from urllib2 import URLError
24 import urlparse
24 import urlparse
25 import logging
25 import logging
26 import posixpath as vcspath
26 import posixpath as vcspath
27 import StringIO
27 import StringIO
28 import urllib
28 import urllib
29 import traceback
29 import traceback
30
30
31 import svn.client
31 import svn.client
32 import svn.core
32 import svn.core
33 import svn.delta
33 import svn.delta
34 import svn.diff
34 import svn.diff
35 import svn.fs
35 import svn.fs
36 import svn.repos
36 import svn.repos
37
37
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.utils import safe_str
41 from vcsserver.utils import safe_str
42 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
# Map of legacy "pre-X-compatible" repository flags onto the plain version
# strings accepted by the modern `compatible-version` fs config knob.
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Compatibility level used for newly created repositories by default.
current_compatible_version = '1.12'
56
56
57
57
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral.

    Exceptions already tagged with ``_vcs_kind`` pass through untouched;
    anything else is logged and re-raised as an ``UnhandledException`` so
    remote callers never see raw svn binding errors.
    """
    import functools

    # functools.wraps keeps __name__/__doc__ of the wrapped callable,
    # which matters for logging and for cache key generation elsewhere.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e))
            raise
    return wrapper
68 return wrapper
69
69
70
70
class SubversionFactory(RepoFactory):
    """Factory producing raw svn repository objects for a given wire."""

    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        path = svn.core.svn_path_canonicalize(wire['path'])
        if not create:
            # Plain open of an existing repository.
            repo = svn.repos.open(path)
            log.debug('Got SVN object: %s', repo)
            return repo

        fs_config = {'compatible-version': current_compatible_version}
        if compatible_version:
            # Accept either a legacy "pre-X-compatible" flag or a plain
            # version string.
            fs_config['compatible-version'] = (
                svn_compatible_versions_map.get(compatible_version)
                or compatible_version)

        log.debug('Create SVN repo with config "%s"', fs_config)
        repo = svn.repos.create(path, "", "", None, fs_config)
        log.debug('Got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, compatible_version)
98
98
99
99
# Translate low-level svn node kinds into the string names used by the rest
# of the code base; kinds not listed here resolve to None via .get().
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
103 }
104
104
105
105
106 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
107
107
108 def __init__(self, factory, hg_factory=None):
108 def __init__(self, factory, hg_factory=None):
109 self._factory = factory
109 self._factory = factory
110 # TODO: Remove once we do not use internal Mercurial objects anymore
110 # TODO: Remove once we do not use internal Mercurial objects anymore
111 # for subversion
111 # for subversion
112 self._hg_factory = hg_factory
112 self._hg_factory = hg_factory
113
113
114 @reraise_safe_exceptions
114 @reraise_safe_exceptions
115 def discover_svn_version(self):
115 def discover_svn_version(self):
116 try:
116 try:
117 import svn.core
117 import svn.core
118 svn_ver = svn.core.SVN_VERSION
118 svn_ver = svn.core.SVN_VERSION
119 except ImportError:
119 except ImportError:
120 svn_ver = None
120 svn_ver = None
121 return svn_ver
121 return svn_ver
122
122
123 @reraise_safe_exceptions
123 @reraise_safe_exceptions
124 def is_empty(self, wire):
124 def is_empty(self, wire):
125
125
126 try:
126 try:
127 return self.lookup(wire, -1) == 0
127 return self.lookup(wire, -1) == 0
128 except Exception:
128 except Exception:
129 log.exception("failed to read object_store")
129 log.exception("failed to read object_store")
130 return False
130 return False
131
131
132 def check_url(self, url, config_items):
132 def check_url(self, url, config_items):
133 # this can throw exception if not installed, but we detect this
133 # this can throw exception if not installed, but we detect this
134 from hgsubversion import svnrepo
134 from hgsubversion import svnrepo
135
135
136 baseui = self._hg_factory._create_config(config_items)
136 baseui = self._hg_factory._create_config(config_items)
137 # uuid function get's only valid UUID from proper repo, else
137 # uuid function get's only valid UUID from proper repo, else
138 # throws exception
138 # throws exception
139 try:
139 try:
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
141 except Exception:
141 except Exception:
142 tb = traceback.format_exc()
142 tb = traceback.format_exc()
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
144 raise URLError(
144 raise URLError(
145 '"%s" is not a valid Subversion source url.' % (url, ))
145 '"%s" is not a valid Subversion source url.' % (url, ))
146 return True
146 return True
147
147
148 def is_path_valid_repository(self, wire, path):
148 def is_path_valid_repository(self, wire, path):
149
149
150 # NOTE(marcink): short circuit the check for SVN repo
150 # NOTE(marcink): short circuit the check for SVN repo
151 # the repos.open might be expensive to check, but we have one cheap
151 # the repos.open might be expensive to check, but we have one cheap
152 # pre condition that we can use, to check for 'format' file
152 # pre condition that we can use, to check for 'format' file
153
153
154 if not os.path.isfile(os.path.join(path, 'format')):
154 if not os.path.isfile(os.path.join(path, 'format')):
155 return False
155 return False
156
156
157 try:
157 try:
158 svn.repos.open(path)
158 svn.repos.open(path)
159 except svn.core.SubversionException:
159 except svn.core.SubversionException:
160 tb = traceback.format_exc()
160 tb = traceback.format_exc()
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
162 return False
162 return False
163 return True
163 return True
164
164
165 @reraise_safe_exceptions
165 @reraise_safe_exceptions
166 def verify(self, wire,):
166 def verify(self, wire,):
167 repo_path = wire['path']
167 repo_path = wire['path']
168 if not self.is_path_valid_repository(wire, repo_path):
168 if not self.is_path_valid_repository(wire, repo_path):
169 raise Exception(
169 raise Exception(
170 "Path %s is not a valid Subversion repository." % repo_path)
170 "Path %s is not a valid Subversion repository." % repo_path)
171
171
172 cmd = ['svnadmin', 'info', repo_path]
172 cmd = ['svnadmin', 'info', repo_path]
173 stdout, stderr = subprocessio.run_command(cmd)
173 stdout, stderr = subprocessio.run_command(cmd)
174 return stdout
174 return stdout
175
175
176 def lookup(self, wire, revision):
176 def lookup(self, wire, revision):
177 if revision not in [-1, None, 'HEAD']:
177 if revision not in [-1, None, 'HEAD']:
178 raise NotImplementedError
178 raise NotImplementedError
179 repo = self._factory.repo(wire)
179 repo = self._factory.repo(wire)
180 fs_ptr = svn.repos.fs(repo)
180 fs_ptr = svn.repos.fs(repo)
181 head = svn.fs.youngest_rev(fs_ptr)
181 head = svn.fs.youngest_rev(fs_ptr)
182 return head
182 return head
183
183
184 def lookup_interval(self, wire, start_ts, end_ts):
184 def lookup_interval(self, wire, start_ts, end_ts):
185 repo = self._factory.repo(wire)
185 repo = self._factory.repo(wire)
186 fsobj = svn.repos.fs(repo)
186 fsobj = svn.repos.fs(repo)
187 start_rev = None
187 start_rev = None
188 end_rev = None
188 end_rev = None
189 if start_ts:
189 if start_ts:
190 start_ts_svn = apr_time_t(start_ts)
190 start_ts_svn = apr_time_t(start_ts)
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
192 else:
192 else:
193 start_rev = 1
193 start_rev = 1
194 if end_ts:
194 if end_ts:
195 end_ts_svn = apr_time_t(end_ts)
195 end_ts_svn = apr_time_t(end_ts)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
197 else:
197 else:
198 end_rev = svn.fs.youngest_rev(fsobj)
198 end_rev = svn.fs.youngest_rev(fsobj)
199 return start_rev, end_rev
199 return start_rev, end_rev
200
200
201 def revision_properties(self, wire, revision):
201 def revision_properties(self, wire, revision):
202
202
203 cache_on, context_uid, repo_id = self._cache_on(wire)
203 cache_on, context_uid, repo_id = self._cache_on(wire)
204 @self.region.conditional_cache_on_arguments(condition=cache_on)
204 region = self.region(wire)
205 @region.conditional_cache_on_arguments(condition=cache_on)
205 def _revision_properties(_repo_id, _revision):
206 def _revision_properties(_repo_id, _revision):
206 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
207 fs_ptr = svn.repos.fs(repo)
208 fs_ptr = svn.repos.fs(repo)
208 return svn.fs.revision_proplist(fs_ptr, revision)
209 return svn.fs.revision_proplist(fs_ptr, revision)
209 return _revision_properties(repo_id, revision)
210 return _revision_properties(repo_id, revision)
210
211
211 def revision_changes(self, wire, revision):
212 def revision_changes(self, wire, revision):
212
213
213 repo = self._factory.repo(wire)
214 repo = self._factory.repo(wire)
214 fsobj = svn.repos.fs(repo)
215 fsobj = svn.repos.fs(repo)
215 rev_root = svn.fs.revision_root(fsobj, revision)
216 rev_root = svn.fs.revision_root(fsobj, revision)
216
217
217 editor = svn.repos.ChangeCollector(fsobj, rev_root)
218 editor = svn.repos.ChangeCollector(fsobj, rev_root)
218 editor_ptr, editor_baton = svn.delta.make_editor(editor)
219 editor_ptr, editor_baton = svn.delta.make_editor(editor)
219 base_dir = ""
220 base_dir = ""
220 send_deltas = False
221 send_deltas = False
221 svn.repos.replay2(
222 svn.repos.replay2(
222 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
223 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
223 editor_ptr, editor_baton, None)
224 editor_ptr, editor_baton, None)
224
225
225 added = []
226 added = []
226 changed = []
227 changed = []
227 removed = []
228 removed = []
228
229
229 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
230 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
230 for path, change in editor.changes.iteritems():
231 for path, change in editor.changes.iteritems():
231 # TODO: Decide what to do with directory nodes. Subversion can add
232 # TODO: Decide what to do with directory nodes. Subversion can add
232 # empty directories.
233 # empty directories.
233
234
234 if change.item_kind == svn.core.svn_node_dir:
235 if change.item_kind == svn.core.svn_node_dir:
235 continue
236 continue
236 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
237 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
237 added.append(path)
238 added.append(path)
238 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
239 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
239 svn.repos.CHANGE_ACTION_REPLACE]:
240 svn.repos.CHANGE_ACTION_REPLACE]:
240 changed.append(path)
241 changed.append(path)
241 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
242 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
242 removed.append(path)
243 removed.append(path)
243 else:
244 else:
244 raise NotImplementedError(
245 raise NotImplementedError(
245 "Action %s not supported on path %s" % (
246 "Action %s not supported on path %s" % (
246 change.action, path))
247 change.action, path))
247
248
248 changes = {
249 changes = {
249 'added': added,
250 'added': added,
250 'changed': changed,
251 'changed': changed,
251 'removed': removed,
252 'removed': removed,
252 }
253 }
253 return changes
254 return changes
254
255
255 @reraise_safe_exceptions
256 @reraise_safe_exceptions
256 def node_history(self, wire, path, revision, limit):
257 def node_history(self, wire, path, revision, limit):
257 cache_on, context_uid, repo_id = self._cache_on(wire)
258 cache_on, context_uid, repo_id = self._cache_on(wire)
258 @self.region.conditional_cache_on_arguments(condition=cache_on)
259 region = self.region(wire)
260 @region.conditional_cache_on_arguments(condition=cache_on)
259 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
261 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
260 cross_copies = False
262 cross_copies = False
261 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
262 fsobj = svn.repos.fs(repo)
264 fsobj = svn.repos.fs(repo)
263 rev_root = svn.fs.revision_root(fsobj, revision)
265 rev_root = svn.fs.revision_root(fsobj, revision)
264
266
265 history_revisions = []
267 history_revisions = []
266 history = svn.fs.node_history(rev_root, path)
268 history = svn.fs.node_history(rev_root, path)
267 history = svn.fs.history_prev(history, cross_copies)
269 history = svn.fs.history_prev(history, cross_copies)
268 while history:
270 while history:
269 __, node_revision = svn.fs.history_location(history)
271 __, node_revision = svn.fs.history_location(history)
270 history_revisions.append(node_revision)
272 history_revisions.append(node_revision)
271 if limit and len(history_revisions) >= limit:
273 if limit and len(history_revisions) >= limit:
272 break
274 break
273 history = svn.fs.history_prev(history, cross_copies)
275 history = svn.fs.history_prev(history, cross_copies)
274 return history_revisions
276 return history_revisions
275 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
277 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
276
278
277 def node_properties(self, wire, path, revision):
279 def node_properties(self, wire, path, revision):
278 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
279 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 region = self.region(wire)
282 @region.conditional_cache_on_arguments(condition=cache_on)
280 def _node_properties(_repo_id, _path, _revision):
283 def _node_properties(_repo_id, _path, _revision):
281 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
282 fsobj = svn.repos.fs(repo)
285 fsobj = svn.repos.fs(repo)
283 rev_root = svn.fs.revision_root(fsobj, revision)
286 rev_root = svn.fs.revision_root(fsobj, revision)
284 return svn.fs.node_proplist(rev_root, path)
287 return svn.fs.node_proplist(rev_root, path)
285 return _node_properties(repo_id, path, revision)
288 return _node_properties(repo_id, path, revision)
286
289
287 def file_annotate(self, wire, path, revision):
290 def file_annotate(self, wire, path, revision):
288 abs_path = 'file://' + urllib.pathname2url(
291 abs_path = 'file://' + urllib.pathname2url(
289 vcspath.join(wire['path'], path))
292 vcspath.join(wire['path'], path))
290 file_uri = svn.core.svn_path_canonicalize(abs_path)
293 file_uri = svn.core.svn_path_canonicalize(abs_path)
291
294
292 start_rev = svn_opt_revision_value_t(0)
295 start_rev = svn_opt_revision_value_t(0)
293 peg_rev = svn_opt_revision_value_t(revision)
296 peg_rev = svn_opt_revision_value_t(revision)
294 end_rev = peg_rev
297 end_rev = peg_rev
295
298
296 annotations = []
299 annotations = []
297
300
298 def receiver(line_no, revision, author, date, line, pool):
301 def receiver(line_no, revision, author, date, line, pool):
299 annotations.append((line_no, revision, line))
302 annotations.append((line_no, revision, line))
300
303
301 # TODO: Cannot use blame5, missing typemap function in the swig code
304 # TODO: Cannot use blame5, missing typemap function in the swig code
302 try:
305 try:
303 svn.client.blame2(
306 svn.client.blame2(
304 file_uri, peg_rev, start_rev, end_rev,
307 file_uri, peg_rev, start_rev, end_rev,
305 receiver, svn.client.create_context())
308 receiver, svn.client.create_context())
306 except svn.core.SubversionException as exc:
309 except svn.core.SubversionException as exc:
307 log.exception("Error during blame operation.")
310 log.exception("Error during blame operation.")
308 raise Exception(
311 raise Exception(
309 "Blame not supported or file does not exist at path %s. "
312 "Blame not supported or file does not exist at path %s. "
310 "Error %s." % (path, exc))
313 "Error %s." % (path, exc))
311
314
312 return annotations
315 return annotations
313
316
314 def get_node_type(self, wire, path, revision=None):
317 def get_node_type(self, wire, path, revision=None):
315
318
316 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
317 @self.region.conditional_cache_on_arguments(condition=cache_on)
320 region = self.region(wire)
321 @region.conditional_cache_on_arguments(condition=cache_on)
318 def _get_node_type(_repo_id, _path, _revision):
322 def _get_node_type(_repo_id, _path, _revision):
319 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
320 fs_ptr = svn.repos.fs(repo)
324 fs_ptr = svn.repos.fs(repo)
321 if _revision is None:
325 if _revision is None:
322 _revision = svn.fs.youngest_rev(fs_ptr)
326 _revision = svn.fs.youngest_rev(fs_ptr)
323 root = svn.fs.revision_root(fs_ptr, _revision)
327 root = svn.fs.revision_root(fs_ptr, _revision)
324 node = svn.fs.check_path(root, path)
328 node = svn.fs.check_path(root, path)
325 return NODE_TYPE_MAPPING.get(node, None)
329 return NODE_TYPE_MAPPING.get(node, None)
326 return _get_node_type(repo_id, path, revision)
330 return _get_node_type(repo_id, path, revision)
327
331
328 def get_nodes(self, wire, path, revision=None):
332 def get_nodes(self, wire, path, revision=None):
329
333
330 cache_on, context_uid, repo_id = self._cache_on(wire)
334 cache_on, context_uid, repo_id = self._cache_on(wire)
331 @self.region.conditional_cache_on_arguments(condition=cache_on)
335 region = self.region(wire)
336 @region.conditional_cache_on_arguments(condition=cache_on)
332 def _get_nodes(_repo_id, _path, _revision):
337 def _get_nodes(_repo_id, _path, _revision):
333 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
334 fsobj = svn.repos.fs(repo)
339 fsobj = svn.repos.fs(repo)
335 if _revision is None:
340 if _revision is None:
336 _revision = svn.fs.youngest_rev(fsobj)
341 _revision = svn.fs.youngest_rev(fsobj)
337 root = svn.fs.revision_root(fsobj, _revision)
342 root = svn.fs.revision_root(fsobj, _revision)
338 entries = svn.fs.dir_entries(root, path)
343 entries = svn.fs.dir_entries(root, path)
339 result = []
344 result = []
340 for entry_path, entry_info in entries.iteritems():
345 for entry_path, entry_info in entries.iteritems():
341 result.append(
346 result.append(
342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 return result
348 return result
344 return _get_nodes(repo_id, path, revision)
349 return _get_nodes(repo_id, path, revision)
345
350
346 def get_file_content(self, wire, path, rev=None):
351 def get_file_content(self, wire, path, rev=None):
347 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
348 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
349 if rev is None:
354 if rev is None:
350 rev = svn.fs.youngest_revision(fsobj)
355 rev = svn.fs.youngest_revision(fsobj)
351 root = svn.fs.revision_root(fsobj, rev)
356 root = svn.fs.revision_root(fsobj, rev)
352 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 return content.read()
358 return content.read()
354
359
355 def get_file_size(self, wire, path, revision=None):
360 def get_file_size(self, wire, path, revision=None):
356
361
357 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
358 @self.region.conditional_cache_on_arguments(condition=cache_on)
363 region = self.region(wire)
364 @region.conditional_cache_on_arguments(condition=cache_on)
359 def _get_file_size(_repo_id, _path, _revision):
365 def _get_file_size(_repo_id, _path, _revision):
360 repo = self._factory.repo(wire)
366 repo = self._factory.repo(wire)
361 fsobj = svn.repos.fs(repo)
367 fsobj = svn.repos.fs(repo)
362 if _revision is None:
368 if _revision is None:
363 _revision = svn.fs.youngest_revision(fsobj)
369 _revision = svn.fs.youngest_revision(fsobj)
364 root = svn.fs.revision_root(fsobj, _revision)
370 root = svn.fs.revision_root(fsobj, _revision)
365 size = svn.fs.file_length(root, path)
371 size = svn.fs.file_length(root, path)
366 return size
372 return size
367 return _get_file_size(repo_id, path, revision)
373 return _get_file_size(repo_id, path, revision)
368
374
369 def create_repository(self, wire, compatible_version=None):
375 def create_repository(self, wire, compatible_version=None):
370 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
371 self._factory.repo(wire, create=True,
377 self._factory.repo(wire, create=True,
372 compatible_version=compatible_version)
378 compatible_version=compatible_version)
373
379
374 def get_url_and_credentials(self, src_url):
380 def get_url_and_credentials(self, src_url):
375 obj = urlparse.urlparse(src_url)
381 obj = urlparse.urlparse(src_url)
376 username = obj.username or None
382 username = obj.username or None
377 password = obj.password or None
383 password = obj.password or None
378 return username, password, src_url
384 return username, password, src_url
379
385
380 def import_remote_repository(self, wire, src_url):
386 def import_remote_repository(self, wire, src_url):
381 repo_path = wire['path']
387 repo_path = wire['path']
382 if not self.is_path_valid_repository(wire, repo_path):
388 if not self.is_path_valid_repository(wire, repo_path):
383 raise Exception(
389 raise Exception(
384 "Path %s is not a valid Subversion repository." % repo_path)
390 "Path %s is not a valid Subversion repository." % repo_path)
385
391
386 username, password, src_url = self.get_url_and_credentials(src_url)
392 username, password, src_url = self.get_url_and_credentials(src_url)
387 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
393 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
388 '--trust-server-cert-failures=unknown-ca']
394 '--trust-server-cert-failures=unknown-ca']
389 if username and password:
395 if username and password:
390 rdump_cmd += ['--username', username, '--password', password]
396 rdump_cmd += ['--username', username, '--password', password]
391 rdump_cmd += [src_url]
397 rdump_cmd += [src_url]
392
398
393 rdump = subprocess.Popen(
399 rdump = subprocess.Popen(
394 rdump_cmd,
400 rdump_cmd,
395 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
401 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
396 load = subprocess.Popen(
402 load = subprocess.Popen(
397 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
403 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
398
404
399 # TODO: johbo: This can be a very long operation, might be better
405 # TODO: johbo: This can be a very long operation, might be better
400 # to track some kind of status and provide an api to check if the
406 # to track some kind of status and provide an api to check if the
401 # import is done.
407 # import is done.
402 rdump.wait()
408 rdump.wait()
403 load.wait()
409 load.wait()
404
410
405 log.debug('Return process ended with code: %s', rdump.returncode)
411 log.debug('Return process ended with code: %s', rdump.returncode)
406 if rdump.returncode != 0:
412 if rdump.returncode != 0:
407 errors = rdump.stderr.read()
413 errors = rdump.stderr.read()
408 log.error('svnrdump dump failed: statuscode %s: message: %s',
414 log.error('svnrdump dump failed: statuscode %s: message: %s',
409 rdump.returncode, errors)
415 rdump.returncode, errors)
410 reason = 'UNKNOWN'
416 reason = 'UNKNOWN'
411 if 'svnrdump: E230001:' in errors:
417 if 'svnrdump: E230001:' in errors:
412 reason = 'INVALID_CERTIFICATE'
418 reason = 'INVALID_CERTIFICATE'
413
419
414 if reason == 'UNKNOWN':
420 if reason == 'UNKNOWN':
415 reason = 'UNKNOWN:{}'.format(errors)
421 reason = 'UNKNOWN:{}'.format(errors)
416 raise Exception(
422 raise Exception(
417 'Failed to dump the remote repository from %s. Reason:%s' % (
423 'Failed to dump the remote repository from %s. Reason:%s' % (
418 src_url, reason))
424 src_url, reason))
419 if load.returncode != 0:
425 if load.returncode != 0:
420 raise Exception(
426 raise Exception(
421 'Failed to load the dump of remote repository from %s.' %
427 'Failed to load the dump of remote repository from %s.' %
422 (src_url, ))
428 (src_url, ))
423
429
424 def commit(self, wire, message, author, timestamp, updated, removed):
430 def commit(self, wire, message, author, timestamp, updated, removed):
425 assert isinstance(message, str)
431 assert isinstance(message, str)
426 assert isinstance(author, str)
432 assert isinstance(author, str)
427
433
428 repo = self._factory.repo(wire)
434 repo = self._factory.repo(wire)
429 fsobj = svn.repos.fs(repo)
435 fsobj = svn.repos.fs(repo)
430
436
431 rev = svn.fs.youngest_rev(fsobj)
437 rev = svn.fs.youngest_rev(fsobj)
432 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
438 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
433 txn_root = svn.fs.txn_root(txn)
439 txn_root = svn.fs.txn_root(txn)
434
440
435 for node in updated:
441 for node in updated:
436 TxnNodeProcessor(node, txn_root).update()
442 TxnNodeProcessor(node, txn_root).update()
437 for node in removed:
443 for node in removed:
438 TxnNodeProcessor(node, txn_root).remove()
444 TxnNodeProcessor(node, txn_root).remove()
439
445
440 commit_id = svn.repos.fs_commit_txn(repo, txn)
446 commit_id = svn.repos.fs_commit_txn(repo, txn)
441
447
442 if timestamp:
448 if timestamp:
443 apr_time = apr_time_t(timestamp)
449 apr_time = apr_time_t(timestamp)
444 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
450 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
445 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
451 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
446
452
447 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
453 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
448 return commit_id
454 return commit_id
449
455
450 def diff(self, wire, rev1, rev2, path1=None, path2=None,
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
451 ignore_whitespace=False, context=3):
457 ignore_whitespace=False, context=3):
452
458
453 wire.update(cache=False)
459 wire.update(cache=False)
454 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
455 diff_creator = SvnDiffer(
461 diff_creator = SvnDiffer(
456 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
457 try:
463 try:
458 return diff_creator.generate_diff()
464 return diff_creator.generate_diff()
459 except svn.core.SubversionException as e:
465 except svn.core.SubversionException as e:
460 log.exception(
466 log.exception(
461 "Error during diff operation operation. "
467 "Error during diff operation operation. "
462 "Path might not exist %s, %s" % (path1, path2))
468 "Path might not exist %s, %s" % (path1, path2))
463 return ""
469 return ""
464
470
465 @reraise_safe_exceptions
471 @reraise_safe_exceptions
466 def is_large_file(self, wire, path):
472 def is_large_file(self, wire, path):
467 return False
473 return False
468
474
469 @reraise_safe_exceptions
475 @reraise_safe_exceptions
470 def is_binary(self, wire, rev, path):
476 def is_binary(self, wire, rev, path):
471 cache_on, context_uid, repo_id = self._cache_on(wire)
477 cache_on, context_uid, repo_id = self._cache_on(wire)
472
478
473 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 region = self.region(wire)
480 @region.conditional_cache_on_arguments(condition=cache_on)
474 def _is_binary(_repo_id, _rev, _path):
481 def _is_binary(_repo_id, _rev, _path):
475 raw_bytes = self.get_file_content(wire, path, rev)
482 raw_bytes = self.get_file_content(wire, path, rev)
476 return raw_bytes and '\0' in raw_bytes
483 return raw_bytes and '\0' in raw_bytes
477
484
478 return _is_binary(repo_id, rev, path)
485 return _is_binary(repo_id, rev, path)
479
486
480 @reraise_safe_exceptions
487 @reraise_safe_exceptions
481 def run_svn_command(self, wire, cmd, **opts):
488 def run_svn_command(self, wire, cmd, **opts):
482 path = wire.get('path', None)
489 path = wire.get('path', None)
483
490
484 if path and os.path.isdir(path):
491 if path and os.path.isdir(path):
485 opts['cwd'] = path
492 opts['cwd'] = path
486
493
487 safe_call = opts.pop('_safe', False)
494 safe_call = opts.pop('_safe', False)
488
495
489 svnenv = os.environ.copy()
496 svnenv = os.environ.copy()
490 svnenv.update(opts.pop('extra_env', {}))
497 svnenv.update(opts.pop('extra_env', {}))
491
498
492 _opts = {'env': svnenv, 'shell': False}
499 _opts = {'env': svnenv, 'shell': False}
493
500
494 try:
501 try:
495 _opts.update(opts)
502 _opts.update(opts)
496 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
497
504
498 return ''.join(p), ''.join(p.error)
505 return ''.join(p), ''.join(p.error)
499 except (EnvironmentError, OSError) as err:
506 except (EnvironmentError, OSError) as err:
500 if safe_call:
507 if safe_call:
501 return '', safe_str(err).strip()
508 return '', safe_str(err).strip()
502 else:
509 else:
503 cmd = ' '.join(cmd) # human friendly CMD
510 cmd = ' '.join(cmd) # human friendly CMD
504 tb_err = ("Couldn't run svn command (%s).\n"
511 tb_err = ("Couldn't run svn command (%s).\n"
505 "Original error was:%s\n"
512 "Original error was:%s\n"
506 "Call options:%s\n"
513 "Call options:%s\n"
507 % (cmd, err, _opts))
514 % (cmd, err, _opts))
508 log.exception(tb_err)
515 log.exception(tb_err)
509 raise exceptions.VcsException()(tb_err)
516 raise exceptions.VcsException()(tb_err)
510
517
511 @reraise_safe_exceptions
518 @reraise_safe_exceptions
512 def install_hooks(self, wire, force=False):
519 def install_hooks(self, wire, force=False):
513 from vcsserver.hook_utils import install_svn_hooks
520 from vcsserver.hook_utils import install_svn_hooks
514 repo_path = wire['path']
521 repo_path = wire['path']
515 binary_dir = settings.BINARY_DIR
522 binary_dir = settings.BINARY_DIR
516 executable = None
523 executable = None
517 if binary_dir:
524 if binary_dir:
518 executable = os.path.join(binary_dir, 'python')
525 executable = os.path.join(binary_dir, 'python')
519 return install_svn_hooks(
526 return install_svn_hooks(
520 repo_path, executable=executable, force_create=force)
527 repo_path, executable=executable, force_create=force)
521
528
522 @reraise_safe_exceptions
529 @reraise_safe_exceptions
523 def get_hooks_info(self, wire):
530 def get_hooks_info(self, wire):
524 from vcsserver.hook_utils import (
531 from vcsserver.hook_utils import (
525 get_svn_pre_hook_version, get_svn_post_hook_version)
532 get_svn_pre_hook_version, get_svn_post_hook_version)
526 repo_path = wire['path']
533 repo_path = wire['path']
527 return {
534 return {
528 'pre_version': get_svn_pre_hook_version(repo_path),
535 'pre_version': get_svn_pre_hook_version(repo_path),
529 'post_version': get_svn_post_hook_version(repo_path),
536 'post_version': get_svn_post_hook_version(repo_path),
530 }
537 }
531
538
532 @reraise_safe_exceptions
539 @reraise_safe_exceptions
533 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
540 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
534 archive_dir_name, commit_id):
541 archive_dir_name, commit_id):
535
542
536 def walk_tree(root, root_dir, _commit_id):
543 def walk_tree(root, root_dir, _commit_id):
537 """
544 """
538 Special recursive svn repo walker
545 Special recursive svn repo walker
539 """
546 """
540
547
541 filemode_default = 0o100644
548 filemode_default = 0o100644
542 filemode_executable = 0o100755
549 filemode_executable = 0o100755
543
550
544 file_iter = svn.fs.dir_entries(root, root_dir)
551 file_iter = svn.fs.dir_entries(root, root_dir)
545 for f_name in file_iter:
552 for f_name in file_iter:
546 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
553 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
547
554
548 if f_type == 'dir':
555 if f_type == 'dir':
549 # return only DIR, and then all entries in that dir
556 # return only DIR, and then all entries in that dir
550 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
557 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
551 new_root = os.path.join(root_dir, f_name)
558 new_root = os.path.join(root_dir, f_name)
552 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
559 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
553 yield _f_name, _f_data, _f_type
560 yield _f_name, _f_data, _f_type
554 else:
561 else:
555 f_path = os.path.join(root_dir, f_name).rstrip('/')
562 f_path = os.path.join(root_dir, f_name).rstrip('/')
556 prop_list = svn.fs.node_proplist(root, f_path)
563 prop_list = svn.fs.node_proplist(root, f_path)
557
564
558 f_mode = filemode_default
565 f_mode = filemode_default
559 if prop_list.get('svn:executable'):
566 if prop_list.get('svn:executable'):
560 f_mode = filemode_executable
567 f_mode = filemode_executable
561
568
562 f_is_link = False
569 f_is_link = False
563 if prop_list.get('svn:special'):
570 if prop_list.get('svn:special'):
564 f_is_link = True
571 f_is_link = True
565
572
566 data = {
573 data = {
567 'is_link': f_is_link,
574 'is_link': f_is_link,
568 'mode': f_mode,
575 'mode': f_mode,
569 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
576 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
570 }
577 }
571
578
572 yield f_path, data, f_type
579 yield f_path, data, f_type
573
580
574 def file_walker(_commit_id, path):
581 def file_walker(_commit_id, path):
575 repo = self._factory.repo(wire)
582 repo = self._factory.repo(wire)
576 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
583 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
577
584
578 def no_content():
585 def no_content():
579 raise NoContentException()
586 raise NoContentException()
580
587
581 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
588 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
582 file_path = f_name
589 file_path = f_name
583
590
584 if f_type == 'dir':
591 if f_type == 'dir':
585 mode = f_data['mode']
592 mode = f_data['mode']
586 yield ArchiveNode(file_path, mode, False, no_content)
593 yield ArchiveNode(file_path, mode, False, no_content)
587 else:
594 else:
588 mode = f_data['mode']
595 mode = f_data['mode']
589 is_link = f_data['is_link']
596 is_link = f_data['is_link']
590 data_stream = f_data['content_stream']
597 data_stream = f_data['content_stream']
591 yield ArchiveNode(file_path, mode, is_link, data_stream)
598 yield ArchiveNode(file_path, mode, is_link, data_stream)
592
599
593 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
600 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
594 archive_dir_name, commit_id)
601 archive_dir_name, commit_id)
595
602
596
603
597 class SvnDiffer(object):
604 class SvnDiffer(object):
598 """
605 """
599 Utility to create diffs based on difflib and the Subversion api
606 Utility to create diffs based on difflib and the Subversion api
600 """
607 """
601
608
602 binary_content = False
609 binary_content = False
603
610
604 def __init__(
611 def __init__(
605 self, repo, src_rev, src_path, tgt_rev, tgt_path,
612 self, repo, src_rev, src_path, tgt_rev, tgt_path,
606 ignore_whitespace, context):
613 ignore_whitespace, context):
607 self.repo = repo
614 self.repo = repo
608 self.ignore_whitespace = ignore_whitespace
615 self.ignore_whitespace = ignore_whitespace
609 self.context = context
616 self.context = context
610
617
611 fsobj = svn.repos.fs(repo)
618 fsobj = svn.repos.fs(repo)
612
619
613 self.tgt_rev = tgt_rev
620 self.tgt_rev = tgt_rev
614 self.tgt_path = tgt_path or ''
621 self.tgt_path = tgt_path or ''
615 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
622 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
616 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
623 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
617
624
618 self.src_rev = src_rev
625 self.src_rev = src_rev
619 self.src_path = src_path or self.tgt_path
626 self.src_path = src_path or self.tgt_path
620 self.src_root = svn.fs.revision_root(fsobj, src_rev)
627 self.src_root = svn.fs.revision_root(fsobj, src_rev)
621 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
628 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
622
629
623 self._validate()
630 self._validate()
624
631
625 def _validate(self):
632 def _validate(self):
626 if (self.tgt_kind != svn.core.svn_node_none and
633 if (self.tgt_kind != svn.core.svn_node_none and
627 self.src_kind != svn.core.svn_node_none and
634 self.src_kind != svn.core.svn_node_none and
628 self.src_kind != self.tgt_kind):
635 self.src_kind != self.tgt_kind):
629 # TODO: johbo: proper error handling
636 # TODO: johbo: proper error handling
630 raise Exception(
637 raise Exception(
631 "Source and target are not compatible for diff generation. "
638 "Source and target are not compatible for diff generation. "
632 "Source type: %s, target type: %s" %
639 "Source type: %s, target type: %s" %
633 (self.src_kind, self.tgt_kind))
640 (self.src_kind, self.tgt_kind))
634
641
635 def generate_diff(self):
642 def generate_diff(self):
636 buf = StringIO.StringIO()
643 buf = StringIO.StringIO()
637 if self.tgt_kind == svn.core.svn_node_dir:
644 if self.tgt_kind == svn.core.svn_node_dir:
638 self._generate_dir_diff(buf)
645 self._generate_dir_diff(buf)
639 else:
646 else:
640 self._generate_file_diff(buf)
647 self._generate_file_diff(buf)
641 return buf.getvalue()
648 return buf.getvalue()
642
649
643 def _generate_dir_diff(self, buf):
650 def _generate_dir_diff(self, buf):
644 editor = DiffChangeEditor()
651 editor = DiffChangeEditor()
645 editor_ptr, editor_baton = svn.delta.make_editor(editor)
652 editor_ptr, editor_baton = svn.delta.make_editor(editor)
646 svn.repos.dir_delta2(
653 svn.repos.dir_delta2(
647 self.src_root,
654 self.src_root,
648 self.src_path,
655 self.src_path,
649 '', # src_entry
656 '', # src_entry
650 self.tgt_root,
657 self.tgt_root,
651 self.tgt_path,
658 self.tgt_path,
652 editor_ptr, editor_baton,
659 editor_ptr, editor_baton,
653 authorization_callback_allow_all,
660 authorization_callback_allow_all,
654 False, # text_deltas
661 False, # text_deltas
655 svn.core.svn_depth_infinity, # depth
662 svn.core.svn_depth_infinity, # depth
656 False, # entry_props
663 False, # entry_props
657 False, # ignore_ancestry
664 False, # ignore_ancestry
658 )
665 )
659
666
660 for path, __, change in sorted(editor.changes):
667 for path, __, change in sorted(editor.changes):
661 self._generate_node_diff(
668 self._generate_node_diff(
662 buf, change, path, self.tgt_path, path, self.src_path)
669 buf, change, path, self.tgt_path, path, self.src_path)
663
670
664 def _generate_file_diff(self, buf):
671 def _generate_file_diff(self, buf):
665 change = None
672 change = None
666 if self.src_kind == svn.core.svn_node_none:
673 if self.src_kind == svn.core.svn_node_none:
667 change = "add"
674 change = "add"
668 elif self.tgt_kind == svn.core.svn_node_none:
675 elif self.tgt_kind == svn.core.svn_node_none:
669 change = "delete"
676 change = "delete"
670 tgt_base, tgt_path = vcspath.split(self.tgt_path)
677 tgt_base, tgt_path = vcspath.split(self.tgt_path)
671 src_base, src_path = vcspath.split(self.src_path)
678 src_base, src_path = vcspath.split(self.src_path)
672 self._generate_node_diff(
679 self._generate_node_diff(
673 buf, change, tgt_path, tgt_base, src_path, src_base)
680 buf, change, tgt_path, tgt_base, src_path, src_base)
674
681
675 def _generate_node_diff(
682 def _generate_node_diff(
676 self, buf, change, tgt_path, tgt_base, src_path, src_base):
683 self, buf, change, tgt_path, tgt_base, src_path, src_base):
677
684
678 if self.src_rev == self.tgt_rev and tgt_base == src_base:
685 if self.src_rev == self.tgt_rev and tgt_base == src_base:
679 # makes consistent behaviour with git/hg to return empty diff if
686 # makes consistent behaviour with git/hg to return empty diff if
680 # we compare same revisions
687 # we compare same revisions
681 return
688 return
682
689
683 tgt_full_path = vcspath.join(tgt_base, tgt_path)
690 tgt_full_path = vcspath.join(tgt_base, tgt_path)
684 src_full_path = vcspath.join(src_base, src_path)
691 src_full_path = vcspath.join(src_base, src_path)
685
692
686 self.binary_content = False
693 self.binary_content = False
687 mime_type = self._get_mime_type(tgt_full_path)
694 mime_type = self._get_mime_type(tgt_full_path)
688
695
689 if mime_type and not mime_type.startswith('text'):
696 if mime_type and not mime_type.startswith('text'):
690 self.binary_content = True
697 self.binary_content = True
691 buf.write("=" * 67 + '\n')
698 buf.write("=" * 67 + '\n')
692 buf.write("Cannot display: file marked as a binary type.\n")
699 buf.write("Cannot display: file marked as a binary type.\n")
693 buf.write("svn:mime-type = %s\n" % mime_type)
700 buf.write("svn:mime-type = %s\n" % mime_type)
694 buf.write("Index: %s\n" % (tgt_path, ))
701 buf.write("Index: %s\n" % (tgt_path, ))
695 buf.write("=" * 67 + '\n')
702 buf.write("=" * 67 + '\n')
696 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
703 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
697 'tgt_path': tgt_path})
704 'tgt_path': tgt_path})
698
705
699 if change == 'add':
706 if change == 'add':
700 # TODO: johbo: SVN is missing a zero here compared to git
707 # TODO: johbo: SVN is missing a zero here compared to git
701 buf.write("new file mode 10644\n")
708 buf.write("new file mode 10644\n")
702
709
703 #TODO(marcink): intro to binary detection of svn patches
710 #TODO(marcink): intro to binary detection of svn patches
704 # if self.binary_content:
711 # if self.binary_content:
705 # buf.write('GIT binary patch\n')
712 # buf.write('GIT binary patch\n')
706
713
707 buf.write("--- /dev/null\t(revision 0)\n")
714 buf.write("--- /dev/null\t(revision 0)\n")
708 src_lines = []
715 src_lines = []
709 else:
716 else:
710 if change == 'delete':
717 if change == 'delete':
711 buf.write("deleted file mode 10644\n")
718 buf.write("deleted file mode 10644\n")
712
719
713 #TODO(marcink): intro to binary detection of svn patches
720 #TODO(marcink): intro to binary detection of svn patches
714 # if self.binary_content:
721 # if self.binary_content:
715 # buf.write('GIT binary patch\n')
722 # buf.write('GIT binary patch\n')
716
723
717 buf.write("--- a/%s\t(revision %s)\n" % (
724 buf.write("--- a/%s\t(revision %s)\n" % (
718 src_path, self.src_rev))
725 src_path, self.src_rev))
719 src_lines = self._svn_readlines(self.src_root, src_full_path)
726 src_lines = self._svn_readlines(self.src_root, src_full_path)
720
727
721 if change == 'delete':
728 if change == 'delete':
722 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
729 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
723 tgt_lines = []
730 tgt_lines = []
724 else:
731 else:
725 buf.write("+++ b/%s\t(revision %s)\n" % (
732 buf.write("+++ b/%s\t(revision %s)\n" % (
726 tgt_path, self.tgt_rev))
733 tgt_path, self.tgt_rev))
727 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
734 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
728
735
729 if not self.binary_content:
736 if not self.binary_content:
730 udiff = svn_diff.unified_diff(
737 udiff = svn_diff.unified_diff(
731 src_lines, tgt_lines, context=self.context,
738 src_lines, tgt_lines, context=self.context,
732 ignore_blank_lines=self.ignore_whitespace,
739 ignore_blank_lines=self.ignore_whitespace,
733 ignore_case=False,
740 ignore_case=False,
734 ignore_space_changes=self.ignore_whitespace)
741 ignore_space_changes=self.ignore_whitespace)
735 buf.writelines(udiff)
742 buf.writelines(udiff)
736
743
737 def _get_mime_type(self, path):
744 def _get_mime_type(self, path):
738 try:
745 try:
739 mime_type = svn.fs.node_prop(
746 mime_type = svn.fs.node_prop(
740 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
747 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
741 except svn.core.SubversionException:
748 except svn.core.SubversionException:
742 mime_type = svn.fs.node_prop(
749 mime_type = svn.fs.node_prop(
743 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
750 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
744 return mime_type
751 return mime_type
745
752
746 def _svn_readlines(self, fs_root, node_path):
753 def _svn_readlines(self, fs_root, node_path):
747 if self.binary_content:
754 if self.binary_content:
748 return []
755 return []
749 node_kind = svn.fs.check_path(fs_root, node_path)
756 node_kind = svn.fs.check_path(fs_root, node_path)
750 if node_kind not in (
757 if node_kind not in (
751 svn.core.svn_node_file, svn.core.svn_node_symlink):
758 svn.core.svn_node_file, svn.core.svn_node_symlink):
752 return []
759 return []
753 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
760 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
754 return content.splitlines(True)
761 return content.splitlines(True)
755
762
756
763
757 class DiffChangeEditor(svn.delta.Editor):
764 class DiffChangeEditor(svn.delta.Editor):
758 """
765 """
759 Records changes between two given revisions
766 Records changes between two given revisions
760 """
767 """
761
768
762 def __init__(self):
769 def __init__(self):
763 self.changes = []
770 self.changes = []
764
771
765 def delete_entry(self, path, revision, parent_baton, pool=None):
772 def delete_entry(self, path, revision, parent_baton, pool=None):
766 self.changes.append((path, None, 'delete'))
773 self.changes.append((path, None, 'delete'))
767
774
768 def add_file(
775 def add_file(
769 self, path, parent_baton, copyfrom_path, copyfrom_revision,
776 self, path, parent_baton, copyfrom_path, copyfrom_revision,
770 file_pool=None):
777 file_pool=None):
771 self.changes.append((path, 'file', 'add'))
778 self.changes.append((path, 'file', 'add'))
772
779
773 def open_file(self, path, parent_baton, base_revision, file_pool=None):
780 def open_file(self, path, parent_baton, base_revision, file_pool=None):
774 self.changes.append((path, 'file', 'change'))
781 self.changes.append((path, 'file', 'change'))
775
782
776
783
777 def authorization_callback_allow_all(root, path, pool):
784 def authorization_callback_allow_all(root, path, pool):
778 return True
785 return True
779
786
780
787
781 class TxnNodeProcessor(object):
788 class TxnNodeProcessor(object):
782 """
789 """
783 Utility to process the change of one node within a transaction root.
790 Utility to process the change of one node within a transaction root.
784
791
785 It encapsulates the knowledge of how to add, update or remove
792 It encapsulates the knowledge of how to add, update or remove
786 a node for a given transaction root. The purpose is to support the method
793 a node for a given transaction root. The purpose is to support the method
787 `SvnRemote.commit`.
794 `SvnRemote.commit`.
788 """
795 """
789
796
790 def __init__(self, node, txn_root):
797 def __init__(self, node, txn_root):
791 assert isinstance(node['path'], str)
798 assert isinstance(node['path'], str)
792
799
793 self.node = node
800 self.node = node
794 self.txn_root = txn_root
801 self.txn_root = txn_root
795
802
796 def update(self):
803 def update(self):
797 self._ensure_parent_dirs()
804 self._ensure_parent_dirs()
798 self._add_file_if_node_does_not_exist()
805 self._add_file_if_node_does_not_exist()
799 self._update_file_content()
806 self._update_file_content()
800 self._update_file_properties()
807 self._update_file_properties()
801
808
802 def remove(self):
809 def remove(self):
803 svn.fs.delete(self.txn_root, self.node['path'])
810 svn.fs.delete(self.txn_root, self.node['path'])
804 # TODO: Clean up directory if empty
811 # TODO: Clean up directory if empty
805
812
806 def _ensure_parent_dirs(self):
813 def _ensure_parent_dirs(self):
807 curdir = vcspath.dirname(self.node['path'])
814 curdir = vcspath.dirname(self.node['path'])
808 dirs_to_create = []
815 dirs_to_create = []
809 while not self._svn_path_exists(curdir):
816 while not self._svn_path_exists(curdir):
810 dirs_to_create.append(curdir)
817 dirs_to_create.append(curdir)
811 curdir = vcspath.dirname(curdir)
818 curdir = vcspath.dirname(curdir)
812
819
813 for curdir in reversed(dirs_to_create):
820 for curdir in reversed(dirs_to_create):
814 log.debug('Creating missing directory "%s"', curdir)
821 log.debug('Creating missing directory "%s"', curdir)
815 svn.fs.make_dir(self.txn_root, curdir)
822 svn.fs.make_dir(self.txn_root, curdir)
816
823
817 def _svn_path_exists(self, path):
824 def _svn_path_exists(self, path):
818 path_status = svn.fs.check_path(self.txn_root, path)
825 path_status = svn.fs.check_path(self.txn_root, path)
819 return path_status != svn.core.svn_node_none
826 return path_status != svn.core.svn_node_none
820
827
821 def _add_file_if_node_does_not_exist(self):
828 def _add_file_if_node_does_not_exist(self):
822 kind = svn.fs.check_path(self.txn_root, self.node['path'])
829 kind = svn.fs.check_path(self.txn_root, self.node['path'])
823 if kind == svn.core.svn_node_none:
830 if kind == svn.core.svn_node_none:
824 svn.fs.make_file(self.txn_root, self.node['path'])
831 svn.fs.make_file(self.txn_root, self.node['path'])
825
832
826 def _update_file_content(self):
833 def _update_file_content(self):
827 assert isinstance(self.node['content'], str)
834 assert isinstance(self.node['content'], str)
828 handler, baton = svn.fs.apply_textdelta(
835 handler, baton = svn.fs.apply_textdelta(
829 self.txn_root, self.node['path'], None, None)
836 self.txn_root, self.node['path'], None, None)
830 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
837 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
831
838
832 def _update_file_properties(self):
839 def _update_file_properties(self):
833 properties = self.node.get('properties', {})
840 properties = self.node.get('properties', {})
834 for key, value in properties.iteritems():
841 for key, value in properties.iteritems():
835 svn.fs.change_node_prop(
842 svn.fs.change_node_prop(
836 self.txn_root, self.node['path'], key, value)
843 self.txn_root, self.node['path'], key, value)
837
844
838
845
839 def apr_time_t(timestamp):
846 def apr_time_t(timestamp):
840 """
847 """
841 Convert a Python timestamp into APR timestamp type apr_time_t
848 Convert a Python timestamp into APR timestamp type apr_time_t
842 """
849 """
843 return timestamp * 1E6
850 return timestamp * 1E6
844
851
845
852
846 def svn_opt_revision_value_t(num):
853 def svn_opt_revision_value_t(num):
847 """
854 """
848 Put `num` into a `svn_opt_revision_value_t` structure.
855 Put `num` into a `svn_opt_revision_value_t` structure.
849 """
856 """
850 value = svn.core.svn_opt_revision_value_t()
857 value = svn.core.svn_opt_revision_value_t()
851 value.number = num
858 value.number = num
852 revision = svn.core.svn_opt_revision_t()
859 revision = svn.core.svn_opt_revision_t()
853 revision.kind = svn.core.svn_opt_revision_number
860 revision.kind = svn.core.svn_opt_revision_number
854 revision.value = value
861 revision.value = value
855 return revision
862 return revision
@@ -1,32 +1,45 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver.lib import rc_cache
18
19
19 class RemoteBase(object):
20 class RemoteBase(object):
20 EMPTY_COMMIT = '0' * 40
21 EMPTY_COMMIT = '0' * 40
21
22
22 @property
23 def region(self, wire):
23 def region(self):
24 repo_id = wire.get('repo_id', '')
24 return self._factory._cache_region
25 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
26 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
25
27
26 def _cache_on(self, wire):
28 def _cache_on(self, wire):
27 context = wire.get('context', '')
29 context = wire.get('context', '')
28 context_uid = '{}'.format(context)
30 context_uid = '{}'.format(context)
29 repo_id = wire.get('repo_id', '')
31 repo_id = wire.get('repo_id', '')
30 cache = wire.get('cache', True)
32 cache = wire.get('cache', True)
31 cache_on = context and cache
33 cache_on = context and cache
32 return cache_on, context_uid, repo_id
34 return cache_on, context_uid, repo_id
35
36 def vcsserver_invalidate_cache(self, wire, delete):
37 from vcsserver.lib import rc_cache
38 repo_id = wire.get('repo_id', '')
39
40 if delete:
41 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
42 rc_cache.clear_cache_namespace(
43 'repo_object', cache_namespace_uid, invalidate=True)
44
45 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
General Comments 0
You need to be logged in to leave comments. Login now