##// END OF EJS Templates
scm: added md5 methods to be calculated on vcsserver instead of RhodeCode side
super-admin -
r1074:ccf62cdf python3
parent child Browse files
Show More
@@ -1,208 +1,207 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import time
19 import time
20 import logging
20 import logging
21 import functools
21 import functools
22 import decorator
22 import decorator
23
23
24 from dogpile.cache import CacheRegion
24 from dogpile.cache import CacheRegion
25
25
26 from vcsserver.str_utils import safe_bytes
26 from vcsserver.str_utils import safe_bytes
27 from vcsserver.utils import sha1
27 from vcsserver.utils import sha1
28 from vcsserver.lib.rc_cache import region_meta
28 from vcsserver.lib.rc_cache import region_meta
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 class RhodeCodeCacheRegion(CacheRegion):
33 class RhodeCodeCacheRegion(CacheRegion):
34
34
35 def conditional_cache_on_arguments(
35 def conditional_cache_on_arguments(
36 self, namespace=None,
36 self, namespace=None,
37 expiration_time=None,
37 expiration_time=None,
38 should_cache_fn=None,
38 should_cache_fn=None,
39 to_str=str,
39 to_str=str,
40 function_key_generator=None,
40 function_key_generator=None,
41 condition=True):
41 condition=True):
42 """
42 """
43 Custom conditional decorator, that will not touch any dogpile internals if
43 Custom conditional decorator, that will not touch any dogpile internals if
44 condition isn't meet. This works a bit different than should_cache_fn
44 condition isn't meet. This works a bit different than should_cache_fn
45 And it's faster in cases we don't ever want to compute cached values
45 And it's faster in cases we don't ever want to compute cached values
46 """
46 """
47 expiration_time_is_callable = callable(expiration_time)
47 expiration_time_is_callable = callable(expiration_time)
48
48
49 if function_key_generator is None:
49 if function_key_generator is None:
50 function_key_generator = self.function_key_generator
50 function_key_generator = self.function_key_generator
51
51
52 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
52 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
53
53
54 if not condition:
54 if not condition:
55 log.debug('Calling un-cached method:%s', user_func.__name__)
55 log.debug('Calling un-cached method:%s', user_func.__name__)
56 start = time.time()
56 start = time.time()
57 result = user_func(*arg, **kw)
57 result = user_func(*arg, **kw)
58 total = time.time() - start
58 total = time.time() - start
59 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
59 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
60 return result
60 return result
61
61
62 key = key_generator(*arg, **kw)
62 key = key_generator(*arg, **kw)
63
63
64 timeout = expiration_time() if expiration_time_is_callable \
64 timeout = expiration_time() if expiration_time_is_callable \
65 else expiration_time
65 else expiration_time
66
66
67 log.debug('Calling cached method:`%s`', user_func.__name__)
67 log.debug('Calling cached method:`%s`', user_func.__name__)
68 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
68 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
69
69
70 def cache_decorator(user_func):
70 def cache_decorator(user_func):
71 if to_str is str:
71 if to_str is str:
72 # backwards compatible
72 # backwards compatible
73 key_generator = function_key_generator(namespace, user_func)
73 key_generator = function_key_generator(namespace, user_func)
74 else:
74 else:
75 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
75 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
76
76
77 def refresh(*arg, **kw):
77 def refresh(*arg, **kw):
78 """
78 """
79 Like invalidate, but regenerates the value instead
79 Like invalidate, but regenerates the value instead
80 """
80 """
81 key = key_generator(*arg, **kw)
81 key = key_generator(*arg, **kw)
82 value = user_func(*arg, **kw)
82 value = user_func(*arg, **kw)
83 self.set(key, value)
83 self.set(key, value)
84 return value
84 return value
85
85
86 def invalidate(*arg, **kw):
86 def invalidate(*arg, **kw):
87 key = key_generator(*arg, **kw)
87 key = key_generator(*arg, **kw)
88 self.delete(key)
88 self.delete(key)
89
89
90 def set_(value, *arg, **kw):
90 def set_(value, *arg, **kw):
91 key = key_generator(*arg, **kw)
91 key = key_generator(*arg, **kw)
92 self.set(key, value)
92 self.set(key, value)
93
93
94 def get(*arg, **kw):
94 def get(*arg, **kw):
95 key = key_generator(*arg, **kw)
95 key = key_generator(*arg, **kw)
96 return self.get(key)
96 return self.get(key)
97
97
98 user_func.set = set_
98 user_func.set = set_
99 user_func.invalidate = invalidate
99 user_func.invalidate = invalidate
100 user_func.get = get
100 user_func.get = get
101 user_func.refresh = refresh
101 user_func.refresh = refresh
102 user_func.key_generator = key_generator
102 user_func.key_generator = key_generator
103 user_func.original = user_func
103 user_func.original = user_func
104
104
105 # Use `decorate` to preserve the signature of :param:`user_func`.
105 # Use `decorate` to preserve the signature of :param:`user_func`.
106 return decorator.decorate(user_func, functools.partial(
106 return decorator.decorate(user_func, functools.partial(
107 get_or_create_for_user_func, key_generator))
107 get_or_create_for_user_func, key_generator))
108
108
109 return cache_decorator
109 return cache_decorator
110
110
111
111
112 def make_region(*arg, **kw):
112 def make_region(*arg, **kw):
113 return RhodeCodeCacheRegion(*arg, **kw)
113 return RhodeCodeCacheRegion(*arg, **kw)
114
114
115
115
116 def get_default_cache_settings(settings, prefixes=None):
116 def get_default_cache_settings(settings, prefixes=None):
117 prefixes = prefixes or []
117 prefixes = prefixes or []
118 cache_settings = {}
118 cache_settings = {}
119 for key in settings.keys():
119 for key in settings.keys():
120 for prefix in prefixes:
120 for prefix in prefixes:
121 if key.startswith(prefix):
121 if key.startswith(prefix):
122 name = key.split(prefix)[1].strip()
122 name = key.split(prefix)[1].strip()
123 val = settings[key]
123 val = settings[key]
124 if isinstance(val, str):
124 if isinstance(val, str):
125 val = val.strip()
125 val = val.strip()
126 cache_settings[name] = val
126 cache_settings[name] = val
127 return cache_settings
127 return cache_settings
128
128
129
129
130 def compute_key_from_params(*args):
130 def compute_key_from_params(*args):
131 """
131 """
132 Helper to compute key from given params to be used in cache manager
132 Helper to compute key from given params to be used in cache manager
133 """
133 """
134 return sha1(safe_bytes("_".join(map(str, args))))
134 return sha1(safe_bytes("_".join(map(str, args))))
135
135
136
136
137 def backend_key_generator(backend):
137 def backend_key_generator(backend):
138 """
138 """
139 Special wrapper that also sends over the backend to the key generator
139 Special wrapper that also sends over the backend to the key generator
140 """
140 """
141 def wrapper(namespace, fn):
141 def wrapper(namespace, fn):
142 return key_generator(backend, namespace, fn)
142 return key_generator(backend, namespace, fn)
143 return wrapper
143 return wrapper
144
144
145
145
146 def key_generator(backend, namespace, fn):
146 def key_generator(backend, namespace, fn):
147 fname = fn.__name__
147 fname = fn.__name__
148
148
149 def generate_key(*args):
149 def generate_key(*args):
150 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
150 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
151 namespace_pref = namespace or 'default_namespace'
151 namespace_pref = namespace or 'default_namespace'
152 arg_key = compute_key_from_params(*args)
152 arg_key = compute_key_from_params(*args)
153 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
153 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
154
154
155 return final_key
155 return final_key
156
156
157 return generate_key
157 return generate_key
158
158
159
159
160 def get_or_create_region(region_name, region_namespace=None):
160 def get_or_create_region(region_name, region_namespace=None):
161 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
161 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
162 region_obj = region_meta.dogpile_cache_regions.get(region_name)
162 region_obj = region_meta.dogpile_cache_regions.get(region_name)
163 if not region_obj:
163 if not region_obj:
164 raise EnvironmentError(
164 reg_keys = list(region_meta.dogpile_cache_regions.keys())
165 'Region `{}` not in configured: {}.'.format(
165 raise EnvironmentError(f'Region `{region_name}` not in configured: {reg_keys}.')
166 region_name, list(region_meta.dogpile_cache_regions.keys())))
167
166
168 region_uid_name = '{}:{}'.format(region_name, region_namespace)
167 region_uid_name = f'{region_name}:{region_namespace}'
169 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
168 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
170 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
169 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
171 if region_exist:
170 if region_exist:
172 log.debug('Using already configured region: %s', region_namespace)
171 log.debug('Using already configured region: %s', region_namespace)
173 return region_exist
172 return region_exist
174 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
173 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
175 expiration_time = region_obj.expiration_time
174 expiration_time = region_obj.expiration_time
176
175
177 if not os.path.isdir(cache_dir):
176 if not os.path.isdir(cache_dir):
178 os.makedirs(cache_dir)
177 os.makedirs(cache_dir)
179 new_region = make_region(
178 new_region = make_region(
180 name=region_uid_name,
179 name=region_uid_name,
181 function_key_generator=backend_key_generator(region_obj.actual_backend)
180 function_key_generator=backend_key_generator(region_obj.actual_backend)
182 )
181 )
183 namespace_filename = os.path.join(
182 namespace_filename = os.path.join(
184 cache_dir, "{}.cache.dbm".format(region_namespace))
183 cache_dir, f"{region_namespace}.cache.dbm")
185 # special type that allows 1db per namespace
184 # special type that allows 1db per namespace
186 new_region.configure(
185 new_region.configure(
187 backend='dogpile.cache.rc.file_namespace',
186 backend='dogpile.cache.rc.file_namespace',
188 expiration_time=expiration_time,
187 expiration_time=expiration_time,
189 arguments={"filename": namespace_filename}
188 arguments={"filename": namespace_filename}
190 )
189 )
191
190
192 # create and save in region caches
191 # create and save in region caches
193 log.debug('configuring new region: %s', region_uid_name)
192 log.debug('configuring new region: %s', region_uid_name)
194 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
193 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
195
194
196 return region_obj
195 return region_obj
197
196
198
197
199 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
198 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
200 region = get_or_create_region(cache_region, cache_namespace_uid)
199 region = get_or_create_region(cache_region, cache_namespace_uid)
201 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
200 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
202 num_delete_keys = len(cache_keys)
201 num_delete_keys = len(cache_keys)
203 if invalidate:
202 if invalidate:
204 region.invalidate(hard=False)
203 region.invalidate(hard=False)
205 else:
204 else:
206 if num_delete_keys:
205 if num_delete_keys:
207 region.delete_multi(cache_keys)
206 region.delete_multi(cache_keys)
208 return num_delete_keys
207 return num_delete_keys
@@ -1,1332 +1,1343 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib.request, urllib.parse, urllib.error
25 import urllib.request, urllib.parse, urllib.error
26 import urllib.request, urllib.error, urllib.parse
26 import urllib.request, urllib.error, urllib.parse
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes
43 from vcsserver.str_utils import safe_str, safe_int, safe_bytes
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
50 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = b'^{}'
53 PEELED_REF_MARKER = b'^{}'
54
54
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def reraise_safe_exceptions(func):
59 def reraise_safe_exceptions(func):
60 """Converts Dulwich exceptions to something neutral."""
60 """Converts Dulwich exceptions to something neutral."""
61
61
62 @wraps(func)
62 @wraps(func)
63 def wrapper(*args, **kwargs):
63 def wrapper(*args, **kwargs):
64 try:
64 try:
65 return func(*args, **kwargs)
65 return func(*args, **kwargs)
66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
66 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
67 exc = exceptions.LookupException(org_exc=e)
67 exc = exceptions.LookupException(org_exc=e)
68 raise exc(safe_str(e))
68 raise exc(safe_str(e))
69 except (HangupException, UnexpectedCommandError) as e:
69 except (HangupException, UnexpectedCommandError) as e:
70 exc = exceptions.VcsException(org_exc=e)
70 exc = exceptions.VcsException(org_exc=e)
71 raise exc(safe_str(e))
71 raise exc(safe_str(e))
72 except Exception as e:
72 except Exception as e:
73 # NOTE(marcink): becuase of how dulwich handles some exceptions
73 # NOTE(marcink): becuase of how dulwich handles some exceptions
74 # (KeyError on empty repos), we cannot track this and catch all
74 # (KeyError on empty repos), we cannot track this and catch all
75 # exceptions, it's an exceptions from other handlers
75 # exceptions, it's an exceptions from other handlers
76 #if not hasattr(e, '_vcs_kind'):
76 #if not hasattr(e, '_vcs_kind'):
77 #log.exception("Unhandled exception in git remote call")
77 #log.exception("Unhandled exception in git remote call")
78 #raise_from_original(exceptions.UnhandledException)
78 #raise_from_original(exceptions.UnhandledException)
79 raise
79 raise
80 return wrapper
80 return wrapper
81
81
82
82
83 class Repo(DulwichRepo):
83 class Repo(DulwichRepo):
84 """
84 """
85 A wrapper for dulwich Repo class.
85 A wrapper for dulwich Repo class.
86
86
87 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
87 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
88 "Too many open files" error. We need to close all opened file descriptors
88 "Too many open files" error. We need to close all opened file descriptors
89 once the repo object is destroyed.
89 once the repo object is destroyed.
90 """
90 """
91 def __del__(self):
91 def __del__(self):
92 if hasattr(self, 'object_store'):
92 if hasattr(self, 'object_store'):
93 self.close()
93 self.close()
94
94
95
95
96 class Repository(LibGit2Repo):
96 class Repository(LibGit2Repo):
97
97
98 def __enter__(self):
98 def __enter__(self):
99 return self
99 return self
100
100
101 def __exit__(self, exc_type, exc_val, exc_tb):
101 def __exit__(self, exc_type, exc_val, exc_tb):
102 self.free()
102 self.free()
103
103
104
104
105 class GitFactory(RepoFactory):
105 class GitFactory(RepoFactory):
106 repo_type = 'git'
106 repo_type = 'git'
107
107
108 def _create_repo(self, wire, create, use_libgit2=False):
108 def _create_repo(self, wire, create, use_libgit2=False):
109 if use_libgit2:
109 if use_libgit2:
110 return Repository(wire['path'])
110 return Repository(wire['path'])
111 else:
111 else:
112 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
112 repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
113 return Repo(repo_path)
113 return Repo(repo_path)
114
114
115 def repo(self, wire, create=False, use_libgit2=False):
115 def repo(self, wire, create=False, use_libgit2=False):
116 """
116 """
117 Get a repository instance for the given path.
117 Get a repository instance for the given path.
118 """
118 """
119 return self._create_repo(wire, create, use_libgit2)
119 return self._create_repo(wire, create, use_libgit2)
120
120
121 def repo_libgit2(self, wire):
121 def repo_libgit2(self, wire):
122 return self.repo(wire, use_libgit2=True)
122 return self.repo(wire, use_libgit2=True)
123
123
124
124
125 class GitRemote(RemoteBase):
125 class GitRemote(RemoteBase):
126
126
127 def __init__(self, factory):
127 def __init__(self, factory):
128 self._factory = factory
128 self._factory = factory
129 self._bulk_methods = {
129 self._bulk_methods = {
130 "date": self.date,
130 "date": self.date,
131 "author": self.author,
131 "author": self.author,
132 "branch": self.branch,
132 "branch": self.branch,
133 "message": self.message,
133 "message": self.message,
134 "parents": self.parents,
134 "parents": self.parents,
135 "_commit": self.revision,
135 "_commit": self.revision,
136 }
136 }
137
137
138 def _wire_to_config(self, wire):
138 def _wire_to_config(self, wire):
139 if 'config' in wire:
139 if 'config' in wire:
140 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
140 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
141 return {}
141 return {}
142
142
143 def _remote_conf(self, config):
143 def _remote_conf(self, config):
144 params = [
144 params = [
145 '-c', 'core.askpass=""',
145 '-c', 'core.askpass=""',
146 ]
146 ]
147 ssl_cert_dir = config.get('vcs_ssl_dir')
147 ssl_cert_dir = config.get('vcs_ssl_dir')
148 if ssl_cert_dir:
148 if ssl_cert_dir:
149 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
149 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
150 return params
150 return params
151
151
152 @reraise_safe_exceptions
152 @reraise_safe_exceptions
153 def discover_git_version(self):
153 def discover_git_version(self):
154 stdout, _ = self.run_git_command(
154 stdout, _ = self.run_git_command(
155 {}, ['--version'], _bare=True, _safe=True)
155 {}, ['--version'], _bare=True, _safe=True)
156 prefix = b'git version'
156 prefix = b'git version'
157 if stdout.startswith(prefix):
157 if stdout.startswith(prefix):
158 stdout = stdout[len(prefix):]
158 stdout = stdout[len(prefix):]
159 return safe_str(stdout.strip())
159 return safe_str(stdout.strip())
160
160
161 @reraise_safe_exceptions
161 @reraise_safe_exceptions
162 def is_empty(self, wire):
162 def is_empty(self, wire):
163 repo_init = self._factory.repo_libgit2(wire)
163 repo_init = self._factory.repo_libgit2(wire)
164 with repo_init as repo:
164 with repo_init as repo:
165
165
166 try:
166 try:
167 has_head = repo.head.name
167 has_head = repo.head.name
168 if has_head:
168 if has_head:
169 return False
169 return False
170
170
171 # NOTE(marcink): check again using more expensive method
171 # NOTE(marcink): check again using more expensive method
172 return repo.is_empty
172 return repo.is_empty
173 except Exception:
173 except Exception:
174 pass
174 pass
175
175
176 return True
176 return True
177
177
178 @reraise_safe_exceptions
178 @reraise_safe_exceptions
179 def assert_correct_path(self, wire):
179 def assert_correct_path(self, wire):
180 cache_on, context_uid, repo_id = self._cache_on(wire)
180 cache_on, context_uid, repo_id = self._cache_on(wire)
181 region = self._region(wire)
181 region = self._region(wire)
182
182
183 @region.conditional_cache_on_arguments(condition=cache_on)
183 @region.conditional_cache_on_arguments(condition=cache_on)
184 def _assert_correct_path(_context_uid, _repo_id):
184 def _assert_correct_path(_context_uid, _repo_id):
185 try:
185 try:
186 repo_init = self._factory.repo_libgit2(wire)
186 repo_init = self._factory.repo_libgit2(wire)
187 with repo_init as repo:
187 with repo_init as repo:
188 pass
188 pass
189 except pygit2.GitError:
189 except pygit2.GitError:
190 path = wire.get('path')
190 path = wire.get('path')
191 tb = traceback.format_exc()
191 tb = traceback.format_exc()
192 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
192 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
193 return False
193 return False
194
194
195 return True
195 return True
196 return _assert_correct_path(context_uid, repo_id)
196 return _assert_correct_path(context_uid, repo_id)
197
197
198 @reraise_safe_exceptions
198 @reraise_safe_exceptions
199 def bare(self, wire):
199 def bare(self, wire):
200 repo_init = self._factory.repo_libgit2(wire)
200 repo_init = self._factory.repo_libgit2(wire)
201 with repo_init as repo:
201 with repo_init as repo:
202 return repo.is_bare
202 return repo.is_bare
203
203
204 @reraise_safe_exceptions
204 @reraise_safe_exceptions
205 def blob_as_pretty_string(self, wire, sha):
205 def blob_as_pretty_string(self, wire, sha):
206 repo_init = self._factory.repo_libgit2(wire)
206 repo_init = self._factory.repo_libgit2(wire)
207 with repo_init as repo:
207 with repo_init as repo:
208 blob_obj = repo[sha]
208 blob_obj = repo[sha]
209 blob = blob_obj.data
209 blob = blob_obj.data
210 return blob
210 return blob
211
211
212 @reraise_safe_exceptions
212 @reraise_safe_exceptions
213 def blob_raw_length(self, wire, sha):
213 def blob_raw_length(self, wire, sha):
214 cache_on, context_uid, repo_id = self._cache_on(wire)
214 cache_on, context_uid, repo_id = self._cache_on(wire)
215 region = self._region(wire)
215 region = self._region(wire)
216
216
217 @region.conditional_cache_on_arguments(condition=cache_on)
217 @region.conditional_cache_on_arguments(condition=cache_on)
218 def _blob_raw_length(_repo_id, _sha):
218 def _blob_raw_length(_repo_id, _sha):
219
219
220 repo_init = self._factory.repo_libgit2(wire)
220 repo_init = self._factory.repo_libgit2(wire)
221 with repo_init as repo:
221 with repo_init as repo:
222 blob = repo[sha]
222 blob = repo[sha]
223 return blob.size
223 return blob.size
224
224
225 return _blob_raw_length(repo_id, sha)
225 return _blob_raw_length(repo_id, sha)
226
226
227 def _parse_lfs_pointer(self, raw_content):
227 def _parse_lfs_pointer(self, raw_content):
228 spec_string = b'version https://git-lfs.github.com/spec'
228 spec_string = b'version https://git-lfs.github.com/spec'
229 if raw_content and raw_content.startswith(spec_string):
229 if raw_content and raw_content.startswith(spec_string):
230
230
231 pattern = re.compile(rb"""
231 pattern = re.compile(rb"""
232 (?:\n)?
232 (?:\n)?
233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
233 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
234 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
235 ^size[ ](?P<oid_size>[0-9]+)\n
235 ^size[ ](?P<oid_size>[0-9]+)\n
236 (?:\n)?
236 (?:\n)?
237 """, re.VERBOSE | re.MULTILINE)
237 """, re.VERBOSE | re.MULTILINE)
238 match = pattern.match(raw_content)
238 match = pattern.match(raw_content)
239 if match:
239 if match:
240 return match.groupdict()
240 return match.groupdict()
241
241
242 return {}
242 return {}
243
243
244 @reraise_safe_exceptions
244 @reraise_safe_exceptions
245 def is_large_file(self, wire, commit_id):
245 def is_large_file(self, wire, commit_id):
246 cache_on, context_uid, repo_id = self._cache_on(wire)
246 cache_on, context_uid, repo_id = self._cache_on(wire)
247 region = self._region(wire)
247 region = self._region(wire)
248
248
249 @region.conditional_cache_on_arguments(condition=cache_on)
249 @region.conditional_cache_on_arguments(condition=cache_on)
250 def _is_large_file(_repo_id, _sha):
250 def _is_large_file(_repo_id, _sha):
251 repo_init = self._factory.repo_libgit2(wire)
251 repo_init = self._factory.repo_libgit2(wire)
252 with repo_init as repo:
252 with repo_init as repo:
253 blob = repo[commit_id]
253 blob = repo[commit_id]
254 if blob.is_binary:
254 if blob.is_binary:
255 return {}
255 return {}
256
256
257 return self._parse_lfs_pointer(blob.data)
257 return self._parse_lfs_pointer(blob.data)
258
258
259 return _is_large_file(repo_id, commit_id)
259 return _is_large_file(repo_id, commit_id)
260
260
261 @reraise_safe_exceptions
261 @reraise_safe_exceptions
262 def is_binary(self, wire, tree_id):
262 def is_binary(self, wire, tree_id):
263 cache_on, context_uid, repo_id = self._cache_on(wire)
263 cache_on, context_uid, repo_id = self._cache_on(wire)
264 region = self._region(wire)
264 region = self._region(wire)
265
265
266 @region.conditional_cache_on_arguments(condition=cache_on)
266 @region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
271 return blob_obj.is_binary
272
272
273 return _is_binary(repo_id, tree_id)
273 return _is_binary(repo_id, tree_id)
274
274
275 @reraise_safe_exceptions
275 @reraise_safe_exceptions
276 def md5_hash(self, wire, tree_id):
277 cache_on, context_uid, repo_id = self._cache_on(wire)
278 region = self._region(wire)
279
280 @region.conditional_cache_on_arguments(condition=cache_on)
281 def _md5_hash(_repo_id, _tree_id):
282 return ''
283
284 return _md5_hash(repo_id, tree_id)
285
286 @reraise_safe_exceptions
276 def in_largefiles_store(self, wire, oid):
287 def in_largefiles_store(self, wire, oid):
277 conf = self._wire_to_config(wire)
288 conf = self._wire_to_config(wire)
278 repo_init = self._factory.repo_libgit2(wire)
289 repo_init = self._factory.repo_libgit2(wire)
279 with repo_init as repo:
290 with repo_init as repo:
280 repo_name = repo.path
291 repo_name = repo.path
281
292
282 store_location = conf.get('vcs_git_lfs_store_location')
293 store_location = conf.get('vcs_git_lfs_store_location')
283 if store_location:
294 if store_location:
284
295
285 store = LFSOidStore(
296 store = LFSOidStore(
286 oid=oid, repo=repo_name, store_location=store_location)
297 oid=oid, repo=repo_name, store_location=store_location)
287 return store.has_oid()
298 return store.has_oid()
288
299
289 return False
300 return False
290
301
291 @reraise_safe_exceptions
302 @reraise_safe_exceptions
292 def store_path(self, wire, oid):
303 def store_path(self, wire, oid):
293 conf = self._wire_to_config(wire)
304 conf = self._wire_to_config(wire)
294 repo_init = self._factory.repo_libgit2(wire)
305 repo_init = self._factory.repo_libgit2(wire)
295 with repo_init as repo:
306 with repo_init as repo:
296 repo_name = repo.path
307 repo_name = repo.path
297
308
298 store_location = conf.get('vcs_git_lfs_store_location')
309 store_location = conf.get('vcs_git_lfs_store_location')
299 if store_location:
310 if store_location:
300 store = LFSOidStore(
311 store = LFSOidStore(
301 oid=oid, repo=repo_name, store_location=store_location)
312 oid=oid, repo=repo_name, store_location=store_location)
302 return store.oid_path
313 return store.oid_path
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
314 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304
315
305 @reraise_safe_exceptions
316 @reraise_safe_exceptions
306 def bulk_request(self, wire, rev, pre_load):
317 def bulk_request(self, wire, rev, pre_load):
307 cache_on, context_uid, repo_id = self._cache_on(wire)
318 cache_on, context_uid, repo_id = self._cache_on(wire)
308 region = self._region(wire)
319 region = self._region(wire)
309
320
310 @region.conditional_cache_on_arguments(condition=cache_on)
321 @region.conditional_cache_on_arguments(condition=cache_on)
311 def _bulk_request(_repo_id, _rev, _pre_load):
322 def _bulk_request(_repo_id, _rev, _pre_load):
312 result = {}
323 result = {}
313 for attr in pre_load:
324 for attr in pre_load:
314 try:
325 try:
315 method = self._bulk_methods[attr]
326 method = self._bulk_methods[attr]
316 args = [wire, rev]
327 args = [wire, rev]
317 result[attr] = method(*args)
328 result[attr] = method(*args)
318 except KeyError as e:
329 except KeyError as e:
319 raise exceptions.VcsException(e)(
330 raise exceptions.VcsException(e)(
320 "Unknown bulk attribute: %s" % attr)
331 "Unknown bulk attribute: %s" % attr)
321 return result
332 return result
322
333
323 return _bulk_request(repo_id, rev, sorted(pre_load))
334 return _bulk_request(repo_id, rev, sorted(pre_load))
324
335
325 def _build_opener(self, url):
336 def _build_opener(self, url):
326 handlers = []
337 handlers = []
327 url_obj = url_parser(url)
338 url_obj = url_parser(url)
328 _, authinfo = url_obj.authinfo()
339 _, authinfo = url_obj.authinfo()
329
340
330 if authinfo:
341 if authinfo:
331 # create a password manager
342 # create a password manager
332 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
343 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
333 passmgr.add_password(*authinfo)
344 passmgr.add_password(*authinfo)
334
345
335 handlers.extend((httpbasicauthhandler(passmgr),
346 handlers.extend((httpbasicauthhandler(passmgr),
336 httpdigestauthhandler(passmgr)))
347 httpdigestauthhandler(passmgr)))
337
348
338 return urllib.request.build_opener(*handlers)
349 return urllib.request.build_opener(*handlers)
339
350
340 def _type_id_to_name(self, type_id: int):
351 def _type_id_to_name(self, type_id: int):
341 return {
352 return {
342 1: 'commit',
353 1: 'commit',
343 2: 'tree',
354 2: 'tree',
344 3: 'blob',
355 3: 'blob',
345 4: 'tag'
356 4: 'tag'
346 }[type_id]
357 }[type_id]
347
358
348 @reraise_safe_exceptions
359 @reraise_safe_exceptions
349 def check_url(self, url, config):
360 def check_url(self, url, config):
350 url_obj = url_parser(url)
361 url_obj = url_parser(url)
351 test_uri, _ = url_obj.authinfo()
362 test_uri, _ = url_obj.authinfo()
352 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
363 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
353 url_obj.query = obfuscate_qs(url_obj.query)
364 url_obj.query = obfuscate_qs(url_obj.query)
354 cleaned_uri = str(url_obj)
365 cleaned_uri = str(url_obj)
355 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
366 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
356
367
357 if not test_uri.endswith('info/refs'):
368 if not test_uri.endswith('info/refs'):
358 test_uri = test_uri.rstrip('/') + '/info/refs'
369 test_uri = test_uri.rstrip('/') + '/info/refs'
359
370
360 o = self._build_opener(url)
371 o = self._build_opener(url)
361 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
372 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
362
373
363 q = {"service": 'git-upload-pack'}
374 q = {"service": 'git-upload-pack'}
364 qs = '?%s' % urllib.parse.urlencode(q)
375 qs = '?%s' % urllib.parse.urlencode(q)
365 cu = "%s%s" % (test_uri, qs)
376 cu = "%s%s" % (test_uri, qs)
366 req = urllib.request.Request(cu, None, {})
377 req = urllib.request.Request(cu, None, {})
367
378
368 try:
379 try:
369 log.debug("Trying to open URL %s", cleaned_uri)
380 log.debug("Trying to open URL %s", cleaned_uri)
370 resp = o.open(req)
381 resp = o.open(req)
371 if resp.code != 200:
382 if resp.code != 200:
372 raise exceptions.URLError()('Return Code is not 200')
383 raise exceptions.URLError()('Return Code is not 200')
373 except Exception as e:
384 except Exception as e:
374 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
385 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
375 # means it cannot be cloned
386 # means it cannot be cloned
376 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
387 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
377
388
378 # now detect if it's proper git repo
389 # now detect if it's proper git repo
379 gitdata = resp.read()
390 gitdata = resp.read()
380 if 'service=git-upload-pack' in gitdata:
391 if 'service=git-upload-pack' in gitdata:
381 pass
392 pass
382 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
393 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
383 # old style git can return some other format !
394 # old style git can return some other format !
384 pass
395 pass
385 else:
396 else:
386 raise exceptions.URLError()(
397 raise exceptions.URLError()(
387 "url [%s] does not look like an git" % (cleaned_uri,))
398 "url [%s] does not look like an git" % (cleaned_uri,))
388
399
389 return True
400 return True
390
401
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """Clone by pulling all refs from ``url`` into the repo behind ``wire``.

        :param deferred: ref suffix(es) to skip (passed to ``str.endswith``)
        :param valid_refs: prefix or list/tuple of prefixes a ref must match
        :param update_after_clone: when True, set HEAD and build the index
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # startswith() accepts a tuple of prefixes, not a list
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        # NOTE(review): dulwich ref names are bytes on py3 — assumes
        # valid_refs/deferred are of the matching type; confirm with callers.
        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
409
420
410 @reraise_safe_exceptions
421 @reraise_safe_exceptions
411 def branch(self, wire, commit_id):
422 def branch(self, wire, commit_id):
412 cache_on, context_uid, repo_id = self._cache_on(wire)
423 cache_on, context_uid, repo_id = self._cache_on(wire)
413 region = self._region(wire)
424 region = self._region(wire)
414 @region.conditional_cache_on_arguments(condition=cache_on)
425 @region.conditional_cache_on_arguments(condition=cache_on)
415 def _branch(_context_uid, _repo_id, _commit_id):
426 def _branch(_context_uid, _repo_id, _commit_id):
416 regex = re.compile('^refs/heads')
427 regex = re.compile('^refs/heads')
417
428
418 def filter_with(ref):
429 def filter_with(ref):
419 return regex.match(ref[0]) and ref[1] == _commit_id
430 return regex.match(ref[0]) and ref[1] == _commit_id
420
431
421 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
432 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
422 return [x[0].split('refs/heads/')[-1] for x in branches]
433 return [x[0].split('refs/heads/')[-1] for x in branches]
423
434
424 return _branch(context_uid, repo_id, commit_id)
435 return _branch(context_uid, repo_id, commit_id)
425
436
426 @reraise_safe_exceptions
437 @reraise_safe_exceptions
427 def commit_branches(self, wire, commit_id):
438 def commit_branches(self, wire, commit_id):
428 cache_on, context_uid, repo_id = self._cache_on(wire)
439 cache_on, context_uid, repo_id = self._cache_on(wire)
429 region = self._region(wire)
440 region = self._region(wire)
430 @region.conditional_cache_on_arguments(condition=cache_on)
441 @region.conditional_cache_on_arguments(condition=cache_on)
431 def _commit_branches(_context_uid, _repo_id, _commit_id):
442 def _commit_branches(_context_uid, _repo_id, _commit_id):
432 repo_init = self._factory.repo_libgit2(wire)
443 repo_init = self._factory.repo_libgit2(wire)
433 with repo_init as repo:
444 with repo_init as repo:
434 branches = [x for x in repo.branches.with_commit(_commit_id)]
445 branches = [x for x in repo.branches.with_commit(_commit_id)]
435 return branches
446 return branches
436
447
437 return _commit_branches(context_uid, repo_id, commit_id)
448 return _commit_branches(context_uid, repo_id, commit_id)
438
449
439 @reraise_safe_exceptions
450 @reraise_safe_exceptions
440 def add_object(self, wire, content):
451 def add_object(self, wire, content):
441 repo_init = self._factory.repo_libgit2(wire)
452 repo_init = self._factory.repo_libgit2(wire)
442 with repo_init as repo:
453 with repo_init as repo:
443 blob = objects.Blob()
454 blob = objects.Blob()
444 blob.set_raw_string(content)
455 blob.set_raw_string(content)
445 repo.object_store.add_object(blob)
456 repo.object_store.add_object(blob)
446 return blob.id
457 return blob.id
447
458
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """Create a commit on ``branch`` from updated/removed node dicts.

        :param commit_data: dict of commit attributes ('parents', 'author',
            'committer', 'message', 'encoding', ...) set on the Commit object
        :param commit_tree: existing tree sha to base on, or falsy for a
            fresh root tree
        :param updated: list of dicts with 'path', 'node_path', 'content',
            'mode' describing added/changed files
        :param removed: list of path strings to delete
        :return: the new commit id
        """
        # Defines the root tree
        class _Root(object):
            def __repr__(self):
                return 'ROOT TREE'
        ROOT = _Root()

        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs

        if commit_tree and repo[commit_tree]:
            # base on the first parent's root tree
            git_commit = repo[commit_data['parents'][0]]
            commit_tree = repo[git_commit.tree]  # root tree
        else:
            commit_tree = objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                # file sits directly in an existing tree
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree  # start with top-level
            trees = [{'tree': tree, 'path': ROOT}]
            # Traverse deep into the forest...
            # resolve final tree by iterating the path.
            # e.g a/b/c.txt will get
            # - root as tree then
            # - 'a' as tree,
            # - 'b' as tree,
            # - stop at c as blob.
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append({'tree': obj, 'path': path})
                        tree = obj
                except KeyError:
                    break
            #PROBLEM:
            """
            We're not editing same reference tree object
            """
            # Cut down the blob and all rotten trees on the way back...
            for path, tree_data in reversed(list(zip(paths, trees))):
                tree = tree_data['tree']
                tree.__delitem__(path)
                # This operation edits the tree, we need to mark new commit back

                if len(tree) > 0:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        # these attributes must be bytes for dulwich
        bytes_keys = [
            'author',
            'committer',
            'message',
            'encoding'
        ]

        for k, v in commit_data.items():
            if k in bytes_keys:
                v = safe_bytes(v)
            setattr(commit, k, v)

        object_store.add_object(commit)

        self.create_branch(wire, branch, safe_str(commit.id))

        # dulwich set-ref
        repo.refs[safe_bytes(f'refs/heads/{branch}')] = commit.id

        return commit.id
582
593
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """Fetch from ``url`` into the repo behind ``wire`` using dulwich.

        :param apply_refs: when True, write the fetched refs into the repo
        :param refs: optional list of ref names restricting the fetch
        :param update_after: when True, set HEAD and rebuild the index
        :return: dict of remote refs (filtered by ``refs`` when given)
        :raises exceptions.AbortException: when the url is not a git repo
        """
        # local paths use LocalGitClient; anything with a scheme (or the
        # special name 'default') goes over HTTP with auth from the url
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            # only fetch objects for the requested refs
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo["HEAD"] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
635
646
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """Fetch refs from ``url`` via the git binary (ls-remote + fetch).

        :param refs: optional sha or list of shas; only remote refs pointing
            at these shas are fetched
        :param all_refs: when False, list only heads and tags
        :return: OrderedDict mapping remote ref name (bytes) -> sha (bytes)
        """
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        # ls-remote output lines are bytes: b'<sha>\t<refname>'
        for ref_line in output.splitlines():
            sha, ref = ref_line.split(b'\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in [b'HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
            elif not refs:
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # fetch in chunks to keep the command line under OS limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
689
700
690 @reraise_safe_exceptions
701 @reraise_safe_exceptions
691 def sync_push(self, wire, url, refs=None):
702 def sync_push(self, wire, url, refs=None):
692 if not self.check_url(url, wire):
703 if not self.check_url(url, wire):
693 return
704 return
694 config = self._wire_to_config(wire)
705 config = self._wire_to_config(wire)
695 self._factory.repo(wire)
706 self._factory.repo(wire)
696 self.run_git_command(
707 self.run_git_command(
697 wire, ['push', url, '--mirror'], fail_on_stderr=False,
708 wire, ['push', url, '--mirror'], fail_on_stderr=False,
698 _copts=self._remote_conf(config),
709 _copts=self._remote_conf(config),
699 extra_env={'GIT_TERMINAL_PROMPT': '0'})
710 extra_env={'GIT_TERMINAL_PROMPT': '0'})
700
711
701 @reraise_safe_exceptions
712 @reraise_safe_exceptions
702 def get_remote_refs(self, wire, url):
713 def get_remote_refs(self, wire, url):
703 repo = Repo(url)
714 repo = Repo(url)
704 return repo.get_refs()
715 return repo.get_refs()
705
716
706 @reraise_safe_exceptions
717 @reraise_safe_exceptions
707 def get_description(self, wire):
718 def get_description(self, wire):
708 repo = self._factory.repo(wire)
719 repo = self._factory.repo(wire)
709 return repo.get_description()
720 return repo.get_description()
710
721
711 @reraise_safe_exceptions
722 @reraise_safe_exceptions
712 def get_missing_revs(self, wire, rev1, rev2, path2):
723 def get_missing_revs(self, wire, rev1, rev2, path2):
713 repo = self._factory.repo(wire)
724 repo = self._factory.repo(wire)
714 LocalGitClient(thin_packs=False).fetch(path2, repo)
725 LocalGitClient(thin_packs=False).fetch(path2, repo)
715
726
716 wire_remote = wire.copy()
727 wire_remote = wire.copy()
717 wire_remote['path'] = path2
728 wire_remote['path'] = path2
718 repo_remote = self._factory.repo(wire_remote)
729 repo_remote = self._factory.repo(wire_remote)
719 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
730 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
720
731
721 revs = [
732 revs = [
722 x.commit.id
733 x.commit.id
723 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
734 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
724 return revs
735 return revs
725
736
    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        """Resolve ``sha`` (or ref-ish) to basic object metadata.

        :param maybe_unreachable: when True, skip the dangling-commit check
        :return: dict with 'id', 'type', 'commit_id', 'idx'
        :raises exceptions.LookupException: when the sha cannot be resolved
            or points at a commit not reachable from any branch
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # annotated tag: resolve to the tagged object
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
785
796
    @reraise_safe_exceptions
    def get_refs(self, wire):
        """Return a mapping of ref name -> target sha for all branch/tag refs.

        Only ``refs/heads/*`` and ``refs/tags/*`` references are included;
        the result is cached per (context_uid, repo_id).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_refs(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                # restrict to branches and tags; other refs (notes, remotes,
                # pull refs) are intentionally excluded
                regex = re.compile('^refs/(heads|tags)/')
                return {x.name: x.target.hex for x in
                        [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}

        return _get_refs(context_uid, repo_id)
801
812
    @reraise_safe_exceptions
    def get_branch_pointers(self, wire):
        """Return a mapping of commit sha -> short branch name.

        Inverse view of :meth:`get_refs` restricted to ``refs/heads``; if two
        branches point at the same commit, only one shorthand survives in the
        dict. Cached per (context_uid, repo_id).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_branch_pointers(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            regex = re.compile('^refs/heads')
            with repo_init as repo:
                branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
                return {x.target.hex: x.shorthand for x in branches}

        return _get_branch_pointers(context_uid, repo_id)
817
828
    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        """Return the sha of the commit HEAD currently points at.

        :param show_exc: when False, swallow lookup errors (e.g. empty
            repository with an unborn HEAD) and return None instead.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    # peel resolves an annotated-tag/symbolic HEAD down to a commit
                    return repo.head.peel().hex
                except Exception:
                    # NOTE(review): this reads the closed-over `show_exc`, not
                    # the cache argument `_show_exc` — they carry the same
                    # value here, but confirm before refactoring
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)
833
844
834 @reraise_safe_exceptions
845 @reraise_safe_exceptions
835 def init(self, wire):
846 def init(self, wire):
836 repo_path = safe_str(wire['path'])
847 repo_path = safe_str(wire['path'])
837 self.repo = Repo.init(repo_path)
848 self.repo = Repo.init(repo_path)
838
849
839 @reraise_safe_exceptions
850 @reraise_safe_exceptions
840 def init_bare(self, wire):
851 def init_bare(self, wire):
841 repo_path = safe_str(wire['path'])
852 repo_path = safe_str(wire['path'])
842 self.repo = Repo.init_bare(repo_path)
853 self.repo = Repo.init_bare(repo_path)
843
854
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """Resolve *rev* to basic object data: its sha and, for commits,
        the sha of the root tree. Cached per (context_uid, repo_id, rev).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects itself don't have tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)
864
875
    @reraise_safe_exceptions
    def date(self, wire, commit_id):
        """Return ``[commit_time, commit_time_offset]`` for *commit_id*.

        Handles both direct commit objects and references (e.g. annotated
        tags) that must first be peeled via ``get_object()``.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _date(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'commit_time'):
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
                else:
                    # not a commit object directly — dereference (tag -> commit)
                    commit = commit.get_object()
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset

                # TODO(marcink): check dulwich difference of offset vs timezone
                return [commit_time, commit_time_offset]
        return _date(repo_id, commit_id)
885
896
    @reraise_safe_exceptions
    def author(self, wire, commit_id):
        """Return the author of *commit_id* formatted as ``Name <email>``.

        Falls back to name only when no email is recorded, and to the raw
        (undecoded) author name if the decoded name is unavailable.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _author(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'author'):
                    author = commit.author
                else:
                    # tag or other pointer object — peel to the commit first
                    author = commit.get_object().author

                if author.email:
                    return "{} <{}>".format(author.name, author.email)

                try:
                    return "{}".format(author.name)
                except Exception:
                    # name failed to decode — fall back to raw bytes, stringified
                    return "{}".format(safe_str(author.raw_name))

        return _author(repo_id, commit_id)
911
922
    @reraise_safe_exceptions
    def message(self, wire, commit_id):
        """Return the full commit message of *commit_id* (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _message(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                return commit.message
        return _message(repo_id, commit_id)
923
934
    @reraise_safe_exceptions
    def parents(self, wire, commit_id):
        """Return the list of parent commit shas of *commit_id* (cached)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _parents(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                if hasattr(commit, 'parent_ids'):
                    parent_ids = commit.parent_ids
                else:
                    # pointer object (e.g. tag) — peel to the commit first
                    parent_ids = commit.get_object().parent_ids

                return [x.hex for x in parent_ids]
        return _parents(repo_id, commit_id)
941
952
    @reraise_safe_exceptions
    def children(self, wire, commit_id):
        """Return the list of direct child commit shas of *commit_id*.

        Uses ``git rev-list --children`` over the range from *commit_id* to
        the current HEAD and picks the line describing *commit_id* itself.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        # resolve HEAD outside the cached function so the range below is concrete
        head = self.head(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _children(_repo_id, _commit_id):

            output, __ = self.run_git_command(
                wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])

            child_ids = []
            # the output line starting with commit_id lists its children after it
            pat = re.compile(r'^{}'.format(commit_id))
            for line in output.splitlines():
                line = safe_str(line)
                if pat.match(line):
                    found_ids = line.split(' ')[1:]
                    child_ids.extend(found_ids)
                    break

            return child_ids
        return _children(repo_id, commit_id)
966
977
967 @reraise_safe_exceptions
978 @reraise_safe_exceptions
968 def set_refs(self, wire, key, value):
979 def set_refs(self, wire, key, value):
969 repo_init = self._factory.repo_libgit2(wire)
980 repo_init = self._factory.repo_libgit2(wire)
970 with repo_init as repo:
981 with repo_init as repo:
971 repo.references.create(key, value, force=True)
982 repo.references.create(key, value, force=True)
972
983
973 @reraise_safe_exceptions
984 @reraise_safe_exceptions
974 def create_branch(self, wire, branch_name, commit_id, force=False):
985 def create_branch(self, wire, branch_name, commit_id, force=False):
975 repo_init = self._factory.repo_libgit2(wire)
986 repo_init = self._factory.repo_libgit2(wire)
976 with repo_init as repo:
987 with repo_init as repo:
977 commit = repo[commit_id]
988 commit = repo[commit_id]
978
989
979 if force:
990 if force:
980 repo.branches.local.create(branch_name, commit, force=force)
991 repo.branches.local.create(branch_name, commit, force=force)
981 elif not repo.branches.get(branch_name):
992 elif not repo.branches.get(branch_name):
982 # create only if that branch isn't existing
993 # create only if that branch isn't existing
983 repo.branches.local.create(branch_name, commit, force=force)
994 repo.branches.local.create(branch_name, commit, force=force)
984
995
985 @reraise_safe_exceptions
996 @reraise_safe_exceptions
986 def remove_ref(self, wire, key):
997 def remove_ref(self, wire, key):
987 repo_init = self._factory.repo_libgit2(wire)
998 repo_init = self._factory.repo_libgit2(wire)
988 with repo_init as repo:
999 with repo_init as repo:
989 repo.references.delete(key)
1000 repo.references.delete(key)
990
1001
991 @reraise_safe_exceptions
1002 @reraise_safe_exceptions
992 def tag_remove(self, wire, tag_name):
1003 def tag_remove(self, wire, tag_name):
993 repo_init = self._factory.repo_libgit2(wire)
1004 repo_init = self._factory.repo_libgit2(wire)
994 with repo_init as repo:
1005 with repo_init as repo:
995 key = 'refs/tags/{}'.format(tag_name)
1006 key = 'refs/tags/{}'.format(tag_name)
996 repo.references.delete(key)
1007 repo.references.delete(key)
997
1008
    @reraise_safe_exceptions
    def tree_changes(self, wire, source_id, target_id):
        """Return dulwich tree changes between two commits.

        ``source_id`` may be falsy, in which case changes are computed against
        an empty tree (i.e. the full content of *target_id*).
        """
        # TODO(marcink): remove this seems it's only used by tests
        repo = self._factory.repo(wire)
        source = repo[source_id].tree if source_id else None
        target = repo[target_id].tree
        result = repo.object_store.tree_changes(source, target)
        return list(result)
1006
1017
    @reraise_safe_exceptions
    def tree_and_type_for_path(self, wire, commit_id, path):
        """Look up *path* inside the tree of *commit_id*.

        Returns ``(tree_sha, type_str, filemode)`` or ``(None, None, None)``
        when the path does not exist in that commit. Cached.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]
                try:
                    tree = commit.tree[path]
                except KeyError:
                    # path not present in this commit's tree
                    return None, None, None

                return tree.id.hex, tree.type_str, tree.filemode
        return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1026
1037
    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        """List the entries of tree *tree_id*.

        Returns a list of ``(name, filemode, sha, type)`` tuples; submodule
        entries (git type ``commit``) are reported as type ``link`` for
        backward compatibility. Raises ObjectMissing for an unknown tree id.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_items(_repo_id, _tree_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                result = []
                for item in tree:
                    item_sha = item.hex
                    item_mode = item.filemode
                    item_type = item.type_str

                    if item_type == 'commit':
                        # NOTE(marcink): submodules we translate to 'link' for backward compat
                        item_type = 'link'

                    result.append((item.name, item_mode, item_sha, item_type))
                return result
        return _tree_items(repo_id, tree_id)
1055
1066
1056 @reraise_safe_exceptions
1067 @reraise_safe_exceptions
1057 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1068 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1058 """
1069 """
1059 Old version that uses subprocess to call diff
1070 Old version that uses subprocess to call diff
1060 """
1071 """
1061
1072
1062 flags = [
1073 flags = [
1063 '-U%s' % context, '--patch',
1074 '-U%s' % context, '--patch',
1064 '--binary',
1075 '--binary',
1065 '--find-renames',
1076 '--find-renames',
1066 '--no-indent-heuristic',
1077 '--no-indent-heuristic',
1067 # '--indent-heuristic',
1078 # '--indent-heuristic',
1068 #'--full-index',
1079 #'--full-index',
1069 #'--abbrev=40'
1080 #'--abbrev=40'
1070 ]
1081 ]
1071
1082
1072 if opt_ignorews:
1083 if opt_ignorews:
1073 flags.append('--ignore-all-space')
1084 flags.append('--ignore-all-space')
1074
1085
1075 if commit_id_1 == self.EMPTY_COMMIT:
1086 if commit_id_1 == self.EMPTY_COMMIT:
1076 cmd = ['show'] + flags + [commit_id_2]
1087 cmd = ['show'] + flags + [commit_id_2]
1077 else:
1088 else:
1078 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1089 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1079
1090
1080 if file_filter:
1091 if file_filter:
1081 cmd.extend(['--', file_filter])
1092 cmd.extend(['--', file_filter])
1082
1093
1083 diff, __ = self.run_git_command(wire, cmd)
1094 diff, __ = self.run_git_command(wire, cmd)
1084 # If we used 'show' command, strip first few lines (until actual diff
1095 # If we used 'show' command, strip first few lines (until actual diff
1085 # starts)
1096 # starts)
1086 if commit_id_1 == self.EMPTY_COMMIT:
1097 if commit_id_1 == self.EMPTY_COMMIT:
1087 lines = diff.splitlines()
1098 lines = diff.splitlines()
1088 x = 0
1099 x = 0
1089 for line in lines:
1100 for line in lines:
1090 if line.startswith(b'diff'):
1101 if line.startswith(b'diff'):
1091 break
1102 break
1092 x += 1
1103 x += 1
1093 # Append new line just like 'diff' command do
1104 # Append new line just like 'diff' command do
1094 diff = '\n'.join(lines[x:]) + '\n'
1105 diff = '\n'.join(lines[x:]) + '\n'
1095 return diff
1106 return diff
1096
1107
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        """Produce a patch between two commits using pygit2's tree diff.

        When *commit_id_1* is the empty commit, the diff is taken against an
        empty tree (the full content of *commit_id_2*). With *file_filter*
        set, only the patch for that single path is returned ('' if the path
        did not change).
        """
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            # swap=True flips old/new sides so the patch direction matches
            # the subprocess-based diff_2 output
            swap = True
            flags = 0
            flags |= pygit2.GIT_DIFF_SHOW_BINARY

            if opt_ignorews:
                flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

            if commit_id_1 == self.EMPTY_COMMIT:
                comm1 = repo[commit_id_2]
                diff_obj = comm1.tree.diff_to_tree(
                    flags=flags, context_lines=context, swap=swap)

            else:
                comm1 = repo[commit_id_2]
                comm2 = repo[commit_id_1]
                diff_obj = comm1.tree.diff_to_tree(
                    comm2.tree, flags=flags, context_lines=context, swap=swap)
            # enable rename detection on the computed diff
            similar_flags = 0
            similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
            diff_obj.find_similar(flags=similar_flags)

            if file_filter:
                for p in diff_obj:
                    if p.delta.old_file.path == file_filter:
                        return p.patch or ''
                # no matching path == no diff
                return ''
            return diff_obj.patch or ''
1129
1140
    @reraise_safe_exceptions
    def node_history(self, wire, commit_id, path, limit):
        """Return commit ids (as bytes shas) that touched *path*, newest first.

        ``limit=1`` is special-cased to ``rev-list -1`` which is much faster;
        otherwise ``git log`` is used, optionally bounded by *limit*.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
            # optimize for n==1, rev-list is much faster for that use-case
            if limit == 1:
                cmd = ['rev-list', '-1', commit_id, '--', path]
            else:
                cmd = ['log']
                if limit:
                    cmd.extend(['-n', str(safe_int(limit, 0))])
                cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])

            output, __ = self.run_git_command(wire, cmd)
            # extract full 40-char shas from the (bytes) output
            commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)

            return [x for x in commit_ids]
        return _node_history(context_uid, repo_id, commit_id, path, limit)
1151
1162
    @reraise_safe_exceptions
    def node_annotate_legacy(self, wire, commit_id, path):
        """Blame *path* at *commit_id* via ``git blame`` subprocess.

        Returns a list of ``(line_no, blame_commit_id, line)`` tuples
        (1-based line numbers, bytes content).
        """
        #note: replaced by pygit2 impelementation
        cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
        # -l ==> outputs long shas (and we need all 40 characters)
        # --root ==> doesn't put '^' character for boundaries
        # -r commit_id ==> blames for the given commit
        output, __ = self.run_git_command(wire, cmd)

        result = []
        # the last line of blame output is dropped (trailing artifact)
        for i, blame_line in enumerate(output.splitlines()[:-1]):
            line_no = i + 1
            blame_commit_id, line = re.split(rb' ', blame_line, 1)
            result.append((line_no, blame_commit_id, line))

        return result
1168
1179
    @reraise_safe_exceptions
    def node_annotate(self, wire, commit_id, path):
        """Blame *path* at *commit_id* using pygit2's blame engine.

        Returns a list of ``(line_no, blame_commit_id, line)`` tuples,
        mirroring the output shape of :meth:`node_annotate_legacy`.
        """
        result_libgit = []
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            commit = repo[commit_id]
            blame_obj = repo.blame(path, newest_commit=commit_id)
            # walk the file content line by line and resolve each line's hunk
            for i, line in enumerate(commit.tree[path].data.splitlines()):
                line_no = i + 1
                hunk = blame_obj.for_line(line_no)
                blame_commit_id = hunk.final_commit_id.hex

                result_libgit.append((line_no, blame_commit_id, line))

        return result_libgit
1185
1196
1186 @reraise_safe_exceptions
1197 @reraise_safe_exceptions
1187 def update_server_info(self, wire):
1198 def update_server_info(self, wire):
1188 repo = self._factory.repo(wire)
1199 repo = self._factory.repo(wire)
1189 update_server_info(repo)
1200 update_server_info(repo)
1190
1201
1191 @reraise_safe_exceptions
1202 @reraise_safe_exceptions
1192 def get_all_commit_ids(self, wire):
1203 def get_all_commit_ids(self, wire):
1193
1204
1194 cache_on, context_uid, repo_id = self._cache_on(wire)
1205 cache_on, context_uid, repo_id = self._cache_on(wire)
1195 region = self._region(wire)
1206 region = self._region(wire)
1196
1207
1197 @region.conditional_cache_on_arguments(condition=cache_on)
1208 @region.conditional_cache_on_arguments(condition=cache_on)
1198 def _get_all_commit_ids(_context_uid, _repo_id):
1209 def _get_all_commit_ids(_context_uid, _repo_id):
1199
1210
1200 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1211 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1201 try:
1212 try:
1202 output, __ = self.run_git_command(wire, cmd)
1213 output, __ = self.run_git_command(wire, cmd)
1203 return output.splitlines()
1214 return output.splitlines()
1204 except Exception:
1215 except Exception:
1205 # Can be raised for empty repositories
1216 # Can be raised for empty repositories
1206 return []
1217 return []
1207
1218
1208 @region.conditional_cache_on_arguments(condition=cache_on)
1219 @region.conditional_cache_on_arguments(condition=cache_on)
1209 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1220 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1210 repo_init = self._factory.repo_libgit2(wire)
1221 repo_init = self._factory.repo_libgit2(wire)
1211 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1222 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1212 results = []
1223 results = []
1213 with repo_init as repo:
1224 with repo_init as repo:
1214 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1225 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1215 results.append(commit.id.hex)
1226 results.append(commit.id.hex)
1216
1227
1217 return _get_all_commit_ids(context_uid, repo_id)
1228 return _get_all_commit_ids(context_uid, repo_id)
1218
1229
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """Execute a git subcommand for the repository described by *wire*.

        :param cmd: list of git arguments (without the ``git`` executable).
        :param opts: extra subprocess options; recognised private keys:
            ``_bare`` (skip default config options), ``_safe`` (return
            ``('', err)`` instead of raising on OSError), ``_copts``
            (additional ``-c`` config options), ``extra_env`` (env overrides).
        :returns: ``(stdout_bytes, stderr_bytes)`` tuple.
        :raises exceptions.VcsException: when the process cannot be started
            and ``_safe`` was not set.
        """
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            # default config: keep non-ascii paths unescaped in output
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
        _opts = {'env': gitenv, 'shell': False}

        proc = None
        try:
            _opts.update(opts)
            proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return b''.join(proc), b''.join(proc.stderr)
        except OSError as err:
            cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)
        finally:
            # always release the chunker's underlying process/pipes
            if proc:
                proc.close()
1272
1283
1273 @reraise_safe_exceptions
1284 @reraise_safe_exceptions
1274 def install_hooks(self, wire, force=False):
1285 def install_hooks(self, wire, force=False):
1275 from vcsserver.hook_utils import install_git_hooks
1286 from vcsserver.hook_utils import install_git_hooks
1276 bare = self.bare(wire)
1287 bare = self.bare(wire)
1277 path = wire['path']
1288 path = wire['path']
1278 return install_git_hooks(path, bare, force_create=force)
1289 return install_git_hooks(path, bare, force_create=force)
1279
1290
1280 @reraise_safe_exceptions
1291 @reraise_safe_exceptions
1281 def get_hooks_info(self, wire):
1292 def get_hooks_info(self, wire):
1282 from vcsserver.hook_utils import (
1293 from vcsserver.hook_utils import (
1283 get_git_pre_hook_version, get_git_post_hook_version)
1294 get_git_pre_hook_version, get_git_post_hook_version)
1284 bare = self.bare(wire)
1295 bare = self.bare(wire)
1285 path = wire['path']
1296 path = wire['path']
1286 return {
1297 return {
1287 'pre_version': get_git_pre_hook_version(path, bare),
1298 'pre_version': get_git_pre_hook_version(path, bare),
1288 'post_version': get_git_post_hook_version(path, bare),
1299 'post_version': get_git_post_hook_version(path, bare),
1289 }
1300 }
1290
1301
    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        """Point HEAD at ``refs/heads/<head_name>`` via ``git symbolic-ref``.

        Returns ``[head_name]`` followed by any command output lines.
        """
        log.debug('Setting refs/head to `%s`', head_name)
        # NOTE(review): the argv elements carry literal double quotes; with
        # shell=False these reach git verbatim — confirm this is intentional
        cmd = ['symbolic-ref', '"HEAD"', '"refs/heads/%s"' % head_name]
        output, __ = self.run_git_command(wire, cmd)
        return [head_name] + output.splitlines()
1297
1308
    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id):
        """Create an archive (e.g. tar/zip, per *kind*) of the repository
        content at *commit_id*, rooted at *archive_at_path*.

        Builds a lazy file walker over the commit's tree and delegates the
        actual archive writing to the shared ``archive_repo`` helper.
        """

        def file_walker(_commit_id, path):
            # yields ArchiveNode entries for every file under `path` at the commit
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]

                if path in ['', '/']:
                    tree = commit.tree
                else:
                    tree = commit.tree[path.rstrip('/')]
                tree_id = tree.id.hex
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                # flatten the tree recursively by reading it into an index
                index = LibGit2Index.Index()
                index.read_tree(tree)
                file_iter = index

                for fn in file_iter:
                    file_path = fn.path
                    mode = fn.mode
                    is_link = stat.S_ISLNK(mode)
                    if mode == pygit2.GIT_FILEMODE_COMMIT:
                        # submodule entry — has no blob content to archive
                        log.debug('Skipping path %s as a commit node', file_path)
                        continue
                    yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)

        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
                            archive_dir_name, commit_id)
@@ -1,1072 +1,1086 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib.request, urllib.parse, urllib.error
21 import urllib.request, urllib.parse, urllib.error
22 import urllib.request, urllib.error, urllib.parse
22 import urllib.request, urllib.error, urllib.parse
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase, purge
25 from hgext import largefiles, rebase, purge
26
26
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30 from mercurial import repair
30 from mercurial import repair
31
31
32 import vcsserver
32 import vcsserver
33 from vcsserver import exceptions
33 from vcsserver import exceptions
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.hgcompat import (
35 from vcsserver.hgcompat import (
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 RepoLookupError, InterventionRequired, RequirementError,
40 RepoLookupError, InterventionRequired, RequirementError,
41 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
41 alwaysmatcher, patternmatcher, hgutil, hgext_strip)
42 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
42 from vcsserver.str_utils import ascii_bytes, ascii_str, safe_str, safe_bytes
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44
44
# Module-level logger used by every backend call in this file.
log = logging.getLogger(__name__)
46
46
47
47
def make_ui_from_config(repo_config):
    """Build a Mercurial ``ui`` instance from ``(section, option, value)`` tuples.

    The returned ui is quiet, paginates never, uses a single worker thread and
    routes all of Mercurial's own output channels into this module's logger.
    """

    class LoggingUI(ui.ui):
        """ui subclass that forwards Mercurial messages to the python logger."""

        def status(self, *msg, **opts):
            log.info(' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            log.warning('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug('ui_logger:' + ' '.join(map(safe_str, msg)).rstrip('\n'))
            #super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # start from pristine config objects so no ambient hgrc settings leak in
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(ascii_bytes(section), ascii_bytes(option), ascii_bytes(value))

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig(b'ui', b'quiet', b'true')
    baseui.setconfig(b'ui', b'paginate', b'never')
    # for better Error reporting of Mercurial
    baseui.setconfig(b'ui', b'message-output', b'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig(b'worker', b'numcpus', 1)

    # If there is no config for the largefiles extension, explicitly disable
    # it here. This overrides settings from the repository hgrc file (recent
    # mercurial versions enable largefiles in hgrc on clone from a largefile
    # repo).
    if not baseui.hasconfig(b'extensions', b'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig(b'extensions', b'largefiles', b'!')

    return baseui
107
107
108
108
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral.

    Mercurial's own exception types cannot cross the wire protocol, so every
    remote-facing method is wrapped and known exception classes are translated
    into :mod:`vcsserver.exceptions` equivalents via ``raise_from_original``.
    Unknown exceptions are logged and re-raised as ``UnhandledException``.
    """
    import functools

    # functools.wraps preserves __name__/__doc__ of the wrapped method, which
    # keeps logging, debugging and introspection of remote methods readable.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e), e)
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e), e)
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e), e)
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e), e)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e), e)

            raise
    return wrapper
132
132
133
133
class MercurialFactory(RepoFactory):
    """RepoFactory implementation producing Mercurial repository objects."""

    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """Build a baseui from *config*; with ``hooks=False`` the RhodeCode
        wire hooks are stripped out first."""
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [entry for entry in config
                      if not (entry[0] == 'hooks' and entry[1] in hooks_to_clean)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        # wire carries both the ui configuration and the on-disk path
        baseui = self._create_config(wire["config"])
        return instance(baseui, ascii_bytes(wire["path"]), create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
161
161
162
162
def patch_ui_message_output(baseui):
    """Force *baseui* to be verbose and capture everything it prints.

    All output channels (status/write/warn/debug) are redirected into a single
    BytesIO buffer. Returns a ``(baseui, output_buffer)`` tuple.
    """
    baseui.setconfig(b'ui', b'quiet', b'false')
    captured = io.BytesIO()

    def _capture(data, **unused_kwargs):
        captured.write(data)

    for channel in ('status', 'write', 'warn', 'debug'):
        setattr(baseui, channel, _capture)

    return baseui, captured
176
176
177
177
178 class HgRemote(RemoteBase):
178 class HgRemote(RemoteBase):
179
179
    def __init__(self, factory):
        """Remote adapter exposing Mercurial operations to RhodeCode.

        :param factory: MercurialFactory used to materialize repo objects
            from the per-call ``wire`` dict.
        """
        self._factory = factory
        # attribute name -> bound method; used by bulk_request() to resolve
        # several commit attributes in a single remote call
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }
196
196
    def _get_ctx(self, repo, ref):
        """Resolve *ref* (commit hash / revision) into a changectx for *repo*."""
        return get_ctx(repo, ref)
199
199
200 @reraise_safe_exceptions
200 @reraise_safe_exceptions
201 def discover_hg_version(self):
201 def discover_hg_version(self):
202 from mercurial import util
202 from mercurial import util
203 return safe_str(util.version())
203 return safe_str(util.version())
204
204
205 @reraise_safe_exceptions
205 @reraise_safe_exceptions
206 def is_empty(self, wire):
206 def is_empty(self, wire):
207 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
208
208
209 try:
209 try:
210 return len(repo) == 0
210 return len(repo) == 0
211 except Exception:
211 except Exception:
212 log.exception("failed to read object_store")
212 log.exception("failed to read object_store")
213 return False
213 return False
214
214
    @reraise_safe_exceptions
    def bookmarks(self, wire):
        """Return a dict of bookmark name -> commit hash (hex str).

        The result is cached per repo/context when caching is enabled
        for this *wire*.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo._bookmarks.items()}

        return _bookmarks(context_uid, repo_id)
226
226
    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        """Return a dict of branch name -> tip commit hash.

        :param normal: include open branches
        :param closed: include closed branches
        Cached per repo/context when caching is enabled for this *wire*.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip_node, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))
                if closed and is_closed:
                    bt[safe_str(branch_name)] = ascii_str(hex(tip_node))

            return bt

        return _branches(context_uid, repo_id, normal, closed)
246
246
247 @reraise_safe_exceptions
247 @reraise_safe_exceptions
248 def bulk_request(self, wire, commit_id, pre_load):
248 def bulk_request(self, wire, commit_id, pre_load):
249 cache_on, context_uid, repo_id = self._cache_on(wire)
249 cache_on, context_uid, repo_id = self._cache_on(wire)
250 region = self._region(wire)
250 region = self._region(wire)
251
251
252 @region.conditional_cache_on_arguments(condition=cache_on)
252 @region.conditional_cache_on_arguments(condition=cache_on)
253 def _bulk_request(_repo_id, _commit_id, _pre_load):
253 def _bulk_request(_repo_id, _commit_id, _pre_load):
254 result = {}
254 result = {}
255 for attr in pre_load:
255 for attr in pre_load:
256 try:
256 try:
257 method = self._bulk_methods[attr]
257 method = self._bulk_methods[attr]
258 result[attr] = method(wire, commit_id)
258 result[attr] = method(wire, commit_id)
259 except KeyError as e:
259 except KeyError as e:
260 raise exceptions.VcsException(e)(
260 raise exceptions.VcsException(e)(
261 'Unknown bulk attribute: "%s"' % attr)
261 'Unknown bulk attribute: "%s"' % attr)
262 return result
262 return result
263
263
264 return _bulk_request(repo_id, commit_id, sorted(pre_load))
264 return _bulk_request(repo_id, commit_id, sorted(pre_load))
265
265
    @reraise_safe_exceptions
    def ctx_branch(self, wire, commit_id):
        """Return the branch name of *commit_id* (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_branch(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.branch()
        return _ctx_branch(repo_id, commit_id)
277
277
    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        """Return the commit date of *commit_id* (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()
        return _ctx_date(repo_id, commit_id)
289
289
290 @reraise_safe_exceptions
290 @reraise_safe_exceptions
291 def ctx_description(self, wire, revision):
291 def ctx_description(self, wire, revision):
292 repo = self._factory.repo(wire)
292 repo = self._factory.repo(wire)
293 ctx = self._get_ctx(repo, revision)
293 ctx = self._get_ctx(repo, revision)
294 return ctx.description()
294 return ctx.description()
295
295
    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        """Return the files touched by *commit_id* (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)
308
308
309 @reraise_safe_exceptions
309 @reraise_safe_exceptions
310 def ctx_list(self, path, revision):
310 def ctx_list(self, path, revision):
311 repo = self._factory.repo(path)
311 repo = self._factory.repo(path)
312 ctx = self._get_ctx(repo, revision)
312 ctx = self._get_ctx(repo, revision)
313 return list(ctx)
313 return list(ctx)
314
314
    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        """Return the visible parent hashes of *commit_id*.

        Hidden and obsolete parents are filtered out. Cached when enabled.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)
328
328
    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        """Return the visible child hashes of *commit_id*.

        Hidden and obsolete children are filtered out. Cached when enabled.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)
342
342
    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        """Return the phase of *commit_id* (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # public=0, draft=1, secret=3
            return ctx.phase()
        return _ctx_phase(context_uid, repo_id, commit_id)
355
355
    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        """Return True if *commit_id* is obsolete (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()
        return _ctx_obsolete(context_uid, repo_id, commit_id)
367
367
    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        """Return True if *commit_id* is hidden (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()
        return _ctx_hidden(context_uid, repo_id, commit_id)
379
379
380 @reraise_safe_exceptions
380 @reraise_safe_exceptions
381 def ctx_substate(self, wire, revision):
381 def ctx_substate(self, wire, revision):
382 repo = self._factory.repo(wire)
382 repo = self._factory.repo(wire)
383 ctx = self._get_ctx(repo, revision)
383 ctx = self._get_ctx(repo, revision)
384 return ctx.substate
384 return ctx.substate
385
385
386 @reraise_safe_exceptions
386 @reraise_safe_exceptions
387 def ctx_status(self, wire, revision):
387 def ctx_status(self, wire, revision):
388 repo = self._factory.repo(wire)
388 repo = self._factory.repo(wire)
389 ctx = self._get_ctx(repo, revision)
389 ctx = self._get_ctx(repo, revision)
390 status = repo[ctx.p1().node()].status(other=ctx.node())
390 status = repo[ctx.p1().node()].status(other=ctx.node())
391 # object of status (odd, custom named tuple in mercurial) is not
391 # object of status (odd, custom named tuple in mercurial) is not
392 # correctly serializable, we make it a list, as the underling
392 # correctly serializable, we make it a list, as the underling
393 # API expects this to be a list
393 # API expects this to be a list
394 return list(status)
394 return list(status)
395
395
396 @reraise_safe_exceptions
396 @reraise_safe_exceptions
397 def ctx_user(self, wire, revision):
397 def ctx_user(self, wire, revision):
398 repo = self._factory.repo(wire)
398 repo = self._factory.repo(wire)
399 ctx = self._get_ctx(repo, revision)
399 ctx = self._get_ctx(repo, revision)
400 return ctx.user()
400 return ctx.user()
401
401
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Validate that *url* is reachable and points at a Mercurial repo.

        Supports ``proto+scheme://`` prefixed URLs (e.g. ``svn+http://``);
        for the ``svn`` proto only reachability is checked. Credentials in the
        URL are obfuscated before anything is logged. Raises ``URLError`` when
        the URL cannot be opened or is not an hg repository; returns True
        otherwise.
        """
        _proto = None
        # split an optional "proto+" prefix off the scheme
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        # mask credentials/query before the URL reaches the logs
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib.request.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # issue an hg wire-protocol "between" command as a cheap liveness probe
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.parse.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib.request.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                # NOTE(review): URLError() is invoked without an org_exc here,
                # unlike the other call sites — confirm the factory accepts it
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True
466
466
467 @reraise_safe_exceptions
467 @reraise_safe_exceptions
468 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
468 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
469 repo = self._factory.repo(wire)
469 repo = self._factory.repo(wire)
470
470
471 if file_filter:
471 if file_filter:
472 match_filter = match(file_filter[0], '', [file_filter[1]])
472 match_filter = match(file_filter[0], '', [file_filter[1]])
473 else:
473 else:
474 match_filter = file_filter
474 match_filter = file_filter
475 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
475 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
476
476
477 try:
477 try:
478 diff_iter = patch.diff(
478 diff_iter = patch.diff(
479 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
479 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)
480 return b"".join(diff_iter)
480 return b"".join(diff_iter)
481 except RepoLookupError as e:
481 except RepoLookupError as e:
482 raise exceptions.LookupException(e)()
482 raise exceptions.LookupException(e)()
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def node_history(self, wire, revision, path, limit):
485 def node_history(self, wire, revision, path, limit):
486 cache_on, context_uid, repo_id = self._cache_on(wire)
486 cache_on, context_uid, repo_id = self._cache_on(wire)
487 region = self._region(wire)
487 region = self._region(wire)
488
488
489 @region.conditional_cache_on_arguments(condition=cache_on)
489 @region.conditional_cache_on_arguments(condition=cache_on)
490 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
490 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
491 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
492
492
493 ctx = self._get_ctx(repo, revision)
493 ctx = self._get_ctx(repo, revision)
494 fctx = ctx.filectx(safe_bytes(path))
494 fctx = ctx.filectx(safe_bytes(path))
495
495
496 def history_iter():
496 def history_iter():
497 limit_rev = fctx.rev()
497 limit_rev = fctx.rev()
498 for obj in reversed(list(fctx.filelog())):
498 for obj in reversed(list(fctx.filelog())):
499 obj = fctx.filectx(obj)
499 obj = fctx.filectx(obj)
500 ctx = obj.changectx()
500 ctx = obj.changectx()
501 if ctx.hidden() or ctx.obsolete():
501 if ctx.hidden() or ctx.obsolete():
502 continue
502 continue
503
503
504 if limit_rev >= obj.rev():
504 if limit_rev >= obj.rev():
505 yield obj
505 yield obj
506
506
507 history = []
507 history = []
508 for cnt, obj in enumerate(history_iter()):
508 for cnt, obj in enumerate(history_iter()):
509 if limit and cnt >= limit:
509 if limit and cnt >= limit:
510 break
510 break
511 history.append(hex(obj.node()))
511 history.append(hex(obj.node()))
512
512
513 return [x for x in history]
513 return [x for x in history]
514 return _node_history(context_uid, repo_id, revision, path, limit)
514 return _node_history(context_uid, repo_id, revision, path, limit)
515
515
516 @reraise_safe_exceptions
516 @reraise_safe_exceptions
517 def node_history_untill(self, wire, revision, path, limit):
517 def node_history_untill(self, wire, revision, path, limit):
518 cache_on, context_uid, repo_id = self._cache_on(wire)
518 cache_on, context_uid, repo_id = self._cache_on(wire)
519 region = self._region(wire)
519 region = self._region(wire)
520
520
521 @region.conditional_cache_on_arguments(condition=cache_on)
521 @region.conditional_cache_on_arguments(condition=cache_on)
522 def _node_history_until(_context_uid, _repo_id):
522 def _node_history_until(_context_uid, _repo_id):
523 repo = self._factory.repo(wire)
523 repo = self._factory.repo(wire)
524 ctx = self._get_ctx(repo, revision)
524 ctx = self._get_ctx(repo, revision)
525 fctx = ctx.filectx(safe_bytes(path))
525 fctx = ctx.filectx(safe_bytes(path))
526
526
527 file_log = list(fctx.filelog())
527 file_log = list(fctx.filelog())
528 if limit:
528 if limit:
529 # Limit to the last n items
529 # Limit to the last n items
530 file_log = file_log[-limit:]
530 file_log = file_log[-limit:]
531
531
532 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
532 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
533 return _node_history_until(context_uid, repo_id, revision, path, limit)
533 return _node_history_until(context_uid, repo_id, revision, path, limit)
534
534
535 @reraise_safe_exceptions
535 @reraise_safe_exceptions
536 def fctx_annotate(self, wire, revision, path):
536 def fctx_annotate(self, wire, revision, path):
537 repo = self._factory.repo(wire)
537 repo = self._factory.repo(wire)
538 ctx = self._get_ctx(repo, revision)
538 ctx = self._get_ctx(repo, revision)
539 fctx = ctx.filectx(safe_bytes(path))
539 fctx = ctx.filectx(safe_bytes(path))
540
540
541 result = []
541 result = []
542 for i, annotate_obj in enumerate(fctx.annotate(), 1):
542 for i, annotate_obj in enumerate(fctx.annotate(), 1):
543 ln_no = i
543 ln_no = i
544 sha = hex(annotate_obj.fctx.node())
544 sha = hex(annotate_obj.fctx.node())
545 content = annotate_obj.text
545 content = annotate_obj.text
546 result.append((ln_no, sha, content))
546 result.append((ln_no, sha, content))
547 return result
547 return result
548
548
    @reraise_safe_exceptions
    def fctx_node_data(self, wire, revision, path):
        """Return the raw file content (bytes) of *path* at *revision*."""
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(safe_bytes(path))
        return fctx.data()
555
555
556 @reraise_safe_exceptions
556 @reraise_safe_exceptions
557 def fctx_flags(self, wire, commit_id, path):
557 def fctx_flags(self, wire, commit_id, path):
558 cache_on, context_uid, repo_id = self._cache_on(wire)
558 cache_on, context_uid, repo_id = self._cache_on(wire)
559 region = self._region(wire)
559 region = self._region(wire)
560
560
561 @region.conditional_cache_on_arguments(condition=cache_on)
561 @region.conditional_cache_on_arguments(condition=cache_on)
562 def _fctx_flags(_repo_id, _commit_id, _path):
562 def _fctx_flags(_repo_id, _commit_id, _path):
563 repo = self._factory.repo(wire)
563 repo = self._factory.repo(wire)
564 ctx = self._get_ctx(repo, commit_id)
564 ctx = self._get_ctx(repo, commit_id)
565 fctx = ctx.filectx(safe_bytes(path))
565 fctx = ctx.filectx(safe_bytes(path))
566 return fctx.flags()
566 return fctx.flags()
567
567
568 return _fctx_flags(repo_id, commit_id, path)
568 return _fctx_flags(repo_id, commit_id, path)
569
569
570 @reraise_safe_exceptions
570 @reraise_safe_exceptions
571 def fctx_size(self, wire, commit_id, path):
571 def fctx_size(self, wire, commit_id, path):
572 cache_on, context_uid, repo_id = self._cache_on(wire)
572 cache_on, context_uid, repo_id = self._cache_on(wire)
573 region = self._region(wire)
573 region = self._region(wire)
574
574
575 @region.conditional_cache_on_arguments(condition=cache_on)
575 @region.conditional_cache_on_arguments(condition=cache_on)
576 def _fctx_size(_repo_id, _revision, _path):
576 def _fctx_size(_repo_id, _revision, _path):
577 repo = self._factory.repo(wire)
577 repo = self._factory.repo(wire)
578 ctx = self._get_ctx(repo, commit_id)
578 ctx = self._get_ctx(repo, commit_id)
579 fctx = ctx.filectx(safe_bytes(path))
579 fctx = ctx.filectx(safe_bytes(path))
580 return fctx.size()
580 return fctx.size()
581 return _fctx_size(repo_id, commit_id, path)
581 return _fctx_size(repo_id, commit_id, path)
582
582
583 @reraise_safe_exceptions
583 @reraise_safe_exceptions
584 def get_all_commit_ids(self, wire, name):
584 def get_all_commit_ids(self, wire, name):
585 cache_on, context_uid, repo_id = self._cache_on(wire)
585 cache_on, context_uid, repo_id = self._cache_on(wire)
586 region = self._region(wire)
586 region = self._region(wire)
587
587
588 @region.conditional_cache_on_arguments(condition=cache_on)
588 @region.conditional_cache_on_arguments(condition=cache_on)
589 def _get_all_commit_ids(_context_uid, _repo_id, _name):
589 def _get_all_commit_ids(_context_uid, _repo_id, _name):
590 repo = self._factory.repo(wire)
590 repo = self._factory.repo(wire)
591 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
591 revs = [ascii_str(repo[x].hex()) for x in repo.filtered(b'visible').changelog.revs()]
592 return revs
592 return revs
593 return _get_all_commit_ids(context_uid, repo_id, name)
593 return _get_all_commit_ids(context_uid, repo_id, name)
594
594
    @reraise_safe_exceptions
    def get_config_value(self, wire, section, name, untrusted=False):
        """Read a single hgrc config value ``[section] name`` from the repo's ui."""
        repo = self._factory.repo(wire)
        return repo.ui.config(ascii_bytes(section), ascii_bytes(name), untrusted=untrusted)
599
599
600 @reraise_safe_exceptions
600 @reraise_safe_exceptions
601 def is_large_file(self, wire, commit_id, path):
601 def is_large_file(self, wire, commit_id, path):
602 cache_on, context_uid, repo_id = self._cache_on(wire)
602 cache_on, context_uid, repo_id = self._cache_on(wire)
603 region = self._region(wire)
603 region = self._region(wire)
604
604
605 @region.conditional_cache_on_arguments(condition=cache_on)
605 @region.conditional_cache_on_arguments(condition=cache_on)
606 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
606 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
607 return largefiles.lfutil.isstandin(safe_bytes(path))
607 return largefiles.lfutil.isstandin(safe_bytes(path))
608
608
609 return _is_large_file(context_uid, repo_id, commit_id, path)
609 return _is_large_file(context_uid, repo_id, commit_id, path)
610
610
611 @reraise_safe_exceptions
611 @reraise_safe_exceptions
612 def is_binary(self, wire, revision, path):
612 def is_binary(self, wire, revision, path):
613 cache_on, context_uid, repo_id = self._cache_on(wire)
613 cache_on, context_uid, repo_id = self._cache_on(wire)
614 region = self._region(wire)
614 region = self._region(wire)
615
615
616 @region.conditional_cache_on_arguments(condition=cache_on)
616 @region.conditional_cache_on_arguments(condition=cache_on)
617 def _is_binary(_repo_id, _sha, _path):
617 def _is_binary(_repo_id, _sha, _path):
618 repo = self._factory.repo(wire)
618 repo = self._factory.repo(wire)
619 ctx = self._get_ctx(repo, revision)
619 ctx = self._get_ctx(repo, revision)
620 fctx = ctx.filectx(safe_bytes(path))
620 fctx = ctx.filectx(safe_bytes(path))
621 return fctx.isbinary()
621 return fctx.isbinary()
622
622
623 return _is_binary(repo_id, revision, path)
623 return _is_binary(repo_id, revision, path)
624
624
    @reraise_safe_exceptions
    def md5_hash(self, wire, revision, path):
        """
        Return the hex md5 digest of *path*'s content at *revision* (cached).

        Computed on the vcsserver side so the full file content does not have
        to travel to the caller just to be fingerprinted.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _md5_hash(_repo_id, _sha, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(safe_bytes(path))
            # md5 is used as a cheap content fingerprint here, not for security
            return hashlib.md5(fctx.data()).hexdigest()

        return _md5_hash(repo_id, revision, path)
638
    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, sha):
        """Return True if the largefile blob *sha* exists in the repo's local largefiles store."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.instore(repo, sha)
629
643
    @reraise_safe_exceptions
    def in_user_cache(self, wire, sha):
        """Return True if the largefile blob *sha* exists in the per-user largefiles cache."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.inusercache(repo.ui, sha)
634
648
    @reraise_safe_exceptions
    def store_path(self, wire, sha):
        """Return the filesystem path of largefile blob *sha* inside the repo store."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.storepath(repo, sha)
639
653
    @reraise_safe_exceptions
    def link(self, wire, sha, path):
        """Link the user-cache largefile blob *sha* to *path* via lfutil.link."""
        repo = self._factory.repo(wire)
        largefiles.lfutil.link(
            largefiles.lfutil.usercachepath(repo.ui, sha), path)
645
659
    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        """Instantiate (and optionally create) the local repository; the object is discarded."""
        self._factory.repo(wire, create=create)
649
663
650 @reraise_safe_exceptions
664 @reraise_safe_exceptions
651 def lookup(self, wire, revision, both):
665 def lookup(self, wire, revision, both):
652 cache_on, context_uid, repo_id = self._cache_on(wire)
666 cache_on, context_uid, repo_id = self._cache_on(wire)
653 region = self._region(wire)
667 region = self._region(wire)
654
668
655 @region.conditional_cache_on_arguments(condition=cache_on)
669 @region.conditional_cache_on_arguments(condition=cache_on)
656 def _lookup(_context_uid, _repo_id, _revision, _both):
670 def _lookup(_context_uid, _repo_id, _revision, _both):
657
671
658 repo = self._factory.repo(wire)
672 repo = self._factory.repo(wire)
659 rev = _revision
673 rev = _revision
660 if isinstance(rev, int):
674 if isinstance(rev, int):
661 # NOTE(marcink):
675 # NOTE(marcink):
662 # since Mercurial doesn't support negative indexes properly
676 # since Mercurial doesn't support negative indexes properly
663 # we need to shift accordingly by one to get proper index, e.g
677 # we need to shift accordingly by one to get proper index, e.g
664 # repo[-1] => repo[-2]
678 # repo[-1] => repo[-2]
665 # repo[0] => repo[-1]
679 # repo[0] => repo[-1]
666 if rev <= 0:
680 if rev <= 0:
667 rev = rev + -1
681 rev = rev + -1
668 try:
682 try:
669 ctx = self._get_ctx(repo, rev)
683 ctx = self._get_ctx(repo, rev)
670 except (TypeError, RepoLookupError) as e:
684 except (TypeError, RepoLookupError) as e:
671 e._org_exc_tb = traceback.format_exc()
685 e._org_exc_tb = traceback.format_exc()
672 raise exceptions.LookupException(e)(rev)
686 raise exceptions.LookupException(e)(rev)
673 except LookupError as e:
687 except LookupError as e:
674 e._org_exc_tb = traceback.format_exc()
688 e._org_exc_tb = traceback.format_exc()
675 raise exceptions.LookupException(e)(e.name)
689 raise exceptions.LookupException(e)(e.name)
676
690
677 if not both:
691 if not both:
678 return ctx.hex()
692 return ctx.hex()
679
693
680 ctx = repo[ctx.hex()]
694 ctx = repo[ctx.hex()]
681 return ctx.hex(), ctx.rev()
695 return ctx.hex(), ctx.rev()
682
696
683 return _lookup(context_uid, repo_id, revision, both)
697 return _lookup(context_uid, repo_id, revision, both)
684
698
685 @reraise_safe_exceptions
699 @reraise_safe_exceptions
686 def sync_push(self, wire, url):
700 def sync_push(self, wire, url):
687 if not self.check_url(url, wire['config']):
701 if not self.check_url(url, wire['config']):
688 return
702 return
689
703
690 repo = self._factory.repo(wire)
704 repo = self._factory.repo(wire)
691
705
692 # Disable any prompts for this repo
706 # Disable any prompts for this repo
693 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
707 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
694
708
695 bookmarks = list(dict(repo._bookmarks).keys())
709 bookmarks = list(dict(repo._bookmarks).keys())
696 remote = peer(repo, {}, safe_bytes(url))
710 remote = peer(repo, {}, safe_bytes(url))
697 # Disable any prompts for this remote
711 # Disable any prompts for this remote
698 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
712 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
699
713
700 return exchange.push(
714 return exchange.push(
701 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
715 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
702
716
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """Return the local numeric revision corresponding to *rev*."""
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, rev)
        return ctx.rev()
708
722
709 @reraise_safe_exceptions
723 @reraise_safe_exceptions
710 def rev_range(self, wire, commit_filter):
724 def rev_range(self, wire, commit_filter):
711 cache_on, context_uid, repo_id = self._cache_on(wire)
725 cache_on, context_uid, repo_id = self._cache_on(wire)
712 region = self._region(wire)
726 region = self._region(wire)
713
727
714 @region.conditional_cache_on_arguments(condition=cache_on)
728 @region.conditional_cache_on_arguments(condition=cache_on)
715 def _rev_range(_context_uid, _repo_id, _filter):
729 def _rev_range(_context_uid, _repo_id, _filter):
716 repo = self._factory.repo(wire)
730 repo = self._factory.repo(wire)
717 revisions = [
731 revisions = [
718 ascii_str(repo[rev].hex())
732 ascii_str(repo[rev].hex())
719 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
733 for rev in revrange(repo, list(map(ascii_bytes, commit_filter)))
720 ]
734 ]
721 return revisions
735 return revisions
722
736
723 return _rev_range(context_uid, repo_id, sorted(commit_filter))
737 return _rev_range(context_uid, repo_id, sorted(commit_filter))
724
738
725 @reraise_safe_exceptions
739 @reraise_safe_exceptions
726 def rev_range_hash(self, wire, node):
740 def rev_range_hash(self, wire, node):
727 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
728
742
729 def get_revs(repo, rev_opt):
743 def get_revs(repo, rev_opt):
730 if rev_opt:
744 if rev_opt:
731 revs = revrange(repo, rev_opt)
745 revs = revrange(repo, rev_opt)
732 if len(revs) == 0:
746 if len(revs) == 0:
733 return (nullrev, nullrev)
747 return (nullrev, nullrev)
734 return max(revs), min(revs)
748 return max(revs), min(revs)
735 else:
749 else:
736 return len(repo) - 1, 0
750 return len(repo) - 1, 0
737
751
738 stop, start = get_revs(repo, [node + ':'])
752 stop, start = get_revs(repo, [node + ':'])
739 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
753 revs = [ascii_str(repo[r].hex()) for r in range(start, stop + 1)]
740 return revs
754 return revs
741
755
742 @reraise_safe_exceptions
756 @reraise_safe_exceptions
743 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
757 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
744 other_path = kwargs.pop('other_path', None)
758 other_path = kwargs.pop('other_path', None)
745
759
746 # case when we want to compare two independent repositories
760 # case when we want to compare two independent repositories
747 if other_path and other_path != wire["path"]:
761 if other_path and other_path != wire["path"]:
748 baseui = self._factory._create_config(wire["config"])
762 baseui = self._factory._create_config(wire["config"])
749 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
763 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
750 else:
764 else:
751 repo = self._factory.repo(wire)
765 repo = self._factory.repo(wire)
752 return list(repo.revs(rev_spec, *args))
766 return list(repo.revs(rev_spec, *args))
753
767
754 @reraise_safe_exceptions
768 @reraise_safe_exceptions
755 def verify(self, wire,):
769 def verify(self, wire,):
756 repo = self._factory.repo(wire)
770 repo = self._factory.repo(wire)
757 baseui = self._factory._create_config(wire['config'])
771 baseui = self._factory._create_config(wire['config'])
758
772
759 baseui, output = patch_ui_message_output(baseui)
773 baseui, output = patch_ui_message_output(baseui)
760
774
761 repo.ui = baseui
775 repo.ui = baseui
762 verify.verify(repo)
776 verify.verify(repo)
763 return output.getvalue()
777 return output.getvalue()
764
778
765 @reraise_safe_exceptions
779 @reraise_safe_exceptions
766 def hg_update_cache(self, wire,):
780 def hg_update_cache(self, wire,):
767 repo = self._factory.repo(wire)
781 repo = self._factory.repo(wire)
768 baseui = self._factory._create_config(wire['config'])
782 baseui = self._factory._create_config(wire['config'])
769 baseui, output = patch_ui_message_output(baseui)
783 baseui, output = patch_ui_message_output(baseui)
770
784
771 repo.ui = baseui
785 repo.ui = baseui
772 with repo.wlock(), repo.lock():
786 with repo.wlock(), repo.lock():
773 repo.updatecaches(full=True)
787 repo.updatecaches(full=True)
774
788
775 return output.getvalue()
789 return output.getvalue()
776
790
777 @reraise_safe_exceptions
791 @reraise_safe_exceptions
778 def hg_rebuild_fn_cache(self, wire,):
792 def hg_rebuild_fn_cache(self, wire,):
779 repo = self._factory.repo(wire)
793 repo = self._factory.repo(wire)
780 baseui = self._factory._create_config(wire['config'])
794 baseui = self._factory._create_config(wire['config'])
781 baseui, output = patch_ui_message_output(baseui)
795 baseui, output = patch_ui_message_output(baseui)
782
796
783 repo.ui = baseui
797 repo.ui = baseui
784
798
785 repair.rebuildfncache(baseui, repo)
799 repair.rebuildfncache(baseui, repo)
786
800
787 return output.getvalue()
801 return output.getvalue()
788
802
789 @reraise_safe_exceptions
803 @reraise_safe_exceptions
790 def tags(self, wire):
804 def tags(self, wire):
791 cache_on, context_uid, repo_id = self._cache_on(wire)
805 cache_on, context_uid, repo_id = self._cache_on(wire)
792 region = self._region(wire)
806 region = self._region(wire)
793
807
794 @region.conditional_cache_on_arguments(condition=cache_on)
808 @region.conditional_cache_on_arguments(condition=cache_on)
795 def _tags(_context_uid, _repo_id):
809 def _tags(_context_uid, _repo_id):
796 repo = self._factory.repo(wire)
810 repo = self._factory.repo(wire)
797 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
811 return {safe_str(name): ascii_str(hex(sha)) for name, sha in repo.tags().items()}
798
812
799 return _tags(context_uid, repo_id)
813 return _tags(context_uid, repo_id)
800
814
    @reraise_safe_exceptions
    def update(self, wire, node=None, clean=False):
        """Update the working directory to *node* (``hg update``, optionally ``--clean``)."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.update(baseui, repo, node=node, clean=clean)
806
820
807 @reraise_safe_exceptions
821 @reraise_safe_exceptions
808 def identify(self, wire):
822 def identify(self, wire):
809 repo = self._factory.repo(wire)
823 repo = self._factory.repo(wire)
810 baseui = self._factory._create_config(wire['config'])
824 baseui = self._factory._create_config(wire['config'])
811 output = io.BytesIO()
825 output = io.BytesIO()
812 baseui.write = output.write
826 baseui.write = output.write
813 # This is required to get a full node id
827 # This is required to get a full node id
814 baseui.debugflag = True
828 baseui.debugflag = True
815 commands.identify(baseui, repo, id=True)
829 commands.identify(baseui, repo, id=True)
816
830
817 return output.getvalue()
831 return output.getvalue()
818
832
819 @reraise_safe_exceptions
833 @reraise_safe_exceptions
820 def heads(self, wire, branch=None):
834 def heads(self, wire, branch=None):
821 repo = self._factory.repo(wire)
835 repo = self._factory.repo(wire)
822 baseui = self._factory._create_config(wire['config'])
836 baseui = self._factory._create_config(wire['config'])
823 output = io.BytesIO()
837 output = io.BytesIO()
824
838
825 def write(data, **unused_kwargs):
839 def write(data, **unused_kwargs):
826 output.write(data)
840 output.write(data)
827
841
828 baseui.write = write
842 baseui.write = write
829 if branch:
843 if branch:
830 args = [safe_bytes(branch)]
844 args = [safe_bytes(branch)]
831 else:
845 else:
832 args = []
846 args = []
833 commands.heads(baseui, repo, template=b'{node} ', *args)
847 commands.heads(baseui, repo, template=b'{node} ', *args)
834
848
835 return output.getvalue()
849 return output.getvalue()
836
850
837 @reraise_safe_exceptions
851 @reraise_safe_exceptions
838 def ancestor(self, wire, revision1, revision2):
852 def ancestor(self, wire, revision1, revision2):
839 repo = self._factory.repo(wire)
853 repo = self._factory.repo(wire)
840 changelog = repo.changelog
854 changelog = repo.changelog
841 lookup = repo.lookup
855 lookup = repo.lookup
842 a = changelog.ancestor(lookup(revision1), lookup(revision2))
856 a = changelog.ancestor(lookup(revision1), lookup(revision2))
843 return hex(a)
857 return hex(a)
844
858
    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        """Clone *source* into *dest*; skip the working-dir checkout unless requested."""
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, safe_bytes(source), safe_bytes(dest), noupdate=not update_after_clone)
849
863
    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        """
        Create an in-memory commit from the given file changes and return the
        new commit's hex id.

        ``files`` lists all touched paths; ``removed`` is a collection of
        path strings to delete; ``updated`` is a list of dicts with
        'path', 'content' and 'mode' keys for added/changed files.
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # honour the repo's phases.publish setting for the new commit's phase
        publishing = baseui.configbool(b'phases', b'publish')

        def _filectxfn(_repo, ctx, path: bytes):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if safe_str(path) in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if safe_bytes(node['path']) == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=safe_bytes(node['path']),
                        data=safe_bytes(node['content']),
                        islink=False,
                        # executable bit derived from the posix mode
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)
            # any path not present in `removed` or `updated` is a caller error
            abort_exc = exceptions.AbortException()
            raise abort_exc(f"Given path haven't been marked as added, changed or removed ({path})")

        if publishing:
            new_commit_phase = b'public'
        else:
            new_commit_phase = b'draft'
        with repo.ui.configoverride({(b'phases', b'new-commit'): new_commit_phase}):
            # extra commit metadata must be bytes on both sides
            kwargs = {safe_bytes(k): safe_bytes(v) for k, v in extra.items()}
            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=safe_bytes(message),
                files=[safe_bytes(x) for x in files],
                filectxfn=_filectxfn,
                user=safe_bytes(user),
                date=(commit_time, commit_timezone),
                extra=kwargs)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

            return new_id
902
916
903 @reraise_safe_exceptions
917 @reraise_safe_exceptions
904 def pull(self, wire, url, commit_ids=None):
918 def pull(self, wire, url, commit_ids=None):
905 repo = self._factory.repo(wire)
919 repo = self._factory.repo(wire)
906 # Disable any prompts for this repo
920 # Disable any prompts for this repo
907 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
921 repo.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
908
922
909 remote = peer(repo, {}, safe_bytes(url))
923 remote = peer(repo, {}, safe_bytes(url))
910 # Disable any prompts for this remote
924 # Disable any prompts for this remote
911 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
925 remote.ui.setconfig(b'ui', b'interactive', b'off', b'-y')
912
926
913 if commit_ids:
927 if commit_ids:
914 commit_ids = [bin(commit_id) for commit_id in commit_ids]
928 commit_ids = [bin(commit_id) for commit_id in commit_ids]
915
929
916 return exchange.pull(
930 return exchange.pull(
917 repo, remote, heads=commit_ids, force=None).cgresult
931 repo, remote, heads=commit_ids, force=None).cgresult
918
932
919 @reraise_safe_exceptions
933 @reraise_safe_exceptions
920 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
934 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
921 repo = self._factory.repo(wire)
935 repo = self._factory.repo(wire)
922 baseui = self._factory._create_config(wire['config'], hooks=hooks)
936 baseui = self._factory._create_config(wire['config'], hooks=hooks)
923
937
924 # Mercurial internally has a lot of logic that checks ONLY if
938 # Mercurial internally has a lot of logic that checks ONLY if
925 # option is defined, we just pass those if they are defined then
939 # option is defined, we just pass those if they are defined then
926 opts = {}
940 opts = {}
927 if bookmark:
941 if bookmark:
928 opts['bookmark'] = bookmark
942 opts['bookmark'] = bookmark
929 if branch:
943 if branch:
930 opts['branch'] = branch
944 opts['branch'] = branch
931 if revision:
945 if revision:
932 opts['rev'] = revision
946 opts['rev'] = revision
933
947
934 commands.pull(baseui, repo, source, **opts)
948 commands.pull(baseui, repo, source, **opts)
935
949
    @reraise_safe_exceptions
    def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
        """Push *revisions* to *dest_path* (``hg push``, optionally ``--new-branch``)."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)
        commands.push(baseui, repo, dest=dest_path, rev=revisions,
                      new_branch=push_branches)
942
956
    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        """Strip *revision* (and its descendants) from the repo via the strip extension."""
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        hgext_strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)
949
963
950 @reraise_safe_exceptions
964 @reraise_safe_exceptions
951 def get_unresolved_files(self, wire):
965 def get_unresolved_files(self, wire):
952 repo = self._factory.repo(wire)
966 repo = self._factory.repo(wire)
953
967
954 log.debug('Calculating unresolved files for repo: %s', repo)
968 log.debug('Calculating unresolved files for repo: %s', repo)
955 output = io.BytesIO()
969 output = io.BytesIO()
956
970
957 def write(data, **unused_kwargs):
971 def write(data, **unused_kwargs):
958 output.write(data)
972 output.write(data)
959
973
960 baseui = self._factory._create_config(wire['config'])
974 baseui = self._factory._create_config(wire['config'])
961 baseui.write = write
975 baseui.write = write
962
976
963 commands.resolve(baseui, repo, list=True)
977 commands.resolve(baseui, repo, list=True)
964 unresolved = output.getvalue().splitlines(0)
978 unresolved = output.getvalue().splitlines(0)
965 return unresolved
979 return unresolved
966
980
967 @reraise_safe_exceptions
981 @reraise_safe_exceptions
968 def merge(self, wire, revision):
982 def merge(self, wire, revision):
969 repo = self._factory.repo(wire)
983 repo = self._factory.repo(wire)
970 baseui = self._factory._create_config(wire['config'])
984 baseui = self._factory._create_config(wire['config'])
971 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
985 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
972
986
973 # In case of sub repositories are used mercurial prompts the user in
987 # In case of sub repositories are used mercurial prompts the user in
974 # case of merge conflicts or different sub repository sources. By
988 # case of merge conflicts or different sub repository sources. By
975 # setting the interactive flag to `False` mercurial doesn't prompt the
989 # setting the interactive flag to `False` mercurial doesn't prompt the
976 # used but instead uses a default value.
990 # used but instead uses a default value.
977 repo.ui.setconfig(b'ui', b'interactive', False)
991 repo.ui.setconfig(b'ui', b'interactive', False)
978 commands.merge(baseui, repo, rev=revision)
992 commands.merge(baseui, repo, rev=revision)
979
993
980 @reraise_safe_exceptions
994 @reraise_safe_exceptions
981 def merge_state(self, wire):
995 def merge_state(self, wire):
982 repo = self._factory.repo(wire)
996 repo = self._factory.repo(wire)
983 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
997 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
984
998
985 # In case of sub repositories are used mercurial prompts the user in
999 # In case of sub repositories are used mercurial prompts the user in
986 # case of merge conflicts or different sub repository sources. By
1000 # case of merge conflicts or different sub repository sources. By
987 # setting the interactive flag to `False` mercurial doesn't prompt the
1001 # setting the interactive flag to `False` mercurial doesn't prompt the
988 # used but instead uses a default value.
1002 # used but instead uses a default value.
989 repo.ui.setconfig(b'ui', b'interactive', False)
1003 repo.ui.setconfig(b'ui', b'interactive', False)
990 ms = hg_merge.mergestate(repo)
1004 ms = hg_merge.mergestate(repo)
991 return [x for x in ms.unresolved()]
1005 return [x for x in ms.unresolved()]
992
1006
993 @reraise_safe_exceptions
1007 @reraise_safe_exceptions
994 def commit(self, wire, message, username, close_branch=False):
1008 def commit(self, wire, message, username, close_branch=False):
995 repo = self._factory.repo(wire)
1009 repo = self._factory.repo(wire)
996 baseui = self._factory._create_config(wire['config'])
1010 baseui = self._factory._create_config(wire['config'])
997 repo.ui.setconfig(b'ui', b'username', username)
1011 repo.ui.setconfig(b'ui', b'username', username)
998 commands.commit(baseui, repo, message=message, close_branch=close_branch)
1012 commands.commit(baseui, repo, message=message, close_branch=close_branch)
999
1013
1000 @reraise_safe_exceptions
1014 @reraise_safe_exceptions
1001 def rebase(self, wire, source=None, dest=None, abort=False):
1015 def rebase(self, wire, source=None, dest=None, abort=False):
1002 repo = self._factory.repo(wire)
1016 repo = self._factory.repo(wire)
1003 baseui = self._factory._create_config(wire['config'])
1017 baseui = self._factory._create_config(wire['config'])
1004 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1018 repo.ui.setconfig(b'ui', b'merge', b'internal:dump')
1005 # In case of sub repositories are used mercurial prompts the user in
1019 # In case of sub repositories are used mercurial prompts the user in
1006 # case of merge conflicts or different sub repository sources. By
1020 # case of merge conflicts or different sub repository sources. By
1007 # setting the interactive flag to `False` mercurial doesn't prompt the
1021 # setting the interactive flag to `False` mercurial doesn't prompt the
1008 # used but instead uses a default value.
1022 # used but instead uses a default value.
1009 repo.ui.setconfig(b'ui', b'interactive', False)
1023 repo.ui.setconfig(b'ui', b'interactive', False)
1010 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1024 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
1011
1025
1012 @reraise_safe_exceptions
1026 @reraise_safe_exceptions
1013 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1027 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
1014 repo = self._factory.repo(wire)
1028 repo = self._factory.repo(wire)
1015 ctx = self._get_ctx(repo, revision)
1029 ctx = self._get_ctx(repo, revision)
1016 node = ctx.node()
1030 node = ctx.node()
1017
1031
1018 date = (tag_time, tag_timezone)
1032 date = (tag_time, tag_timezone)
1019 try:
1033 try:
1020 hg_tag.tag(repo, name, node, message, local, user, date)
1034 hg_tag.tag(repo, name, node, message, local, user, date)
1021 except Abort as e:
1035 except Abort as e:
1022 log.exception("Tag operation aborted")
1036 log.exception("Tag operation aborted")
1023 # Exception can contain unicode which we convert
1037 # Exception can contain unicode which we convert
1024 raise exceptions.AbortException(e)(repr(e))
1038 raise exceptions.AbortException(e)(repr(e))
1025
1039
1026 @reraise_safe_exceptions
1040 @reraise_safe_exceptions
1027 def bookmark(self, wire, bookmark, revision=None):
1041 def bookmark(self, wire, bookmark, revision=None):
1028 repo = self._factory.repo(wire)
1042 repo = self._factory.repo(wire)
1029 baseui = self._factory._create_config(wire['config'])
1043 baseui = self._factory._create_config(wire['config'])
1030 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1044 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1031
1045
1032 @reraise_safe_exceptions
1046 @reraise_safe_exceptions
1033 def install_hooks(self, wire, force=False):
1047 def install_hooks(self, wire, force=False):
1034 # we don't need any special hooks for Mercurial
1048 # we don't need any special hooks for Mercurial
1035 pass
1049 pass
1036
1050
1037 @reraise_safe_exceptions
1051 @reraise_safe_exceptions
1038 def get_hooks_info(self, wire):
1052 def get_hooks_info(self, wire):
1039 return {
1053 return {
1040 'pre_version': vcsserver.__version__,
1054 'pre_version': vcsserver.__version__,
1041 'post_version': vcsserver.__version__,
1055 'post_version': vcsserver.__version__,
1042 }
1056 }
1043
1057
1044 @reraise_safe_exceptions
1058 @reraise_safe_exceptions
1045 def set_head_ref(self, wire, head_name):
1059 def set_head_ref(self, wire, head_name):
1046 pass
1060 pass
1047
1061
1048 @reraise_safe_exceptions
1062 @reraise_safe_exceptions
1049 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1063 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1050 archive_dir_name, commit_id):
1064 archive_dir_name, commit_id):
1051
1065
1052 def file_walker(_commit_id, path):
1066 def file_walker(_commit_id, path):
1053 repo = self._factory.repo(wire)
1067 repo = self._factory.repo(wire)
1054 ctx = repo[_commit_id]
1068 ctx = repo[_commit_id]
1055 is_root = path in ['', '/']
1069 is_root = path in ['', '/']
1056 if is_root:
1070 if is_root:
1057 matcher = alwaysmatcher(badfn=None)
1071 matcher = alwaysmatcher(badfn=None)
1058 else:
1072 else:
1059 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1073 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1060 file_iter = ctx.manifest().walk(matcher)
1074 file_iter = ctx.manifest().walk(matcher)
1061
1075
1062 for fn in file_iter:
1076 for fn in file_iter:
1063 file_path = fn
1077 file_path = fn
1064 flags = ctx.flags(fn)
1078 flags = ctx.flags(fn)
1065 mode = b'x' in flags and 0o755 or 0o644
1079 mode = b'x' in flags and 0o755 or 0o644
1066 is_link = b'l' in flags
1080 is_link = b'l' in flags
1067
1081
1068 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1082 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1069
1083
1070 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1084 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1071 archive_dir_name, commit_id)
1085 archive_dir_name, commit_id)
1072
1086
@@ -1,864 +1,875 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31 import svn.client
31 import svn.client
32 import svn.core
32 import svn.core
33 import svn.delta
33 import svn.delta
34 import svn.diff
34 import svn.diff
35 import svn.fs
35 import svn.fs
36 import svn.repos
36 import svn.repos
37
37
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.str_utils import safe_str
41 from vcsserver.str_utils import safe_str
42 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.lib.svnremoterepo import svnremoterepo
43 from vcsserver.lib.svnremoterepo import svnremoterepo
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 svn_compatible_versions_map = {
47 svn_compatible_versions_map = {
48 'pre-1.4-compatible': '1.3',
48 'pre-1.4-compatible': '1.3',
49 'pre-1.5-compatible': '1.4',
49 'pre-1.5-compatible': '1.4',
50 'pre-1.6-compatible': '1.5',
50 'pre-1.6-compatible': '1.5',
51 'pre-1.8-compatible': '1.7',
51 'pre-1.8-compatible': '1.7',
52 'pre-1.9-compatible': '1.8',
52 'pre-1.9-compatible': '1.8',
53 }
53 }
54
54
55 current_compatible_version = '1.14'
55 current_compatible_version = '1.14'
56
56
57
57
58 def reraise_safe_exceptions(func):
58 def reraise_safe_exceptions(func):
59 """Decorator for converting svn exceptions to something neutral."""
59 """Decorator for converting svn exceptions to something neutral."""
60 def wrapper(*args, **kwargs):
60 def wrapper(*args, **kwargs):
61 try:
61 try:
62 return func(*args, **kwargs)
62 return func(*args, **kwargs)
63 except Exception as e:
63 except Exception as e:
64 if not hasattr(e, '_vcs_kind'):
64 if not hasattr(e, '_vcs_kind'):
65 log.exception("Unhandled exception in svn remote call")
65 log.exception("Unhandled exception in svn remote call")
66 raise_from_original(exceptions.UnhandledException(e))
66 raise_from_original(exceptions.UnhandledException(e))
67 raise
67 raise
68 return wrapper
68 return wrapper
69
69
70
70
71 class SubversionFactory(RepoFactory):
71 class SubversionFactory(RepoFactory):
72 repo_type = 'svn'
72 repo_type = 'svn'
73
73
74 def _create_repo(self, wire, create, compatible_version):
74 def _create_repo(self, wire, create, compatible_version):
75 path = svn.core.svn_path_canonicalize(wire['path'])
75 path = svn.core.svn_path_canonicalize(wire['path'])
76 if create:
76 if create:
77 fs_config = {'compatible-version': current_compatible_version}
77 fs_config = {'compatible-version': current_compatible_version}
78 if compatible_version:
78 if compatible_version:
79
79
80 compatible_version_string = \
80 compatible_version_string = \
81 svn_compatible_versions_map.get(compatible_version) \
81 svn_compatible_versions_map.get(compatible_version) \
82 or compatible_version
82 or compatible_version
83 fs_config['compatible-version'] = compatible_version_string
83 fs_config['compatible-version'] = compatible_version_string
84
84
85 log.debug('Create SVN repo with config "%s"', fs_config)
85 log.debug('Create SVN repo with config "%s"', fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
87 else:
87 else:
88 repo = svn.repos.open(path)
88 repo = svn.repos.open(path)
89
89
90 log.debug('Got SVN object: %s', repo)
90 log.debug('Got SVN object: %s', repo)
91 return repo
91 return repo
92
92
93 def repo(self, wire, create=False, compatible_version=None):
93 def repo(self, wire, create=False, compatible_version=None):
94 """
94 """
95 Get a repository instance for the given path.
95 Get a repository instance for the given path.
96 """
96 """
97 return self._create_repo(wire, create, compatible_version)
97 return self._create_repo(wire, create, compatible_version)
98
98
99
99
100 NODE_TYPE_MAPPING = {
100 NODE_TYPE_MAPPING = {
101 svn.core.svn_node_file: 'file',
101 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_dir: 'dir',
102 svn.core.svn_node_dir: 'dir',
103 }
103 }
104
104
105
105
106 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
107
107
108 def __init__(self, factory, hg_factory=None):
108 def __init__(self, factory, hg_factory=None):
109 self._factory = factory
109 self._factory = factory
110
110
111 @reraise_safe_exceptions
111 @reraise_safe_exceptions
112 def discover_svn_version(self):
112 def discover_svn_version(self):
113 try:
113 try:
114 import svn.core
114 import svn.core
115 svn_ver = svn.core.SVN_VERSION
115 svn_ver = svn.core.SVN_VERSION
116 except ImportError:
116 except ImportError:
117 svn_ver = None
117 svn_ver = None
118 return safe_str(svn_ver)
118 return safe_str(svn_ver)
119
119
120 @reraise_safe_exceptions
120 @reraise_safe_exceptions
121 def is_empty(self, wire):
121 def is_empty(self, wire):
122
122
123 try:
123 try:
124 return self.lookup(wire, -1) == 0
124 return self.lookup(wire, -1) == 0
125 except Exception:
125 except Exception:
126 log.exception("failed to read object_store")
126 log.exception("failed to read object_store")
127 return False
127 return False
128
128
129 def check_url(self, url):
129 def check_url(self, url):
130
130
131 # uuid function get's only valid UUID from proper repo, else
131 # uuid function get's only valid UUID from proper repo, else
132 # throws exception
132 # throws exception
133 username, password, src_url = self.get_url_and_credentials(url)
133 username, password, src_url = self.get_url_and_credentials(url)
134 try:
134 try:
135 svnremoterepo(username, password, src_url).svn().uuid
135 svnremoterepo(username, password, src_url).svn().uuid
136 except Exception:
136 except Exception:
137 tb = traceback.format_exc()
137 tb = traceback.format_exc()
138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
139 raise URLError(
139 raise URLError(
140 '"%s" is not a valid Subversion source url.' % (url, ))
140 '"%s" is not a valid Subversion source url.' % (url, ))
141 return True
141 return True
142
142
143 def is_path_valid_repository(self, wire, path):
143 def is_path_valid_repository(self, wire, path):
144
144
145 # NOTE(marcink): short circuit the check for SVN repo
145 # NOTE(marcink): short circuit the check for SVN repo
146 # the repos.open might be expensive to check, but we have one cheap
146 # the repos.open might be expensive to check, but we have one cheap
147 # pre condition that we can use, to check for 'format' file
147 # pre condition that we can use, to check for 'format' file
148
148
149 if not os.path.isfile(os.path.join(path, 'format')):
149 if not os.path.isfile(os.path.join(path, 'format')):
150 return False
150 return False
151
151
152 try:
152 try:
153 svn.repos.open(path)
153 svn.repos.open(path)
154 except svn.core.SubversionException:
154 except svn.core.SubversionException:
155 tb = traceback.format_exc()
155 tb = traceback.format_exc()
156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
157 return False
157 return False
158 return True
158 return True
159
159
160 @reraise_safe_exceptions
160 @reraise_safe_exceptions
161 def verify(self, wire,):
161 def verify(self, wire,):
162 repo_path = wire['path']
162 repo_path = wire['path']
163 if not self.is_path_valid_repository(wire, repo_path):
163 if not self.is_path_valid_repository(wire, repo_path):
164 raise Exception(
164 raise Exception(
165 "Path %s is not a valid Subversion repository." % repo_path)
165 "Path %s is not a valid Subversion repository." % repo_path)
166
166
167 cmd = ['svnadmin', 'info', repo_path]
167 cmd = ['svnadmin', 'info', repo_path]
168 stdout, stderr = subprocessio.run_command(cmd)
168 stdout, stderr = subprocessio.run_command(cmd)
169 return stdout
169 return stdout
170
170
171 def lookup(self, wire, revision):
171 def lookup(self, wire, revision):
172 if revision not in [-1, None, 'HEAD']:
172 if revision not in [-1, None, 'HEAD']:
173 raise NotImplementedError
173 raise NotImplementedError
174 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
175 fs_ptr = svn.repos.fs(repo)
175 fs_ptr = svn.repos.fs(repo)
176 head = svn.fs.youngest_rev(fs_ptr)
176 head = svn.fs.youngest_rev(fs_ptr)
177 return head
177 return head
178
178
179 def lookup_interval(self, wire, start_ts, end_ts):
179 def lookup_interval(self, wire, start_ts, end_ts):
180 repo = self._factory.repo(wire)
180 repo = self._factory.repo(wire)
181 fsobj = svn.repos.fs(repo)
181 fsobj = svn.repos.fs(repo)
182 start_rev = None
182 start_rev = None
183 end_rev = None
183 end_rev = None
184 if start_ts:
184 if start_ts:
185 start_ts_svn = apr_time_t(start_ts)
185 start_ts_svn = apr_time_t(start_ts)
186 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
186 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
187 else:
187 else:
188 start_rev = 1
188 start_rev = 1
189 if end_ts:
189 if end_ts:
190 end_ts_svn = apr_time_t(end_ts)
190 end_ts_svn = apr_time_t(end_ts)
191 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
191 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
192 else:
192 else:
193 end_rev = svn.fs.youngest_rev(fsobj)
193 end_rev = svn.fs.youngest_rev(fsobj)
194 return start_rev, end_rev
194 return start_rev, end_rev
195
195
196 def revision_properties(self, wire, revision):
196 def revision_properties(self, wire, revision):
197
197
198 cache_on, context_uid, repo_id = self._cache_on(wire)
198 cache_on, context_uid, repo_id = self._cache_on(wire)
199 region = self._region(wire)
199 region = self._region(wire)
200 @region.conditional_cache_on_arguments(condition=cache_on)
200 @region.conditional_cache_on_arguments(condition=cache_on)
201 def _revision_properties(_repo_id, _revision):
201 def _revision_properties(_repo_id, _revision):
202 repo = self._factory.repo(wire)
202 repo = self._factory.repo(wire)
203 fs_ptr = svn.repos.fs(repo)
203 fs_ptr = svn.repos.fs(repo)
204 return svn.fs.revision_proplist(fs_ptr, revision)
204 return svn.fs.revision_proplist(fs_ptr, revision)
205 return _revision_properties(repo_id, revision)
205 return _revision_properties(repo_id, revision)
206
206
207 def revision_changes(self, wire, revision):
207 def revision_changes(self, wire, revision):
208
208
209 repo = self._factory.repo(wire)
209 repo = self._factory.repo(wire)
210 fsobj = svn.repos.fs(repo)
210 fsobj = svn.repos.fs(repo)
211 rev_root = svn.fs.revision_root(fsobj, revision)
211 rev_root = svn.fs.revision_root(fsobj, revision)
212
212
213 editor = svn.repos.ChangeCollector(fsobj, rev_root)
213 editor = svn.repos.ChangeCollector(fsobj, rev_root)
214 editor_ptr, editor_baton = svn.delta.make_editor(editor)
214 editor_ptr, editor_baton = svn.delta.make_editor(editor)
215 base_dir = ""
215 base_dir = ""
216 send_deltas = False
216 send_deltas = False
217 svn.repos.replay2(
217 svn.repos.replay2(
218 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
218 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
219 editor_ptr, editor_baton, None)
219 editor_ptr, editor_baton, None)
220
220
221 added = []
221 added = []
222 changed = []
222 changed = []
223 removed = []
223 removed = []
224
224
225 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
225 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
226 for path, change in editor.changes.items():
226 for path, change in editor.changes.items():
227 # TODO: Decide what to do with directory nodes. Subversion can add
227 # TODO: Decide what to do with directory nodes. Subversion can add
228 # empty directories.
228 # empty directories.
229
229
230 if change.item_kind == svn.core.svn_node_dir:
230 if change.item_kind == svn.core.svn_node_dir:
231 continue
231 continue
232 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
232 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
233 added.append(path)
233 added.append(path)
234 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
234 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
235 svn.repos.CHANGE_ACTION_REPLACE]:
235 svn.repos.CHANGE_ACTION_REPLACE]:
236 changed.append(path)
236 changed.append(path)
237 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
237 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
238 removed.append(path)
238 removed.append(path)
239 else:
239 else:
240 raise NotImplementedError(
240 raise NotImplementedError(
241 "Action %s not supported on path %s" % (
241 "Action %s not supported on path %s" % (
242 change.action, path))
242 change.action, path))
243
243
244 changes = {
244 changes = {
245 'added': added,
245 'added': added,
246 'changed': changed,
246 'changed': changed,
247 'removed': removed,
247 'removed': removed,
248 }
248 }
249 return changes
249 return changes
250
250
251 @reraise_safe_exceptions
251 @reraise_safe_exceptions
252 def node_history(self, wire, path, revision, limit):
252 def node_history(self, wire, path, revision, limit):
253 cache_on, context_uid, repo_id = self._cache_on(wire)
253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 region = self._region(wire)
254 region = self._region(wire)
255 @region.conditional_cache_on_arguments(condition=cache_on)
255 @region.conditional_cache_on_arguments(condition=cache_on)
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 cross_copies = False
257 cross_copies = False
258 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
259 fsobj = svn.repos.fs(repo)
259 fsobj = svn.repos.fs(repo)
260 rev_root = svn.fs.revision_root(fsobj, revision)
260 rev_root = svn.fs.revision_root(fsobj, revision)
261
261
262 history_revisions = []
262 history_revisions = []
263 history = svn.fs.node_history(rev_root, path)
263 history = svn.fs.node_history(rev_root, path)
264 history = svn.fs.history_prev(history, cross_copies)
264 history = svn.fs.history_prev(history, cross_copies)
265 while history:
265 while history:
266 __, node_revision = svn.fs.history_location(history)
266 __, node_revision = svn.fs.history_location(history)
267 history_revisions.append(node_revision)
267 history_revisions.append(node_revision)
268 if limit and len(history_revisions) >= limit:
268 if limit and len(history_revisions) >= limit:
269 break
269 break
270 history = svn.fs.history_prev(history, cross_copies)
270 history = svn.fs.history_prev(history, cross_copies)
271 return history_revisions
271 return history_revisions
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273
273
274 def node_properties(self, wire, path, revision):
274 def node_properties(self, wire, path, revision):
275 cache_on, context_uid, repo_id = self._cache_on(wire)
275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 region = self._region(wire)
276 region = self._region(wire)
277 @region.conditional_cache_on_arguments(condition=cache_on)
277 @region.conditional_cache_on_arguments(condition=cache_on)
278 def _node_properties(_repo_id, _path, _revision):
278 def _node_properties(_repo_id, _path, _revision):
279 repo = self._factory.repo(wire)
279 repo = self._factory.repo(wire)
280 fsobj = svn.repos.fs(repo)
280 fsobj = svn.repos.fs(repo)
281 rev_root = svn.fs.revision_root(fsobj, revision)
281 rev_root = svn.fs.revision_root(fsobj, revision)
282 return svn.fs.node_proplist(rev_root, path)
282 return svn.fs.node_proplist(rev_root, path)
283 return _node_properties(repo_id, path, revision)
283 return _node_properties(repo_id, path, revision)
284
284
285 def file_annotate(self, wire, path, revision):
285 def file_annotate(self, wire, path, revision):
286 abs_path = 'file://' + urllib.request.pathname2url(
286 abs_path = 'file://' + urllib.request.pathname2url(
287 vcspath.join(wire['path'], path))
287 vcspath.join(wire['path'], path))
288 file_uri = svn.core.svn_path_canonicalize(abs_path)
288 file_uri = svn.core.svn_path_canonicalize(abs_path)
289
289
290 start_rev = svn_opt_revision_value_t(0)
290 start_rev = svn_opt_revision_value_t(0)
291 peg_rev = svn_opt_revision_value_t(revision)
291 peg_rev = svn_opt_revision_value_t(revision)
292 end_rev = peg_rev
292 end_rev = peg_rev
293
293
294 annotations = []
294 annotations = []
295
295
296 def receiver(line_no, revision, author, date, line, pool):
296 def receiver(line_no, revision, author, date, line, pool):
297 annotations.append((line_no, revision, line))
297 annotations.append((line_no, revision, line))
298
298
299 # TODO: Cannot use blame5, missing typemap function in the swig code
299 # TODO: Cannot use blame5, missing typemap function in the swig code
300 try:
300 try:
301 svn.client.blame2(
301 svn.client.blame2(
302 file_uri, peg_rev, start_rev, end_rev,
302 file_uri, peg_rev, start_rev, end_rev,
303 receiver, svn.client.create_context())
303 receiver, svn.client.create_context())
304 except svn.core.SubversionException as exc:
304 except svn.core.SubversionException as exc:
305 log.exception("Error during blame operation.")
305 log.exception("Error during blame operation.")
306 raise Exception(
306 raise Exception(
307 "Blame not supported or file does not exist at path %s. "
307 "Blame not supported or file does not exist at path %s. "
308 "Error %s." % (path, exc))
308 "Error %s." % (path, exc))
309
309
310 return annotations
310 return annotations
311
311
312 def get_node_type(self, wire, path, revision=None):
312 def get_node_type(self, wire, path, revision=None):
313
313
314 cache_on, context_uid, repo_id = self._cache_on(wire)
314 cache_on, context_uid, repo_id = self._cache_on(wire)
315 region = self._region(wire)
315 region = self._region(wire)
316 @region.conditional_cache_on_arguments(condition=cache_on)
316 @region.conditional_cache_on_arguments(condition=cache_on)
317 def _get_node_type(_repo_id, _path, _revision):
317 def _get_node_type(_repo_id, _path, _revision):
318 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
319 fs_ptr = svn.repos.fs(repo)
319 fs_ptr = svn.repos.fs(repo)
320 if _revision is None:
320 if _revision is None:
321 _revision = svn.fs.youngest_rev(fs_ptr)
321 _revision = svn.fs.youngest_rev(fs_ptr)
322 root = svn.fs.revision_root(fs_ptr, _revision)
322 root = svn.fs.revision_root(fs_ptr, _revision)
323 node = svn.fs.check_path(root, path)
323 node = svn.fs.check_path(root, path)
324 return NODE_TYPE_MAPPING.get(node, None)
324 return NODE_TYPE_MAPPING.get(node, None)
325 return _get_node_type(repo_id, path, revision)
325 return _get_node_type(repo_id, path, revision)
326
326
327 def get_nodes(self, wire, path, revision=None):
327 def get_nodes(self, wire, path, revision=None):
328
328
329 cache_on, context_uid, repo_id = self._cache_on(wire)
329 cache_on, context_uid, repo_id = self._cache_on(wire)
330 region = self._region(wire)
330 region = self._region(wire)
331 @region.conditional_cache_on_arguments(condition=cache_on)
331 @region.conditional_cache_on_arguments(condition=cache_on)
332 def _get_nodes(_repo_id, _path, _revision):
332 def _get_nodes(_repo_id, _path, _revision):
333 repo = self._factory.repo(wire)
333 repo = self._factory.repo(wire)
334 fsobj = svn.repos.fs(repo)
334 fsobj = svn.repos.fs(repo)
335 if _revision is None:
335 if _revision is None:
336 _revision = svn.fs.youngest_rev(fsobj)
336 _revision = svn.fs.youngest_rev(fsobj)
337 root = svn.fs.revision_root(fsobj, _revision)
337 root = svn.fs.revision_root(fsobj, _revision)
338 entries = svn.fs.dir_entries(root, path)
338 entries = svn.fs.dir_entries(root, path)
339 result = []
339 result = []
340 for entry_path, entry_info in entries.items():
340 for entry_path, entry_info in entries.items():
341 result.append(
341 result.append(
342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 return result
343 return result
344 return _get_nodes(repo_id, path, revision)
344 return _get_nodes(repo_id, path, revision)
345
345
346 def get_file_content(self, wire, path, rev=None):
346 def get_file_content(self, wire, path, rev=None):
347 repo = self._factory.repo(wire)
347 repo = self._factory.repo(wire)
348 fsobj = svn.repos.fs(repo)
348 fsobj = svn.repos.fs(repo)
349 if rev is None:
349 if rev is None:
350 rev = svn.fs.youngest_revision(fsobj)
350 rev = svn.fs.youngest_revision(fsobj)
351 root = svn.fs.revision_root(fsobj, rev)
351 root = svn.fs.revision_root(fsobj, rev)
352 content = svn.core.Stream(svn.fs.file_contents(root, path))
352 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 return content.read()
353 return content.read()
354
354
355 def get_file_size(self, wire, path, revision=None):
355 def get_file_size(self, wire, path, revision=None):
356
356
357 cache_on, context_uid, repo_id = self._cache_on(wire)
357 cache_on, context_uid, repo_id = self._cache_on(wire)
358 region = self._region(wire)
358 region = self._region(wire)
359
359
360 @region.conditional_cache_on_arguments(condition=cache_on)
360 @region.conditional_cache_on_arguments(condition=cache_on)
361 def _get_file_size(_repo_id, _path, _revision):
361 def _get_file_size(_repo_id, _path, _revision):
362 repo = self._factory.repo(wire)
362 repo = self._factory.repo(wire)
363 fsobj = svn.repos.fs(repo)
363 fsobj = svn.repos.fs(repo)
364 if _revision is None:
364 if _revision is None:
365 _revision = svn.fs.youngest_revision(fsobj)
365 _revision = svn.fs.youngest_revision(fsobj)
366 root = svn.fs.revision_root(fsobj, _revision)
366 root = svn.fs.revision_root(fsobj, _revision)
367 size = svn.fs.file_length(root, path)
367 size = svn.fs.file_length(root, path)
368 return size
368 return size
369 return _get_file_size(repo_id, path, revision)
369 return _get_file_size(repo_id, path, revision)
370
370
371 def create_repository(self, wire, compatible_version=None):
371 def create_repository(self, wire, compatible_version=None):
372 log.info('Creating Subversion repository in path "%s"', wire['path'])
372 log.info('Creating Subversion repository in path "%s"', wire['path'])
373 self._factory.repo(wire, create=True,
373 self._factory.repo(wire, create=True,
374 compatible_version=compatible_version)
374 compatible_version=compatible_version)
375
375
376 def get_url_and_credentials(self, src_url):
376 def get_url_and_credentials(self, src_url):
377 obj = urllib.parse.urlparse(src_url)
377 obj = urllib.parse.urlparse(src_url)
378 username = obj.username or None
378 username = obj.username or None
379 password = obj.password or None
379 password = obj.password or None
380 return username, password, src_url
380 return username, password, src_url
381
381
382 def import_remote_repository(self, wire, src_url):
382 def import_remote_repository(self, wire, src_url):
383 repo_path = wire['path']
383 repo_path = wire['path']
384 if not self.is_path_valid_repository(wire, repo_path):
384 if not self.is_path_valid_repository(wire, repo_path):
385 raise Exception(
385 raise Exception(
386 "Path %s is not a valid Subversion repository." % repo_path)
386 "Path %s is not a valid Subversion repository." % repo_path)
387
387
388 username, password, src_url = self.get_url_and_credentials(src_url)
388 username, password, src_url = self.get_url_and_credentials(src_url)
389 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
389 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
390 '--trust-server-cert-failures=unknown-ca']
390 '--trust-server-cert-failures=unknown-ca']
391 if username and password:
391 if username and password:
392 rdump_cmd += ['--username', username, '--password', password]
392 rdump_cmd += ['--username', username, '--password', password]
393 rdump_cmd += [src_url]
393 rdump_cmd += [src_url]
394
394
395 rdump = subprocess.Popen(
395 rdump = subprocess.Popen(
396 rdump_cmd,
396 rdump_cmd,
397 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
397 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
398 load = subprocess.Popen(
398 load = subprocess.Popen(
399 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
399 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
400
400
401 # TODO: johbo: This can be a very long operation, might be better
401 # TODO: johbo: This can be a very long operation, might be better
402 # to track some kind of status and provide an api to check if the
402 # to track some kind of status and provide an api to check if the
403 # import is done.
403 # import is done.
404 rdump.wait()
404 rdump.wait()
405 load.wait()
405 load.wait()
406
406
407 log.debug('Return process ended with code: %s', rdump.returncode)
407 log.debug('Return process ended with code: %s', rdump.returncode)
408 if rdump.returncode != 0:
408 if rdump.returncode != 0:
409 errors = rdump.stderr.read()
409 errors = rdump.stderr.read()
410 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
410 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
411
411
412 reason = 'UNKNOWN'
412 reason = 'UNKNOWN'
413 if b'svnrdump: E230001:' in errors:
413 if b'svnrdump: E230001:' in errors:
414 reason = 'INVALID_CERTIFICATE'
414 reason = 'INVALID_CERTIFICATE'
415
415
416 if reason == 'UNKNOWN':
416 if reason == 'UNKNOWN':
417 reason = 'UNKNOWN:{}'.format(safe_str(errors))
417 reason = 'UNKNOWN:{}'.format(safe_str(errors))
418
418
419 raise Exception(
419 raise Exception(
420 'Failed to dump the remote repository from %s. Reason:%s' % (
420 'Failed to dump the remote repository from %s. Reason:%s' % (
421 src_url, reason))
421 src_url, reason))
422 if load.returncode != 0:
422 if load.returncode != 0:
423 raise Exception(
423 raise Exception(
424 'Failed to load the dump of remote repository from %s.' %
424 'Failed to load the dump of remote repository from %s.' %
425 (src_url, ))
425 (src_url, ))
426
426
427 def commit(self, wire, message, author, timestamp, updated, removed):
427 def commit(self, wire, message, author, timestamp, updated, removed):
428 assert isinstance(message, str)
428 assert isinstance(message, str)
429 assert isinstance(author, str)
429 assert isinstance(author, str)
430
430
431 repo = self._factory.repo(wire)
431 repo = self._factory.repo(wire)
432 fsobj = svn.repos.fs(repo)
432 fsobj = svn.repos.fs(repo)
433
433
434 rev = svn.fs.youngest_rev(fsobj)
434 rev = svn.fs.youngest_rev(fsobj)
435 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
435 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
436 txn_root = svn.fs.txn_root(txn)
436 txn_root = svn.fs.txn_root(txn)
437
437
438 for node in updated:
438 for node in updated:
439 TxnNodeProcessor(node, txn_root).update()
439 TxnNodeProcessor(node, txn_root).update()
440 for node in removed:
440 for node in removed:
441 TxnNodeProcessor(node, txn_root).remove()
441 TxnNodeProcessor(node, txn_root).remove()
442
442
443 commit_id = svn.repos.fs_commit_txn(repo, txn)
443 commit_id = svn.repos.fs_commit_txn(repo, txn)
444
444
445 if timestamp:
445 if timestamp:
446 apr_time = apr_time_t(timestamp)
446 apr_time = apr_time_t(timestamp)
447 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
447 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
448 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
448 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
449
449
450 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
450 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
451 return commit_id
451 return commit_id
452
452
453 def diff(self, wire, rev1, rev2, path1=None, path2=None,
453 def diff(self, wire, rev1, rev2, path1=None, path2=None,
454 ignore_whitespace=False, context=3):
454 ignore_whitespace=False, context=3):
455
455
456 wire.update(cache=False)
456 wire.update(cache=False)
457 repo = self._factory.repo(wire)
457 repo = self._factory.repo(wire)
458 diff_creator = SvnDiffer(
458 diff_creator = SvnDiffer(
459 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
459 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
460 try:
460 try:
461 return diff_creator.generate_diff()
461 return diff_creator.generate_diff()
462 except svn.core.SubversionException as e:
462 except svn.core.SubversionException as e:
463 log.exception(
463 log.exception(
464 "Error during diff operation operation. "
464 "Error during diff operation operation. "
465 "Path might not exist %s, %s" % (path1, path2))
465 "Path might not exist %s, %s" % (path1, path2))
466 return ""
466 return ""
467
467
468 @reraise_safe_exceptions
468 @reraise_safe_exceptions
469 def is_large_file(self, wire, path):
469 def is_large_file(self, wire, path):
470 return False
470 return False
471
471
472 @reraise_safe_exceptions
472 @reraise_safe_exceptions
473 def is_binary(self, wire, rev, path):
473 def is_binary(self, wire, rev, path):
474 cache_on, context_uid, repo_id = self._cache_on(wire)
474 cache_on, context_uid, repo_id = self._cache_on(wire)
475 region = self._region(wire)
475
476
476 region = self._region(wire)
477 @region.conditional_cache_on_arguments(condition=cache_on)
477 @region.conditional_cache_on_arguments(condition=cache_on)
478 def _is_binary(_repo_id, _rev, _path):
478 def _is_binary(_repo_id, _rev, _path):
479 raw_bytes = self.get_file_content(wire, path, rev)
479 raw_bytes = self.get_file_content(wire, path, rev)
480 return raw_bytes and '\0' in raw_bytes
480 return raw_bytes and '\0' in raw_bytes
481
481
482 return _is_binary(repo_id, rev, path)
482 return _is_binary(repo_id, rev, path)
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def md5_hash(self, wire, rev, path):
486 cache_on, context_uid, repo_id = self._cache_on(wire)
487 region = self._region(wire)
488
489 @region.conditional_cache_on_arguments(condition=cache_on)
490 def _md5_hash(_repo_id, _rev, _path):
491 return ''
492
493 return _md5_hash(repo_id, rev, path)
494
495 @reraise_safe_exceptions
485 def run_svn_command(self, wire, cmd, **opts):
496 def run_svn_command(self, wire, cmd, **opts):
486 path = wire.get('path', None)
497 path = wire.get('path', None)
487
498
488 if path and os.path.isdir(path):
499 if path and os.path.isdir(path):
489 opts['cwd'] = path
500 opts['cwd'] = path
490
501
491 safe_call = opts.pop('_safe', False)
502 safe_call = opts.pop('_safe', False)
492
503
493 svnenv = os.environ.copy()
504 svnenv = os.environ.copy()
494 svnenv.update(opts.pop('extra_env', {}))
505 svnenv.update(opts.pop('extra_env', {}))
495
506
496 _opts = {'env': svnenv, 'shell': False}
507 _opts = {'env': svnenv, 'shell': False}
497
508
498 try:
509 try:
499 _opts.update(opts)
510 _opts.update(opts)
500 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
511 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
501
512
502 return b''.join(proc), b''.join(proc.stderr)
513 return b''.join(proc), b''.join(proc.stderr)
503 except OSError as err:
514 except OSError as err:
504 if safe_call:
515 if safe_call:
505 return '', safe_str(err).strip()
516 return '', safe_str(err).strip()
506 else:
517 else:
507 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
518 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
508 tb_err = ("Couldn't run svn command (%s).\n"
519 tb_err = ("Couldn't run svn command (%s).\n"
509 "Original error was:%s\n"
520 "Original error was:%s\n"
510 "Call options:%s\n"
521 "Call options:%s\n"
511 % (cmd, err, _opts))
522 % (cmd, err, _opts))
512 log.exception(tb_err)
523 log.exception(tb_err)
513 raise exceptions.VcsException()(tb_err)
524 raise exceptions.VcsException()(tb_err)
514
525
515 @reraise_safe_exceptions
526 @reraise_safe_exceptions
516 def install_hooks(self, wire, force=False):
527 def install_hooks(self, wire, force=False):
517 from vcsserver.hook_utils import install_svn_hooks
528 from vcsserver.hook_utils import install_svn_hooks
518 repo_path = wire['path']
529 repo_path = wire['path']
519 binary_dir = settings.BINARY_DIR
530 binary_dir = settings.BINARY_DIR
520 executable = None
531 executable = None
521 if binary_dir:
532 if binary_dir:
522 executable = os.path.join(binary_dir, 'python')
533 executable = os.path.join(binary_dir, 'python')
523 return install_svn_hooks(
534 return install_svn_hooks(
524 repo_path, executable=executable, force_create=force)
535 repo_path, executable=executable, force_create=force)
525
536
526 @reraise_safe_exceptions
537 @reraise_safe_exceptions
527 def get_hooks_info(self, wire):
538 def get_hooks_info(self, wire):
528 from vcsserver.hook_utils import (
539 from vcsserver.hook_utils import (
529 get_svn_pre_hook_version, get_svn_post_hook_version)
540 get_svn_pre_hook_version, get_svn_post_hook_version)
530 repo_path = wire['path']
541 repo_path = wire['path']
531 return {
542 return {
532 'pre_version': get_svn_pre_hook_version(repo_path),
543 'pre_version': get_svn_pre_hook_version(repo_path),
533 'post_version': get_svn_post_hook_version(repo_path),
544 'post_version': get_svn_post_hook_version(repo_path),
534 }
545 }
535
546
536 @reraise_safe_exceptions
547 @reraise_safe_exceptions
537 def set_head_ref(self, wire, head_name):
548 def set_head_ref(self, wire, head_name):
538 pass
549 pass
539
550
540 @reraise_safe_exceptions
551 @reraise_safe_exceptions
541 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
552 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
542 archive_dir_name, commit_id):
553 archive_dir_name, commit_id):
543
554
544 def walk_tree(root, root_dir, _commit_id):
555 def walk_tree(root, root_dir, _commit_id):
545 """
556 """
546 Special recursive svn repo walker
557 Special recursive svn repo walker
547 """
558 """
548
559
549 filemode_default = 0o100644
560 filemode_default = 0o100644
550 filemode_executable = 0o100755
561 filemode_executable = 0o100755
551
562
552 file_iter = svn.fs.dir_entries(root, root_dir)
563 file_iter = svn.fs.dir_entries(root, root_dir)
553 for f_name in file_iter:
564 for f_name in file_iter:
554 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
565 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
555
566
556 if f_type == 'dir':
567 if f_type == 'dir':
557 # return only DIR, and then all entries in that dir
568 # return only DIR, and then all entries in that dir
558 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
569 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
559 new_root = os.path.join(root_dir, f_name)
570 new_root = os.path.join(root_dir, f_name)
560 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
571 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
561 yield _f_name, _f_data, _f_type
572 yield _f_name, _f_data, _f_type
562 else:
573 else:
563 f_path = os.path.join(root_dir, f_name).rstrip('/')
574 f_path = os.path.join(root_dir, f_name).rstrip('/')
564 prop_list = svn.fs.node_proplist(root, f_path)
575 prop_list = svn.fs.node_proplist(root, f_path)
565
576
566 f_mode = filemode_default
577 f_mode = filemode_default
567 if prop_list.get('svn:executable'):
578 if prop_list.get('svn:executable'):
568 f_mode = filemode_executable
579 f_mode = filemode_executable
569
580
570 f_is_link = False
581 f_is_link = False
571 if prop_list.get('svn:special'):
582 if prop_list.get('svn:special'):
572 f_is_link = True
583 f_is_link = True
573
584
574 data = {
585 data = {
575 'is_link': f_is_link,
586 'is_link': f_is_link,
576 'mode': f_mode,
587 'mode': f_mode,
577 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
588 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
578 }
589 }
579
590
580 yield f_path, data, f_type
591 yield f_path, data, f_type
581
592
582 def file_walker(_commit_id, path):
593 def file_walker(_commit_id, path):
583 repo = self._factory.repo(wire)
594 repo = self._factory.repo(wire)
584 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
595 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
585
596
586 def no_content():
597 def no_content():
587 raise NoContentException()
598 raise NoContentException()
588
599
589 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
600 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
590 file_path = f_name
601 file_path = f_name
591
602
592 if f_type == 'dir':
603 if f_type == 'dir':
593 mode = f_data['mode']
604 mode = f_data['mode']
594 yield ArchiveNode(file_path, mode, False, no_content)
605 yield ArchiveNode(file_path, mode, False, no_content)
595 else:
606 else:
596 mode = f_data['mode']
607 mode = f_data['mode']
597 is_link = f_data['is_link']
608 is_link = f_data['is_link']
598 data_stream = f_data['content_stream']
609 data_stream = f_data['content_stream']
599 yield ArchiveNode(file_path, mode, is_link, data_stream)
610 yield ArchiveNode(file_path, mode, is_link, data_stream)
600
611
601 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
612 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
602 archive_dir_name, commit_id)
613 archive_dir_name, commit_id)
603
614
604
615
class SvnDiffer(object):
    """
    Utility to create unified diffs between two Subversion revisions,
    built on top of the svn api plus difflib-style line diffing.
    """

    # flipped to True while the current target node is marked binary via
    # its svn:mime-type property; suppresses the line-based diff body
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fs_ptr = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fs_ptr, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # the source path defaults to the target path when not given
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fs_ptr, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # diffing a file against a directory makes no sense
        src_exists = self.src_kind != svn.core.svn_node_none
        tgt_exists = self.tgt_kind != svn.core.svn_node_none
        if src_exists and tgt_exists and self.src_kind != self.tgt_kind:
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Render the diff into a string and return it."""
        out = io.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(out)
        else:
            self._generate_file_diff(out)
        return out.getvalue()

    def _generate_dir_diff(self, buf):
        # record all changed paths between the two roots, then diff each
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        else:
            change = None
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
            buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
                'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # prefer the target side; fall back to the source side when the
        # node does not exist in the target revision
        try:
            return svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            return svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)

    def _svn_readlines(self, fs_root, node_path):
        """Return the node content split into lines, or [] when not diffable."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        # NOTE(review): Stream.read() yields bytes under Python 3 while the
        # buffer above is StringIO — verify encoding handling upstream
        return svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read().splitlines(True)
764
775
765
776
class DiffChangeEditor(svn.delta.Editor):
    """
    Delta editor that records changes between two revisions as
    ``(path, kind, action)`` tuples in ``self.changes``.

    The method signatures follow the svn delta editor callback protocol
    and must not be altered.
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
784
795
785
796
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for the svn delta api that permits every path."""
    return True
788
799
789
800
class TxnNodeProcessor(object):
    """
    Applies a single node change to a transaction root.

    Encapsulates how to add, update or remove one node for a given
    transaction root; exists to support `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create the node if needed, then write its content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # collect missing ancestor dirs bottom-up, then create top-down
        missing_dirs = []
        current = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(current):
            missing_dirs.append(current)
            current = vcspath.dirname(current)

        for current in reversed(missing_dirs):
            log.debug('Creating missing directory "%s"', current)
            svn.fs.make_dir(self.txn_root, current)

    def _svn_path_exists(self, path):
        return svn.fs.check_path(self.txn_root, path) != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        node_kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if node_kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        for key, value in self.node.get('properties', {}).items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
846
857
847
858
def apr_time_t(timestamp):
    """
    Convert a POSIX timestamp (seconds) into APR's apr_time_t
    (microseconds since the epoch).
    """
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
853
864
854
865
def svn_opt_revision_value_t(num):
    """
    Wrap revision number `num` in a `svn_opt_revision_t` structure of
    kind ``svn_opt_revision_number``.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num

    opt_revision = svn.core.svn_opt_revision_t()
    opt_revision.kind = svn.core.svn_opt_revision_number
    opt_revision.value = rev_value
    return opt_revision
General Comments 0
You need to be logged in to leave comments. Login now