##// END OF EJS Templates
py3: import/code fixes
marcink -
r987:1d5adfe8 python3
parent child Browse files
Show More
@@ -1,99 +1,96 b''
self: super: {

  # change GIT version
  # latest supported are in: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
  git = super.lib.overrideDerivation super.git (oldAttrs: {
    name = "git-2.25.3";
    src = self.fetchurl {
      url = "https://www.kernel.org/pub/software/scm/git/git-2.25.3.tar.xz";
      sha256 = "0yvr97cl0dvj3fwblq1mb0cp97v8hrn9l98p8b1jx8815mbsnz9h";
    };

    # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
    patches = [
      ./patches/git/docbook2texi.patch
      ./patches/git/git-sh-i18n.patch
      ./patches/git/ssh-path.patch
      ./patches/git/git-send-email-honor-PATH.patch
      ./patches/git/installCheck-path.patch
    ];

  });

  # pin libgit2 at 1.0.1; the rc-suffixed attribute is the one consumed by pygit2
  libgit2rc = super.lib.overrideDerivation super.libgit2 (oldAttrs: {
    name = "libgit2-1.0.1";
    version = "1.0.1";

    src = self.fetchFromGitHub {
      owner = "libgit2";
      repo = "libgit2";
      rev = "v1.0.1";
      sha256 = "0xqdnvrq1bnf8hxh9xjw25y2cg91agvd9jr5qwd30z2a0dzll22v";
    };

    cmakeFlags = [ "-DTHREADSAFE=ON" "-DUSE_HTTPS=no" ];

    buildInputs = [
      super.zlib
      super.libssh2
      super.openssl
      super.curl
    ];

  });

  # Override subversion derivation to
  # - activate special python bindings
  subversionrc =
    let
      # py3c: C-API compatibility headers required to build the SWIG python bindings
      py3c = self.python37Packages.buildPythonPackage rec {
        pname = "py3c";
        version = "1.0";
        src = self.fetchurl {
          url = "https://files.pythonhosted.org/packages/6a/aa/9f1a69a8c71e72553b281603633e42501de932aa4d9912bccbf9a2884093/py3c-1.0.tar.gz";
          sha256 = "1h80jqi6r64kppxb4kshsiadrgc5hwk5arp3zcki01jf4ahknjz9";
        };
        format = "setuptools";
        doCheck = false;
        buildInputs = [];
        checkInputs = [];
        nativeBuildInputs = [];
        propagatedBuildInputs = [];
        meta = {
          license = [ ];
        };
      };
    in
    let
      # python interpreter with py3c on its search path, fed to subversion's build
      pythonWithEnv = self.python37Packages.python.buildEnv.override {
        extraLibs = [ py3c ];
      };
    in
    let
      subversionWithPython = super.subversion.override {
        httpSupport = true; # client must support http
        pythonBindings = true;
        python = pythonWithEnv;
      };
    in
    super.lib.overrideDerivation subversionWithPython (oldAttrs: {
      name = "subversion-1.14.0";
      src = self.fetchurl {
        url = "https://archive.apache.org/dist/subversion/subversion-1.14.0.tar.gz";
        sha256 = "1l1px5kva5a13pi2rkxfgxfvypvl6bmbkdag6168fhayad3i2ggg";
      };

      ## use internal lz4/utf8proc because it is stable and shipped with SVN
      configureFlags = oldAttrs.configureFlags ++ [
        " --with-lz4=internal"
        " --with-utf8proc=internal"
      ];
    });

}
@@ -1,76 +1,76 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 import urlparse
21 import urllib.parse
22 22
23 23 from vcsserver.lib.rc_cache import region_meta
24 24 log = logging.getLogger(__name__)
25 25
26 26
class RepoFactory(object):
    """
    Utility to create instances of repository

    It provides internal caching of the `repo` object based on
    the :term:`call context`.
    """
    # backend identifier, e.g. 'git'/'hg'/'svn' -- set by subclasses
    repo_type = None

    def __init__(self):
        # dogpile cache region shared by all repo objects of this factory
        self._cache_region = region_meta.dogpile_cache_regions['repo_object']

    def _create_config(self, path, config):
        # NOTE(review): base implementation deliberately ignores its inputs
        # and returns an empty config dict; subclasses are expected to override.
        config = {}
        return config

    def _create_repo(self, wire, create):
        # must be implemented by backend-specific subclasses
        raise NotImplementedError()

    def repo(self, wire, create=False):
        # must be implemented by backend-specific subclasses
        raise NotImplementedError()
48 48
49 49
def obfuscate_qs(query_string):
    """
    Mask sensitive credential values in a URL query string.

    Replaces the value of any ``auth_token`` or ``api_key`` parameter with
    ``*****`` and re-serializes the query string. Returns ``None`` when the
    input is ``None``. Keys with blank values are kept (without ``=``).
    """
    if query_string is None:
        return None

    masked = []
    # keep_blank_values=True so `?flag=&x=1` round-trips all keys
    for key, val in urllib.parse.parse_qsl(query_string, keep_blank_values=True):
        if key in ('auth_token', 'api_key'):
            val = "*****"
        masked.append((key, val))

    return '&'.join('{}{}'.format(
        key, '={}'.format(val) if val else '') for key, val in masked)
62 62
63 63
def raise_from_original(new_type):
    """
    Raise a new exception type with original args and traceback.

    Must be called from inside an ``except`` block; re-raises the active
    exception as ``new_type``, preserving the original args and traceback.
    The formatted original traceback is stored on ``new_exc._org_exc_tb``.
    """
    exc_type, exc_value, exc_traceback = sys.exc_info()
    new_exc = new_type(*exc_value.args)
    # store the original traceback into the new exc.
    # NOTE: py3 traceback.format_exc() takes an int `limit`, not a traceback
    # object -- passing exc_traceback here (py2 leftover) raised at runtime.
    new_exc._org_exc_tb = traceback.format_exc()

    try:
        raise new_exc.with_traceback(exc_traceback)
    finally:
        # break the reference cycle through the traceback frame
        del exc_traceback
@@ -1,1192 +1,1192 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 import urllib
26 import urllib2
25 import urllib.request, urllib.parse, urllib.error
26 import urllib.request, urllib.error, urllib.parse
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from dulwich import index, objects
33 33 from dulwich.client import HttpGitClient, LocalGitClient
34 34 from dulwich.errors import (
35 35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 36 MissingCommitError, ObjectMissing, HangupException,
37 37 UnexpectedCommandError)
38 38 from dulwich.repo import Repo as DulwichRepo
39 39 from dulwich.server import update_server_info
40 40
41 41 from vcsserver import exceptions, settings, subprocessio
42 42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 43 from vcsserver.base import RepoFactory, obfuscate_qs
44 44 from vcsserver.hgcompat import (
45 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 46 from vcsserver.git_lfs.lib import LFSOidStore
47 47 from vcsserver.vcs_base import RemoteBase
48 48
49 49 DIR_STAT = stat.S_IFDIR
50 50 FILE_MODE = stat.S_IFMT
51 51 GIT_LINK = objects.S_IFGITLINK
52 52 PEELED_REF_MARKER = '^{}'
53 53
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.

    Decodes ``value`` (bytes) using the wire encoding configured in
    ``settings.WIRE_ENCODING``.
    """
    return value.decode(settings.WIRE_ENCODING)
63 63
64 64
def reraise_safe_exceptions(func):
    """
    Decorator: converts Dulwich exceptions to something neutral.

    Lookup-style dulwich errors become ``exceptions.LookupException``;
    protocol errors become ``exceptions.VcsException``. Anything else is
    re-raised unchanged.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
            exc = exceptions.LookupException(org_exc=e)
            raise exc(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            exc = exceptions.VcsException(org_exc=e)
            raise exc(safe_str(e))
        except Exception as e:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
87 87
88 88
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """
    def __del__(self):
        # guard: __del__ may run on a partially-constructed instance
        if hasattr(self, 'object_store'):
            self.close()
100 100
101 101
class Repository(LibGit2Repo):
    """
    Context-manager wrapper around pygit2's Repository that releases the
    underlying libgit2 handle (``free()``) on exit.
    """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()
109 109
110 110
class GitFactory(RepoFactory):
    """Factory creating git repository objects (dulwich or libgit2 backed)."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # libgit2 (pygit2) path for read operations; dulwich otherwise
        if use_libgit2:
            return Repository(wire['path'])
        repo_path = str_to_dulwich(wire['path'])
        return Repo(repo_path)

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        # convenience shortcut for the pygit2-backed repository
        return self.repo(wire, use_libgit2=True)
129 129
130 130
131 131 class GitRemote(RemoteBase):
132 132
133 133 def __init__(self, factory):
134 134 self._factory = factory
135 135 self._bulk_methods = {
136 136 "date": self.date,
137 137 "author": self.author,
138 138 "branch": self.branch,
139 139 "message": self.message,
140 140 "parents": self.parents,
141 141 "_commit": self.revision,
142 142 }
143 143
144 144 def _wire_to_config(self, wire):
145 145 if 'config' in wire:
146 146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 147 return {}
148 148
149 149 def _remote_conf(self, config):
150 150 params = [
151 151 '-c', 'core.askpass=""',
152 152 ]
153 153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 154 if ssl_cert_dir:
155 155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 156 return params
157 157
158 158 @reraise_safe_exceptions
159 159 def discover_git_version(self):
160 160 stdout, _ = self.run_git_command(
161 161 {}, ['--version'], _bare=True, _safe=True)
162 162 prefix = 'git version'
163 163 if stdout.startswith(prefix):
164 164 stdout = stdout[len(prefix):]
165 165 return stdout.strip()
166 166
167 167 @reraise_safe_exceptions
168 168 def is_empty(self, wire):
169 169 repo_init = self._factory.repo_libgit2(wire)
170 170 with repo_init as repo:
171 171
172 172 try:
173 173 has_head = repo.head.name
174 174 if has_head:
175 175 return False
176 176
177 177 # NOTE(marcink): check again using more expensive method
178 178 return repo.is_empty
179 179 except Exception:
180 180 pass
181 181
182 182 return True
183 183
184 184 @reraise_safe_exceptions
185 185 def assert_correct_path(self, wire):
186 186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 188 def _assert_correct_path(_context_uid, _repo_id):
189 189 try:
190 190 repo_init = self._factory.repo_libgit2(wire)
191 191 with repo_init as repo:
192 192 pass
193 193 except pygit2.GitError:
194 194 path = wire.get('path')
195 195 tb = traceback.format_exc()
196 196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 197 return False
198 198
199 199 return True
200 200 return _assert_correct_path(context_uid, repo_id)
201 201
202 202 @reraise_safe_exceptions
203 203 def bare(self, wire):
204 204 repo_init = self._factory.repo_libgit2(wire)
205 205 with repo_init as repo:
206 206 return repo.is_bare
207 207
208 208 @reraise_safe_exceptions
209 209 def blob_as_pretty_string(self, wire, sha):
210 210 repo_init = self._factory.repo_libgit2(wire)
211 211 with repo_init as repo:
212 212 blob_obj = repo[sha]
213 213 blob = blob_obj.data
214 214 return blob
215 215
216 216 @reraise_safe_exceptions
217 217 def blob_raw_length(self, wire, sha):
218 218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 220 def _blob_raw_length(_repo_id, _sha):
221 221
222 222 repo_init = self._factory.repo_libgit2(wire)
223 223 with repo_init as repo:
224 224 blob = repo[sha]
225 225 return blob.size
226 226
227 227 return _blob_raw_length(repo_id, sha)
228 228
229 229 def _parse_lfs_pointer(self, raw_content):
230 230
231 231 spec_string = 'version https://git-lfs.github.com/spec'
232 232 if raw_content and raw_content.startswith(spec_string):
233 233 pattern = re.compile(r"""
234 234 (?:\n)?
235 235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 237 ^size[ ](?P<oid_size>[0-9]+)\n
238 238 (?:\n)?
239 239 """, re.VERBOSE | re.MULTILINE)
240 240 match = pattern.match(raw_content)
241 241 if match:
242 242 return match.groupdict()
243 243
244 244 return {}
245 245
246 246 @reraise_safe_exceptions
247 247 def is_large_file(self, wire, commit_id):
248 248 cache_on, context_uid, repo_id = self._cache_on(wire)
249 249
250 250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 251 def _is_large_file(_repo_id, _sha):
252 252 repo_init = self._factory.repo_libgit2(wire)
253 253 with repo_init as repo:
254 254 blob = repo[commit_id]
255 255 if blob.is_binary:
256 256 return {}
257 257
258 258 return self._parse_lfs_pointer(blob.data)
259 259
260 260 return _is_large_file(repo_id, commit_id)
261 261
262 262 @reraise_safe_exceptions
263 263 def is_binary(self, wire, tree_id):
264 264 cache_on, context_uid, repo_id = self._cache_on(wire)
265 265
266 266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 267 def _is_binary(_repo_id, _tree_id):
268 268 repo_init = self._factory.repo_libgit2(wire)
269 269 with repo_init as repo:
270 270 blob_obj = repo[tree_id]
271 271 return blob_obj.is_binary
272 272
273 273 return _is_binary(repo_id, tree_id)
274 274
275 275 @reraise_safe_exceptions
276 276 def in_largefiles_store(self, wire, oid):
277 277 conf = self._wire_to_config(wire)
278 278 repo_init = self._factory.repo_libgit2(wire)
279 279 with repo_init as repo:
280 280 repo_name = repo.path
281 281
282 282 store_location = conf.get('vcs_git_lfs_store_location')
283 283 if store_location:
284 284
285 285 store = LFSOidStore(
286 286 oid=oid, repo=repo_name, store_location=store_location)
287 287 return store.has_oid()
288 288
289 289 return False
290 290
291 291 @reraise_safe_exceptions
292 292 def store_path(self, wire, oid):
293 293 conf = self._wire_to_config(wire)
294 294 repo_init = self._factory.repo_libgit2(wire)
295 295 with repo_init as repo:
296 296 repo_name = repo.path
297 297
298 298 store_location = conf.get('vcs_git_lfs_store_location')
299 299 if store_location:
300 300 store = LFSOidStore(
301 301 oid=oid, repo=repo_name, store_location=store_location)
302 302 return store.oid_path
303 303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304 304
305 305 @reraise_safe_exceptions
306 306 def bulk_request(self, wire, rev, pre_load):
307 307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 308 @self.region.conditional_cache_on_arguments(condition=cache_on)
309 309 def _bulk_request(_repo_id, _rev, _pre_load):
310 310 result = {}
311 311 for attr in pre_load:
312 312 try:
313 313 method = self._bulk_methods[attr]
314 314 args = [wire, rev]
315 315 result[attr] = method(*args)
316 316 except KeyError as e:
317 317 raise exceptions.VcsException(e)(
318 318 "Unknown bulk attribute: %s" % attr)
319 319 return result
320 320
321 321 return _bulk_request(repo_id, rev, sorted(pre_load))
322 322
323 323 def _build_opener(self, url):
324 324 handlers = []
325 325 url_obj = url_parser(url)
326 326 _, authinfo = url_obj.authinfo()
327 327
328 328 if authinfo:
329 329 # create a password manager
330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
330 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
331 331 passmgr.add_password(*authinfo)
332 332
333 333 handlers.extend((httpbasicauthhandler(passmgr),
334 334 httpdigestauthhandler(passmgr)))
335 335
336 return urllib2.build_opener(*handlers)
336 return urllib.request.build_opener(*handlers)
337 337
338 338 def _type_id_to_name(self, type_id):
339 339 return {
340 340 1: b'commit',
341 341 2: b'tree',
342 342 3: b'blob',
343 343 4: b'tag'
344 344 }[type_id]
345 345
346 346 @reraise_safe_exceptions
347 347 def check_url(self, url, config):
348 348 url_obj = url_parser(url)
349 349 test_uri, _ = url_obj.authinfo()
350 350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
351 351 url_obj.query = obfuscate_qs(url_obj.query)
352 352 cleaned_uri = str(url_obj)
353 353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
354 354
355 355 if not test_uri.endswith('info/refs'):
356 356 test_uri = test_uri.rstrip('/') + '/info/refs'
357 357
358 358 o = self._build_opener(url)
359 359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
360 360
361 361 q = {"service": 'git-upload-pack'}
362 qs = '?%s' % urllib.urlencode(q)
362 qs = '?%s' % urllib.parse.urlencode(q)
363 363 cu = "%s%s" % (test_uri, qs)
364 req = urllib2.Request(cu, None, {})
364 req = urllib.request.Request(cu, None, {})
365 365
366 366 try:
367 367 log.debug("Trying to open URL %s", cleaned_uri)
368 368 resp = o.open(req)
369 369 if resp.code != 200:
370 370 raise exceptions.URLError()('Return Code is not 200')
371 371 except Exception as e:
372 372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 373 # means it cannot be cloned
374 374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
375 375
376 376 # now detect if it's proper git repo
377 377 gitdata = resp.read()
378 378 if 'service=git-upload-pack' in gitdata:
379 379 pass
380 380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
381 381 # old style git can return some other format !
382 382 pass
383 383 else:
384 384 raise exceptions.URLError()(
385 385 "url [%s] does not look like an git" % (cleaned_uri,))
386 386
387 387 return True
388 388
389 389 @reraise_safe_exceptions
390 390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
391 391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
392 392 remote_refs = self.pull(wire, url, apply_refs=False)
393 393 repo = self._factory.repo(wire)
394 394 if isinstance(valid_refs, list):
395 395 valid_refs = tuple(valid_refs)
396 396
397 397 for k in remote_refs:
398 398 # only parse heads/tags and skip so called deferred tags
399 399 if k.startswith(valid_refs) and not k.endswith(deferred):
400 400 repo[k] = remote_refs[k]
401 401
402 402 if update_after_clone:
403 403 # we want to checkout HEAD
404 404 repo["HEAD"] = remote_refs["HEAD"]
405 405 index.build_index_from_tree(repo.path, repo.index_path(),
406 406 repo.object_store, repo["HEAD"].tree)
407 407
408 408 @reraise_safe_exceptions
409 409 def branch(self, wire, commit_id):
410 410 cache_on, context_uid, repo_id = self._cache_on(wire)
411 411 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 412 def _branch(_context_uid, _repo_id, _commit_id):
413 413 regex = re.compile('^refs/heads')
414 414
415 415 def filter_with(ref):
416 416 return regex.match(ref[0]) and ref[1] == _commit_id
417 417
418 branches = filter(filter_with, self.get_refs(wire).items())
418 branches = list(filter(filter_with, list(self.get_refs(wire).items())))
419 419 return [x[0].split('refs/heads/')[-1] for x in branches]
420 420
421 421 return _branch(context_uid, repo_id, commit_id)
422 422
423 423 @reraise_safe_exceptions
424 424 def commit_branches(self, wire, commit_id):
425 425 cache_on, context_uid, repo_id = self._cache_on(wire)
426 426 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 427 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 428 repo_init = self._factory.repo_libgit2(wire)
429 429 with repo_init as repo:
430 430 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 431 return branches
432 432
433 433 return _commit_branches(context_uid, repo_id, commit_id)
434 434
435 435 @reraise_safe_exceptions
436 436 def add_object(self, wire, content):
437 437 repo_init = self._factory.repo_libgit2(wire)
438 438 with repo_init as repo:
439 439 blob = objects.Blob()
440 440 blob.set_raw_string(content)
441 441 repo.object_store.add_object(blob)
442 442 return blob.id
443 443
444 444 # TODO: this is quite complex, check if that can be simplified
445 445 @reraise_safe_exceptions
446 446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
447 447 repo = self._factory.repo(wire)
448 448 object_store = repo.object_store
449 449
450 450 # Create tree and populates it with blobs
451 451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
452 452
453 453 for node in updated:
454 454 # Compute subdirs if needed
455 455 dirpath, nodename = vcspath.split(node['path'])
456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
456 dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
457 457 parent = commit_tree
458 458 ancestors = [('', parent)]
459 459
460 460 # Tries to dig for the deepest existing tree
461 461 while dirnames:
462 462 curdir = dirnames.pop(0)
463 463 try:
464 464 dir_id = parent[curdir][1]
465 465 except KeyError:
466 466 # put curdir back into dirnames and stops
467 467 dirnames.insert(0, curdir)
468 468 break
469 469 else:
470 470 # If found, updates parent
471 471 parent = repo[dir_id]
472 472 ancestors.append((curdir, parent))
473 473 # Now parent is deepest existing tree and we need to create
474 474 # subtrees for dirnames (in reverse order)
475 475 # [this only applies for nodes from added]
476 476 new_trees = []
477 477
478 478 blob = objects.Blob.from_string(node['content'])
479 479
480 480 if dirnames:
481 481 # If there are trees which should be created we need to build
482 482 # them now (in reverse order)
483 483 reversed_dirnames = list(reversed(dirnames))
484 484 curtree = objects.Tree()
485 485 curtree[node['node_path']] = node['mode'], blob.id
486 486 new_trees.append(curtree)
487 487 for dirname in reversed_dirnames[:-1]:
488 488 newtree = objects.Tree()
489 489 newtree[dirname] = (DIR_STAT, curtree.id)
490 490 new_trees.append(newtree)
491 491 curtree = newtree
492 492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
493 493 else:
494 494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
495 495
496 496 new_trees.append(parent)
497 497 # Update ancestors
498 498 reversed_ancestors = reversed(
499 499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
500 500 for parent, tree, path in reversed_ancestors:
501 501 parent[path] = (DIR_STAT, tree.id)
502 502 object_store.add_object(tree)
503 503
504 504 object_store.add_object(blob)
505 505 for tree in new_trees:
506 506 object_store.add_object(tree)
507 507
508 508 for node_path in removed:
509 509 paths = node_path.split('/')
510 510 tree = commit_tree
511 511 trees = [tree]
512 512 # Traverse deep into the forest...
513 513 for path in paths:
514 514 try:
515 515 obj = repo[tree[path][1]]
516 516 if isinstance(obj, objects.Tree):
517 517 trees.append(obj)
518 518 tree = obj
519 519 except KeyError:
520 520 break
521 521 # Cut down the blob and all rotten trees on the way back...
522 for path, tree in reversed(zip(paths, trees)):
522 for path, tree in reversed(list(zip(paths, trees))):
523 523 del tree[path]
524 524 if tree:
525 525 # This tree still has elements - don't remove it or any
526 526 # of it's parents
527 527 break
528 528
529 529 object_store.add_object(commit_tree)
530 530
531 531 # Create commit
532 532 commit = objects.Commit()
533 533 commit.tree = commit_tree.id
534 for k, v in commit_data.iteritems():
534 for k, v in commit_data.items():
535 535 setattr(commit, k, v)
536 536 object_store.add_object(commit)
537 537
538 538 self.create_branch(wire, branch, commit.id)
539 539
540 540 # dulwich set-ref
541 541 ref = 'refs/heads/%s' % branch
542 542 repo.refs[ref] = commit.id
543 543
544 544 return commit.id
545 545
546 546 @reraise_safe_exceptions
547 547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
548 548 if url != 'default' and '://' not in url:
549 549 client = LocalGitClient(url)
550 550 else:
551 551 url_obj = url_parser(url)
552 552 o = self._build_opener(url)
553 553 url, _ = url_obj.authinfo()
554 554 client = HttpGitClient(base_url=url, opener=o)
555 555 repo = self._factory.repo(wire)
556 556
557 557 determine_wants = repo.object_store.determine_wants_all
558 558 if refs:
559 559 def determine_wants_requested(references):
560 560 return [references[r] for r in references if r in refs]
561 561 determine_wants = determine_wants_requested
562 562
563 563 try:
564 564 remote_refs = client.fetch(
565 565 path=url, target=repo, determine_wants=determine_wants)
566 566 except NotGitRepository as e:
567 567 log.warning(
568 568 'Trying to fetch from "%s" failed, not a Git repository.', url)
569 569 # Exception can contain unicode which we convert
570 570 raise exceptions.AbortException(e)(repr(e))
571 571
572 572 # mikhail: client.fetch() returns all the remote refs, but fetches only
573 573 # refs filtered by `determine_wants` function. We need to filter result
574 574 # as well
575 575 if refs:
576 576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
577 577
578 578 if apply_refs:
579 579 # TODO: johbo: Needs proper test coverage with a git repository
580 580 # that contains a tag object, so that we would end up with
581 581 # a peeled ref at this point.
582 582 for k in remote_refs:
583 583 if k.endswith(PEELED_REF_MARKER):
584 584 log.debug("Skipping peeled reference %s", k)
585 585 continue
586 586 repo[k] = remote_refs[k]
587 587
588 588 if refs and not update_after:
589 589 # mikhail: explicitly set the head to the last ref.
590 590 repo['HEAD'] = remote_refs[refs[-1]]
591 591
592 592 if update_after:
593 593 # we want to checkout HEAD
594 594 repo["HEAD"] = remote_refs["HEAD"]
595 595 index.build_index_from_tree(repo.path, repo.index_path(),
596 596 repo.object_store, repo["HEAD"].tree)
597 597 return remote_refs
598 598
599 599 @reraise_safe_exceptions
600 600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
601 601 repo = self._factory.repo(wire)
602 602 if refs and not isinstance(refs, (list, tuple)):
603 603 refs = [refs]
604 604
605 605 config = self._wire_to_config(wire)
606 606 # get all remote refs we'll use to fetch later
607 607 cmd = ['ls-remote']
608 608 if not all_refs:
609 609 cmd += ['--heads', '--tags']
610 610 cmd += [url]
611 611 output, __ = self.run_git_command(
612 612 wire, cmd, fail_on_stderr=False,
613 613 _copts=self._remote_conf(config),
614 614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
615 615
616 616 remote_refs = collections.OrderedDict()
617 617 fetch_refs = []
618 618
619 619 for ref_line in output.splitlines():
620 620 sha, ref = ref_line.split('\t')
621 621 sha = sha.strip()
622 622 if ref in remote_refs:
623 623 # duplicate, skip
624 624 continue
625 625 if ref.endswith(PEELED_REF_MARKER):
626 626 log.debug("Skipping peeled reference %s", ref)
627 627 continue
628 628 # don't sync HEAD
629 629 if ref in ['HEAD']:
630 630 continue
631 631
632 632 remote_refs[ref] = sha
633 633
634 634 if refs and sha in refs:
635 635 # we filter fetch using our specified refs
636 636 fetch_refs.append('{}:{}'.format(ref, ref))
637 637 elif not refs:
638 638 fetch_refs.append('{}:{}'.format(ref, ref))
639 639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640 640
641 641 if fetch_refs:
642 642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
643 643 fetch_refs_chunks = list(chunk)
644 644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
645 645 _out, _err = self.run_git_command(
646 646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
647 647 fail_on_stderr=False,
648 648 _copts=self._remote_conf(config),
649 649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650 650
651 651 return remote_refs
652 652
653 653 @reraise_safe_exceptions
654 654 def sync_push(self, wire, url, refs=None):
655 655 if not self.check_url(url, wire):
656 656 return
657 657 config = self._wire_to_config(wire)
658 658 self._factory.repo(wire)
659 659 self.run_git_command(
660 660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
661 661 _copts=self._remote_conf(config),
662 662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663 663
664 664 @reraise_safe_exceptions
665 665 def get_remote_refs(self, wire, url):
666 666 repo = Repo(url)
667 667 return repo.get_refs()
668 668
669 669 @reraise_safe_exceptions
670 670 def get_description(self, wire):
671 671 repo = self._factory.repo(wire)
672 672 return repo.get_description()
673 673
674 674 @reraise_safe_exceptions
675 675 def get_missing_revs(self, wire, rev1, rev2, path2):
676 676 repo = self._factory.repo(wire)
677 677 LocalGitClient(thin_packs=False).fetch(path2, repo)
678 678
679 679 wire_remote = wire.copy()
680 680 wire_remote['path'] = path2
681 681 repo_remote = self._factory.repo(wire_remote)
682 682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
683 683
684 684 revs = [
685 685 x.commit.id
686 686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
687 687 return revs
688 688
    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        """
        Resolve `sha` (any rev-parse-able revision string) and return a dict
        with its id and type. Raises LookupException when the revision does
        not exist, or when it resolves to a commit that is not reachable from
        any branch (a "dangling" commit) and `maybe_unreachable` is False.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # annotated tag: unwrap to the tagged object
                    commit = repo.get(commit.target)
                    is_tag = True

                # decide whether we must verify reachability of the commit
                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit: reachable from at least one branch
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type_str

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
746 746
    @reraise_safe_exceptions
    def get_refs(self, wire):
        # Return {ref_name: target_sha} for all branch (refs/heads) and
        # tag (refs/tags) references.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_refs(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                regex = re.compile('^refs/(heads|tags)/')
                return {x.name: x.target.hex for x in
                        [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}

        return _get_refs(context_uid, repo_id)
760 760
    @reraise_safe_exceptions
    def get_branch_pointers(self, wire):
        # Return {target_sha: branch_shorthand} for all local branches
        # (refs/heads/*).
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_branch_pointers(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            regex = re.compile('^refs/heads')
            with repo_init as repo:
                branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
                return {x.target.hex: x.shorthand for x in branches}

        return _get_branch_pointers(context_uid, repo_id)
774 774
    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        # Return the sha of the commit HEAD points at. With show_exc=False
        # failures (e.g. empty repo / unborn HEAD) are swallowed and None
        # is returned implicitly.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)
788 788
    @reraise_safe_exceptions
    def init(self, wire):
        # Create a new non-bare repository at wire['path'] (dulwich backend).
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)

    @reraise_safe_exceptions
    def init_bare(self, wire):
        # Create a new bare repository at wire['path'] (dulwich backend).
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)
798 798
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        # Return {'id': sha, 'tree': tree_sha?} for the object at `rev`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects itself don't have tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)
817 817
    @reraise_safe_exceptions
    def date(self, wire, commit_id):
        # Return [commit_time, commit_time_offset] for `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _date(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'commit_time'):
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
                else:
                    # annotated tag: unwrap to the tagged commit first
                    commit = commit.get_object()
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset

                # TODO(marcink): check dulwich difference of offset vs timezone
                return [commit_time, commit_time_offset]
        return _date(repo_id, commit_id)
836 836
    @reraise_safe_exceptions
    def author(self, wire, commit_id):
        # Return the author as "Name <email>", or just "Name" when no email
        # is recorded.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _author(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'author'):
                    author = commit.author
                else:
                    # annotated tag: unwrap to reach the author signature
                    author = commit.get_object().author

                if author.email:
                    return "{} <{}>".format(author.name, author.email)

                try:
                    return "{}".format(author.name)
                except Exception:
                    # name may fail to decode; fall back to the raw bytes form
                    return "{}".format(safe_unicode(author.raw_name))

        return _author(repo_id, commit_id)
860 860
    @reraise_safe_exceptions
    def message(self, wire, commit_id):
        # Return the full commit message of `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _message(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                return commit.message
        return _message(repo_id, commit_id)
871 871
    @reraise_safe_exceptions
    def parents(self, wire, commit_id):
        # Return the parent commit shas of `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _parents(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                if hasattr(commit, 'parent_ids'):
                    parent_ids = commit.parent_ids
                else:
                    # annotated tag: unwrap to the tagged commit
                    parent_ids = commit.get_object().parent_ids

                return [x.hex for x in parent_ids]
        return _parents(repo_id, commit_id)
887 887
888 888 @reraise_safe_exceptions
889 889 def children(self, wire, commit_id):
890 890 cache_on, context_uid, repo_id = self._cache_on(wire)
891 891 @self.region.conditional_cache_on_arguments(condition=cache_on)
892 892 def _children(_repo_id, _commit_id):
893 893 output, __ = self.run_git_command(
894 894 wire, ['rev-list', '--all', '--children'])
895 895
896 896 child_ids = []
897 897 pat = re.compile(r'^%s' % commit_id)
898 898 for l in output.splitlines():
899 899 if pat.match(l):
900 900 found_ids = l.split(' ')[1:]
901 901 child_ids.extend(found_ids)
902 902
903 903 return child_ids
904 904 return _children(repo_id, commit_id)
905 905
    @reraise_safe_exceptions
    def set_refs(self, wire, key, value):
        # Create or overwrite (force=True) the reference `key` -> `value`.
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo.references.create(key, value, force=True)

    @reraise_safe_exceptions
    def create_branch(self, wire, branch_name, commit_id, force=False):
        # Create a local branch at `commit_id`; with force=True an existing
        # branch of the same name is moved.
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            commit = repo[commit_id]

            if force:
                repo.branches.local.create(branch_name, commit, force=force)
            elif not repo.branches.get(branch_name):
                # create only if that branch isn't existing
                repo.branches.local.create(branch_name, commit, force=force)

    @reraise_safe_exceptions
    def remove_ref(self, wire, key):
        # Delete the reference named `key` (full ref name expected).
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo.references.delete(key)

    @reraise_safe_exceptions
    def tag_remove(self, wire, tag_name):
        # Delete the tag `tag_name` by removing its refs/tags entry.
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            key = 'refs/tags/{}'.format(tag_name)
            repo.references.delete(key)
936 936
    @reraise_safe_exceptions
    def tree_changes(self, wire, source_id, target_id):
        # Diff two trees via dulwich's object store; source may be None for
        # the empty tree.
        # TODO(marcink): remove this seems it's only used by tests
        repo = self._factory.repo(wire)
        source = repo[source_id].tree if source_id else None
        target = repo[target_id].tree
        result = repo.object_store.tree_changes(source, target)
        return list(result)
945 945
    @reraise_safe_exceptions
    def tree_and_type_for_path(self, wire, commit_id, path):
        # Return (sha, type, filemode) of the tree entry at `path` inside
        # `commit_id`, or (None, None, None) when the path does not exist.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]
                try:
                    tree = commit.tree[path]
                except KeyError:
                    return None, None, None

                return tree.id.hex, tree.type_str, tree.filemode
        return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
963 963
    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        # List entries of tree `tree_id` as (name, mode, sha, type) tuples.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_items(_repo_id, _tree_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                result = []
                for item in tree:
                    item_sha = item.hex
                    item_mode = item.filemode
                    item_type = item.type_str

                    if item_type == 'commit':
                        # NOTE(marcink): submodules we translate to 'link' for backward compat
                        item_type = 'link'

                    result.append((item.name, item_mode, item_sha, item_type))
                return result
        return _tree_items(repo_id, tree_id)
990 990
    @reraise_safe_exceptions
    def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        """
        Old version that uses subprocess to call diff
        """

        flags = [
            '-U%s' % context, '--patch',
            '--binary',
            '--find-renames',
            '--no-indent-heuristic',
            # '--indent-heuristic',
            #'--full-index',
            #'--abbrev=40'
        ]

        if opt_ignorews:
            flags.append('--ignore-all-space')

        # against the empty commit we use `git show` instead of `git diff`
        if commit_id_1 == self.EMPTY_COMMIT:
            cmd = ['show'] + flags + [commit_id_2]
        else:
            cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

        if file_filter:
            cmd.extend(['--', file_filter])

        diff, __ = self.run_git_command(wire, cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit_id_1 == self.EMPTY_COMMIT:
            lines = diff.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            diff = '\n'.join(lines[x:]) + '\n'
        return diff
1031 1031
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        # libgit2-based diff between two commits; optionally restricted to a
        # single path via `file_filter`.
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            swap = True
            flags = 0
            flags |= pygit2.GIT_DIFF_SHOW_BINARY

            if opt_ignorews:
                flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

            if commit_id_1 == self.EMPTY_COMMIT:
                # diff against the empty tree
                comm1 = repo[commit_id_2]
                diff_obj = comm1.tree.diff_to_tree(
                    flags=flags, context_lines=context, swap=swap)

            else:
                comm1 = repo[commit_id_2]
                comm2 = repo[commit_id_1]
                diff_obj = comm1.tree.diff_to_tree(
                    comm2.tree, flags=flags, context_lines=context, swap=swap)
                similar_flags = 0
                similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
                diff_obj.find_similar(flags=similar_flags)

            if file_filter:
                for p in diff_obj:
                    if p.delta.old_file.path == file_filter:
                        return p.patch or ''
                # no matching path == no diff
                return ''
            return diff_obj.patch or ''
1064 1064
1065 1065 @reraise_safe_exceptions
1066 1066 def node_history(self, wire, commit_id, path, limit):
1067 1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1068 1068 @self.region.conditional_cache_on_arguments(condition=cache_on)
1069 1069 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1070 1070 # optimize for n==1, rev-list is much faster for that use-case
1071 1071 if limit == 1:
1072 1072 cmd = ['rev-list', '-1', commit_id, '--', path]
1073 1073 else:
1074 1074 cmd = ['log']
1075 1075 if limit:
1076 1076 cmd.extend(['-n', str(safe_int(limit, 0))])
1077 1077 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1078 1078
1079 1079 output, __ = self.run_git_command(wire, cmd)
1080 1080 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1081 1081
1082 1082 return [x for x in commit_ids]
1083 1083 return _node_history(context_uid, repo_id, commit_id, path, limit)
1084 1084
    @reraise_safe_exceptions
    def node_annotate(self, wire, commit_id, path):
        # Blame `path` at `commit_id`; returns (line_no, sha, line) tuples.

        cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
        # -l ==> outputs long shas (and we need all 40 characters)
        # --root ==> doesn't put '^' character for boundaries
        # -r commit_id ==> blames for the given commit
        output, __ = self.run_git_command(wire, cmd)

        result = []
        # last element of split('\n') is the trailing empty string; skip it
        for i, blame_line in enumerate(output.split('\n')[:-1]):
            line_no = i + 1
            commit_id, line = re.split(r' ', blame_line, 1)
            result.append((line_no, commit_id, line))
        return result
1100 1100
    @reraise_safe_exceptions
    def update_server_info(self, wire):
        # Regenerate the info/refs auxiliary files needed for dumb-HTTP
        # transport (dulwich `update_server_info`).
        repo = self._factory.repo(wire)
        update_server_info(repo)

    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire):
        # All commit shas reachable from branches or tags, oldest first.

        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id):

            cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
            try:
                output, __ = self.run_git_command(wire, cmd)
                return output.splitlines()
            except Exception:
                # Can be raised for empty repositories
                return []
        return _get_all_commit_ids(context_uid, repo_id)
1121 1121
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """
        Execute `git <cmd>` as a subprocess and return (stdout, stderr).

        Recognized control options (consumed, not passed to subprocess):
          _bare  -- skip the default `-c core.quotepath=false` config
          _safe  -- on failure return ('', err) instead of raising
          _copts -- extra `git -c` style config arguments
          extra_env -- dict merged into the subprocess environment
        Remaining `opts` are forwarded to SubprocessIOChunker.
        """
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
        _opts = {'env': gitenv, 'shell': False}

        proc = None
        try:
            _opts.update(opts)
            proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

            # joining the chunker consumes the process output streams
            return ''.join(proc), ''.join(proc.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)
        finally:
            # always release the subprocess resources
            if proc:
                proc.close()
1175 1175
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        # Install RhodeCode git hooks into the repo; force=True overwrites
        # already-present hooks.
        from vcsserver.hook_utils import install_git_hooks
        bare = self.bare(wire)
        path = wire['path']
        return install_git_hooks(path, bare, force_create=force)

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        # Report the installed pre/post hook versions for this repository.
        from vcsserver.hook_utils import (
            get_git_pre_hook_version, get_git_post_hook_version)
        bare = self.bare(wire)
        path = wire['path']
        return {
            'pre_version': get_git_pre_hook_version(path, bare),
            'post_version': get_git_post_hook_version(path, bare),
        }
@@ -1,19 +1,19 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 from app import create_app
19 from .app import create_app
@@ -1,1009 +1,1009 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
import io
import logging
import stat
# py2->py3 conversion produced two duplicate urllib imports (replacing both
# `import urllib` and `import urllib2`); a single import covers all modules.
import urllib.error
import urllib.parse
import urllib.request
import traceback
24 24
25 25 from hgext import largefiles, rebase, purge
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30 from mercurial import repair
31 31
32 32 import vcsserver
33 33 from vcsserver import exceptions
34 34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
35 35 from vcsserver.hgcompat import (
36 36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 40 RepoLookupError, InterventionRequired, RequirementError)
41 41 from vcsserver.vcs_base import RemoteBase
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
def make_ui_from_config(repo_config):
    """
    Build a mercurial `ui` object from `repo_config`, an iterable of
    (section, option, value) triples, with all status/warn/error/note/debug
    output mirrored into this module's logger.
    """

    class LoggingUI(ui.ui):
        def status(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            # NOTE: Logger.warn is deprecated in python3; use warning()
            log.warning(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    baseui.setconfig('ui', 'paginate', 'never')
    # for better Error reporting of Mercurial
    baseui.setconfig('ui', 'message-output', 'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
99 99
100 100
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e))
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e))
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e))
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except Exception as e:
            # exceptions already tagged by vcsserver pass through untouched
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e))

            raise
    return wrapper
124 124
125 125
class MercurialFactory(RepoFactory):
    # Factory producing mercurial repository objects for a given wire config.
    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        # Build a baseui; with hooks=False the RhodeCode push/pull hooks are
        # stripped from the config first.
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            new_config = []
            for section, option, value in config:
                if section == 'hooks' and option in hooks_to_clean:
                    continue
                new_config.append((section, option, value))
            config = new_config

        baseui = make_ui_from_config(config)
        return baseui

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return instance(baseui, wire["path"], create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
153 153
154 154
def patch_ui_message_output(baseui):
    """
    Redirect all output channels of `baseui` into one in-memory buffer.

    Un-quiets the ui and replaces its ``status``/``write``/``warn``/``debug``
    hooks with a single capturing writer. Returns the patched ui together
    with the ``io.BytesIO`` buffer collecting everything written.
    """
    baseui.setconfig('ui', 'quiet', 'false')
    captured = io.BytesIO()

    def _capture(data, **unused_kwargs):
        # ignore extra kwargs mercurial may pass (e.g. label=)
        captured.write(data)

    for channel in ('status', 'write', 'warn', 'debug'):
        setattr(baseui, channel, _capture)

    return baseui, captured
168 168
169 169
170 170 class HgRemote(RemoteBase):
171 171
    def __init__(self, factory):
        # `factory` is a MercurialFactory; `_bulk_methods` maps attribute
        # names requested by bulk_request() to the method computing them.
        self._factory = factory
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }
188 188
    def _get_ctx(self, repo, ref):
        # Thin wrapper over hgcompat.get_ctx to resolve `ref` to a changectx.
        return get_ctx(repo, ref)

    @reraise_safe_exceptions
    def discover_hg_version(self):
        # Report the version string of the bundled mercurial library.
        from mercurial import util
        return util.version()
196 196
    @reraise_safe_exceptions
    def is_empty(self, wire):
        # True when the repository has no changesets; errors while reading
        # are treated as "not empty" (best effort).
        repo = self._factory.repo(wire)

        try:
            return len(repo) == 0
        except Exception:
            log.exception("failed to read object_store")
            return False
206 206
    @reraise_safe_exceptions
    def archive_repo(self, archive_path, mtime, file_info, kind):
        # Write an archive of `file_info` entries to `archive_path`.
        # Supported kinds: tgz, tbz2, zip.
        if kind == "tgz":
            archiver = archival.tarit(archive_path, mtime, "gz")
        elif kind == "tbz2":
            archiver = archival.tarit(archive_path, mtime, "bz2")
        elif kind == 'zip':
            archiver = archival.zipit(archive_path, mtime)
        else:
            raise exceptions.ArchiveException()(
                'Remote does not support: "%s".' % kind)

        for f_path, f_mode, f_is_link, f_content in file_info:
            archiver.addfile(f_path, f_mode, f_is_link, f_content)
        archiver.done()
222 222
    @reraise_safe_exceptions
    def bookmarks(self, wire):
        # Return {bookmark_name: node} for all bookmarks.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return dict(repo._bookmarks)

        return _bookmarks(context_uid, repo_id)

    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        # Return {branch_name: tip_node}; `normal`/`closed` select whether
        # open and/or closed branches are included.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[branch_name] = tip
                if closed and is_closed:
                    bt[branch_name] = tip

            return bt

        return _branches(context_uid, repo_id, normal, closed)
250 250
    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        # Compute several commit attributes in one call; `pre_load` lists
        # keys of self._bulk_methods to evaluate for `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        'Unknown bulk attribute: "%s"' % attr)
            return result

        return _bulk_request(repo_id, commit_id, sorted(pre_load))
267 267
    @reraise_safe_exceptions
    def ctx_branch(self, wire, commit_id):
        # Branch name of the changeset `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_branch(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.branch()
        return _ctx_branch(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        # (timestamp, tz offset) tuple of the changeset `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()
        return _ctx_date(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_description(self, wire, revision):
        # Commit message of `revision` (uncached).
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.description()

    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        # Files touched by the changeset `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)
304 304
    @reraise_safe_exceptions
    def ctx_list(self, path, revision):
        # All file paths tracked in `revision` (uncached). NOTE: first arg is
        # the wire/path config passed straight to the factory.
        repo = self._factory.repo(path)
        ctx = self._get_ctx(repo, revision)
        return list(ctx)

    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        # Hex ids of visible (not hidden/obsolete) parents of `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        # Hex ids of visible (not hidden/obsolete) children of `commit_id`.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)
334 334
    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        # Phase of the changeset as an int.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # public=0, draft=1, secret=3
            return ctx.phase()
        return _ctx_phase(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        # True when the changeset has been obsoleted (changeset evolution).
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()
        return _ctx_obsolete(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        # True when the changeset is hidden from normal visibility.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()
        return _ctx_hidden(context_uid, repo_id, commit_id)
365 365
366 366 @reraise_safe_exceptions
367 367 def ctx_substate(self, wire, revision):
368 368 repo = self._factory.repo(wire)
369 369 ctx = self._get_ctx(repo, revision)
370 370 return ctx.substate
371 371
372 372 @reraise_safe_exceptions
373 373 def ctx_status(self, wire, revision):
374 374 repo = self._factory.repo(wire)
375 375 ctx = self._get_ctx(repo, revision)
376 376 status = repo[ctx.p1().node()].status(other=ctx.node())
377 377 # object of status (odd, custom named tuple in mercurial) is not
378 378 # correctly serializable, we make it a list, as the underling
379 379 # API expects this to be a list
380 380 return list(status)
381 381
382 382 @reraise_safe_exceptions
383 383 def ctx_user(self, wire, revision):
384 384 repo = self._factory.repo(wire)
385 385 ctx = self._get_ctx(repo, revision)
386 386 return ctx.user()
387 387
388 388 @reraise_safe_exceptions
389 389 def check_url(self, url, config):
390 390 _proto = None
391 391 if '+' in url[:url.find('://')]:
392 392 _proto = url[0:url.find('+')]
393 393 url = url[url.find('+') + 1:]
394 394 handlers = []
395 395 url_obj = url_parser(url)
396 396 test_uri, authinfo = url_obj.authinfo()
397 397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
398 398 url_obj.query = obfuscate_qs(url_obj.query)
399 399
400 400 cleaned_uri = str(url_obj)
401 401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
402 402
403 403 if authinfo:
404 404 # create a password manager
405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
405 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
406 406 passmgr.add_password(*authinfo)
407 407
408 408 handlers.extend((httpbasicauthhandler(passmgr),
409 409 httpdigestauthhandler(passmgr)))
410 410
411 o = urllib2.build_opener(*handlers)
411 o = urllib.request.build_opener(*handlers)
412 412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
413 413 ('Accept', 'application/mercurial-0.1')]
414 414
415 415 q = {"cmd": 'between'}
416 416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
417 qs = '?%s' % urllib.urlencode(q)
417 qs = '?%s' % urllib.parse.urlencode(q)
418 418 cu = "%s%s" % (test_uri, qs)
419 req = urllib2.Request(cu, None, {})
419 req = urllib.request.Request(cu, None, {})
420 420
421 421 try:
422 422 log.debug("Trying to open URL %s", cleaned_uri)
423 423 resp = o.open(req)
424 424 if resp.code != 200:
425 425 raise exceptions.URLError()('Return Code is not 200')
426 426 except Exception as e:
427 427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
428 428 # means it cannot be cloned
429 429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
430 430
431 431 # now check if it's a proper hg repo, but don't do it for svn
432 432 try:
433 433 if _proto == 'svn':
434 434 pass
435 435 else:
436 436 # check for pure hg repos
437 437 log.debug(
438 438 "Verifying if URL is a Mercurial repository: %s",
439 439 cleaned_uri)
440 440 ui = make_ui_from_config(config)
441 441 peer_checker = makepeer(ui, url)
442 442 peer_checker.lookup('tip')
443 443 except Exception as e:
444 444 log.warning("URL is not a valid Mercurial repository: %s",
445 445 cleaned_uri)
446 446 raise exceptions.URLError(e)(
447 447 "url [%s] does not look like an hg repo org_exc: %s"
448 448 % (cleaned_uri, e))
449 449
450 450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
451 451 return True
452 452
453 453 @reraise_safe_exceptions
454 454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
455 455 repo = self._factory.repo(wire)
456 456
457 457 if file_filter:
458 458 match_filter = match(file_filter[0], '', [file_filter[1]])
459 459 else:
460 460 match_filter = file_filter
461 461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
462 462
463 463 try:
464 464 return "".join(patch.diff(
465 465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
466 466 except RepoLookupError as e:
467 467 raise exceptions.LookupException(e)()
468 468
469 469 @reraise_safe_exceptions
470 470 def node_history(self, wire, revision, path, limit):
471 471 cache_on, context_uid, repo_id = self._cache_on(wire)
472 472 @self.region.conditional_cache_on_arguments(condition=cache_on)
473 473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
474 474 repo = self._factory.repo(wire)
475 475
476 476 ctx = self._get_ctx(repo, revision)
477 477 fctx = ctx.filectx(path)
478 478
479 479 def history_iter():
480 480 limit_rev = fctx.rev()
481 481 for obj in reversed(list(fctx.filelog())):
482 482 obj = fctx.filectx(obj)
483 483 ctx = obj.changectx()
484 484 if ctx.hidden() or ctx.obsolete():
485 485 continue
486 486
487 487 if limit_rev >= obj.rev():
488 488 yield obj
489 489
490 490 history = []
491 491 for cnt, obj in enumerate(history_iter()):
492 492 if limit and cnt >= limit:
493 493 break
494 494 history.append(hex(obj.node()))
495 495
496 496 return [x for x in history]
497 497 return _node_history(context_uid, repo_id, revision, path, limit)
498 498
499 499 @reraise_safe_exceptions
500 500 def node_history_untill(self, wire, revision, path, limit):
501 501 cache_on, context_uid, repo_id = self._cache_on(wire)
502 502 @self.region.conditional_cache_on_arguments(condition=cache_on)
503 503 def _node_history_until(_context_uid, _repo_id):
504 504 repo = self._factory.repo(wire)
505 505 ctx = self._get_ctx(repo, revision)
506 506 fctx = ctx.filectx(path)
507 507
508 508 file_log = list(fctx.filelog())
509 509 if limit:
510 510 # Limit to the last n items
511 511 file_log = file_log[-limit:]
512 512
513 513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
514 514 return _node_history_until(context_uid, repo_id, revision, path, limit)
515 515
516 516 @reraise_safe_exceptions
517 517 def fctx_annotate(self, wire, revision, path):
518 518 repo = self._factory.repo(wire)
519 519 ctx = self._get_ctx(repo, revision)
520 520 fctx = ctx.filectx(path)
521 521
522 522 result = []
523 523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
524 524 ln_no = i
525 525 sha = hex(annotate_obj.fctx.node())
526 526 content = annotate_obj.text
527 527 result.append((ln_no, sha, content))
528 528 return result
529 529
530 530 @reraise_safe_exceptions
531 531 def fctx_node_data(self, wire, revision, path):
532 532 repo = self._factory.repo(wire)
533 533 ctx = self._get_ctx(repo, revision)
534 534 fctx = ctx.filectx(path)
535 535 return fctx.data()
536 536
537 537 @reraise_safe_exceptions
538 538 def fctx_flags(self, wire, commit_id, path):
539 539 cache_on, context_uid, repo_id = self._cache_on(wire)
540 540 @self.region.conditional_cache_on_arguments(condition=cache_on)
541 541 def _fctx_flags(_repo_id, _commit_id, _path):
542 542 repo = self._factory.repo(wire)
543 543 ctx = self._get_ctx(repo, commit_id)
544 544 fctx = ctx.filectx(path)
545 545 return fctx.flags()
546 546
547 547 return _fctx_flags(repo_id, commit_id, path)
548 548
549 549 @reraise_safe_exceptions
550 550 def fctx_size(self, wire, commit_id, path):
551 551 cache_on, context_uid, repo_id = self._cache_on(wire)
552 552 @self.region.conditional_cache_on_arguments(condition=cache_on)
553 553 def _fctx_size(_repo_id, _revision, _path):
554 554 repo = self._factory.repo(wire)
555 555 ctx = self._get_ctx(repo, commit_id)
556 556 fctx = ctx.filectx(path)
557 557 return fctx.size()
558 558 return _fctx_size(repo_id, commit_id, path)
559 559
560 560 @reraise_safe_exceptions
561 561 def get_all_commit_ids(self, wire, name):
562 562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
565 565 repo = self._factory.repo(wire)
566 566 repo = repo.filtered(name)
567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
567 revs = [hex(x[7]) for x in repo.changelog.index]
568 568 return revs
569 569 return _get_all_commit_ids(context_uid, repo_id, name)
570 570
571 571 @reraise_safe_exceptions
572 572 def get_config_value(self, wire, section, name, untrusted=False):
573 573 repo = self._factory.repo(wire)
574 574 return repo.ui.config(section, name, untrusted=untrusted)
575 575
576 576 @reraise_safe_exceptions
577 577 def is_large_file(self, wire, commit_id, path):
578 578 cache_on, context_uid, repo_id = self._cache_on(wire)
579 579 @self.region.conditional_cache_on_arguments(condition=cache_on)
580 580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
581 581 return largefiles.lfutil.isstandin(path)
582 582
583 583 return _is_large_file(context_uid, repo_id, commit_id, path)
584 584
585 585 @reraise_safe_exceptions
586 586 def is_binary(self, wire, revision, path):
587 587 cache_on, context_uid, repo_id = self._cache_on(wire)
588 588
589 589 @self.region.conditional_cache_on_arguments(condition=cache_on)
590 590 def _is_binary(_repo_id, _sha, _path):
591 591 repo = self._factory.repo(wire)
592 592 ctx = self._get_ctx(repo, revision)
593 593 fctx = ctx.filectx(path)
594 594 return fctx.isbinary()
595 595
596 596 return _is_binary(repo_id, revision, path)
597 597
598 598 @reraise_safe_exceptions
599 599 def in_largefiles_store(self, wire, sha):
600 600 repo = self._factory.repo(wire)
601 601 return largefiles.lfutil.instore(repo, sha)
602 602
603 603 @reraise_safe_exceptions
604 604 def in_user_cache(self, wire, sha):
605 605 repo = self._factory.repo(wire)
606 606 return largefiles.lfutil.inusercache(repo.ui, sha)
607 607
608 608 @reraise_safe_exceptions
609 609 def store_path(self, wire, sha):
610 610 repo = self._factory.repo(wire)
611 611 return largefiles.lfutil.storepath(repo, sha)
612 612
613 613 @reraise_safe_exceptions
614 614 def link(self, wire, sha, path):
615 615 repo = self._factory.repo(wire)
616 616 largefiles.lfutil.link(
617 617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
618 618
619 619 @reraise_safe_exceptions
620 620 def localrepository(self, wire, create=False):
621 621 self._factory.repo(wire, create=create)
622 622
623 623 @reraise_safe_exceptions
624 624 def lookup(self, wire, revision, both):
625 625 cache_on, context_uid, repo_id = self._cache_on(wire)
626 626 @self.region.conditional_cache_on_arguments(condition=cache_on)
627 627 def _lookup(_context_uid, _repo_id, _revision, _both):
628 628
629 629 repo = self._factory.repo(wire)
630 630 rev = _revision
631 631 if isinstance(rev, int):
632 632 # NOTE(marcink):
633 633 # since Mercurial doesn't support negative indexes properly
634 634 # we need to shift accordingly by one to get proper index, e.g
635 635 # repo[-1] => repo[-2]
636 636 # repo[0] => repo[-1]
637 637 if rev <= 0:
638 638 rev = rev + -1
639 639 try:
640 640 ctx = self._get_ctx(repo, rev)
641 641 except (TypeError, RepoLookupError) as e:
642 642 e._org_exc_tb = traceback.format_exc()
643 643 raise exceptions.LookupException(e)(rev)
644 644 except LookupError as e:
645 645 e._org_exc_tb = traceback.format_exc()
646 646 raise exceptions.LookupException(e)(e.name)
647 647
648 648 if not both:
649 649 return ctx.hex()
650 650
651 651 ctx = repo[ctx.hex()]
652 652 return ctx.hex(), ctx.rev()
653 653
654 654 return _lookup(context_uid, repo_id, revision, both)
655 655
656 656 @reraise_safe_exceptions
657 657 def sync_push(self, wire, url):
658 658 if not self.check_url(url, wire['config']):
659 659 return
660 660
661 661 repo = self._factory.repo(wire)
662 662
663 663 # Disable any prompts for this repo
664 664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
665 665
666 bookmarks = dict(repo._bookmarks).keys()
666 bookmarks = list(dict(repo._bookmarks).keys())
667 667 remote = peer(repo, {}, url)
668 668 # Disable any prompts for this remote
669 669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
670 670
671 671 return exchange.push(
672 672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
673 673
674 674 @reraise_safe_exceptions
675 675 def revision(self, wire, rev):
676 676 repo = self._factory.repo(wire)
677 677 ctx = self._get_ctx(repo, rev)
678 678 return ctx.rev()
679 679
680 680 @reraise_safe_exceptions
681 681 def rev_range(self, wire, commit_filter):
682 682 cache_on, context_uid, repo_id = self._cache_on(wire)
683 683
684 684 @self.region.conditional_cache_on_arguments(condition=cache_on)
685 685 def _rev_range(_context_uid, _repo_id, _filter):
686 686 repo = self._factory.repo(wire)
687 687 revisions = [rev for rev in revrange(repo, commit_filter)]
688 688 return revisions
689 689
690 690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
691 691
692 692 @reraise_safe_exceptions
693 693 def rev_range_hash(self, wire, node):
694 694 repo = self._factory.repo(wire)
695 695
696 696 def get_revs(repo, rev_opt):
697 697 if rev_opt:
698 698 revs = revrange(repo, rev_opt)
699 699 if len(revs) == 0:
700 700 return (nullrev, nullrev)
701 701 return max(revs), min(revs)
702 702 else:
703 703 return len(repo) - 1, 0
704 704
705 705 stop, start = get_revs(repo, [node + ':'])
706 706 revs = [hex(repo[r].node()) for r in range(start, stop + 1)]
707 707 return revs
708 708
709 709 @reraise_safe_exceptions
710 710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
711 711 other_path = kwargs.pop('other_path', None)
712 712
713 713 # case when we want to compare two independent repositories
714 714 if other_path and other_path != wire["path"]:
715 715 baseui = self._factory._create_config(wire["config"])
716 716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
717 717 else:
718 718 repo = self._factory.repo(wire)
719 719 return list(repo.revs(rev_spec, *args))
720 720
721 721 @reraise_safe_exceptions
722 722 def verify(self, wire,):
723 723 repo = self._factory.repo(wire)
724 724 baseui = self._factory._create_config(wire['config'])
725 725
726 726 baseui, output = patch_ui_message_output(baseui)
727 727
728 728 repo.ui = baseui
729 729 verify.verify(repo)
730 730 return output.getvalue()
731 731
732 732 @reraise_safe_exceptions
733 733 def hg_update_cache(self, wire,):
734 734 repo = self._factory.repo(wire)
735 735 baseui = self._factory._create_config(wire['config'])
736 736 baseui, output = patch_ui_message_output(baseui)
737 737
738 738 repo.ui = baseui
739 739 with repo.wlock(), repo.lock():
740 740 repo.updatecaches(full=True)
741 741
742 742 return output.getvalue()
743 743
744 744 @reraise_safe_exceptions
745 745 def hg_rebuild_fn_cache(self, wire,):
746 746 repo = self._factory.repo(wire)
747 747 baseui = self._factory._create_config(wire['config'])
748 748 baseui, output = patch_ui_message_output(baseui)
749 749
750 750 repo.ui = baseui
751 751
752 752 repair.rebuildfncache(baseui, repo)
753 753
754 754 return output.getvalue()
755 755
756 756 @reraise_safe_exceptions
757 757 def tags(self, wire):
758 758 cache_on, context_uid, repo_id = self._cache_on(wire)
759 759 @self.region.conditional_cache_on_arguments(condition=cache_on)
760 760 def _tags(_context_uid, _repo_id):
761 761 repo = self._factory.repo(wire)
762 762 return repo.tags()
763 763
764 764 return _tags(context_uid, repo_id)
765 765
766 766 @reraise_safe_exceptions
767 767 def update(self, wire, node=None, clean=False):
768 768 repo = self._factory.repo(wire)
769 769 baseui = self._factory._create_config(wire['config'])
770 770 commands.update(baseui, repo, node=node, clean=clean)
771 771
772 772 @reraise_safe_exceptions
773 773 def identify(self, wire):
774 774 repo = self._factory.repo(wire)
775 775 baseui = self._factory._create_config(wire['config'])
776 776 output = io.BytesIO()
777 777 baseui.write = output.write
778 778 # This is required to get a full node id
779 779 baseui.debugflag = True
780 780 commands.identify(baseui, repo, id=True)
781 781
782 782 return output.getvalue()
783 783
784 784 @reraise_safe_exceptions
785 785 def heads(self, wire, branch=None):
786 786 repo = self._factory.repo(wire)
787 787 baseui = self._factory._create_config(wire['config'])
788 788 output = io.BytesIO()
789 789
790 790 def write(data, **unused_kwargs):
791 791 output.write(data)
792 792
793 793 baseui.write = write
794 794 if branch:
795 795 args = [branch]
796 796 else:
797 797 args = []
798 798 commands.heads(baseui, repo, template='{node} ', *args)
799 799
800 800 return output.getvalue()
801 801
802 802 @reraise_safe_exceptions
803 803 def ancestor(self, wire, revision1, revision2):
804 804 repo = self._factory.repo(wire)
805 805 changelog = repo.changelog
806 806 lookup = repo.lookup
807 807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
808 808 return hex(a)
809 809
810 810 @reraise_safe_exceptions
811 811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
812 812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
813 813 clone(baseui, source, dest, noupdate=not update_after_clone)
814 814
815 815 @reraise_safe_exceptions
816 816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
817 817
818 818 repo = self._factory.repo(wire)
819 819 baseui = self._factory._create_config(wire['config'])
820 820 publishing = baseui.configbool('phases', 'publish')
821 821 if publishing:
822 822 new_commit = 'public'
823 823 else:
824 824 new_commit = 'draft'
825 825
826 826 def _filectxfn(_repo, ctx, path):
827 827 """
828 828 Marks given path as added/changed/removed in a given _repo. This is
829 829 for internal mercurial commit function.
830 830 """
831 831
832 832 # check if this path is removed
833 833 if path in removed:
834 834 # returning None is a way to mark node for removal
835 835 return None
836 836
837 837 # check if this path is added
838 838 for node in updated:
839 839 if node['path'] == path:
840 840 return memfilectx(
841 841 _repo,
842 842 changectx=ctx,
843 843 path=node['path'],
844 844 data=node['content'],
845 845 islink=False,
846 846 isexec=bool(node['mode'] & stat.S_IXUSR),
847 847 copysource=False)
848 848
849 849 raise exceptions.AbortException()(
850 850 "Given path haven't been marked as added, "
851 851 "changed or removed (%s)" % path)
852 852
853 853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
854 854
855 855 commit_ctx = memctx(
856 856 repo=repo,
857 857 parents=parents,
858 858 text=message,
859 859 files=files,
860 860 filectxfn=_filectxfn,
861 861 user=user,
862 862 date=(commit_time, commit_timezone),
863 863 extra=extra)
864 864
865 865 n = repo.commitctx(commit_ctx)
866 866 new_id = hex(n)
867 867
868 868 return new_id
869 869
870 870 @reraise_safe_exceptions
871 871 def pull(self, wire, url, commit_ids=None):
872 872 repo = self._factory.repo(wire)
873 873 # Disable any prompts for this repo
874 874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
875 875
876 876 remote = peer(repo, {}, url)
877 877 # Disable any prompts for this remote
878 878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
879 879
880 880 if commit_ids:
881 881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
882 882
883 883 return exchange.pull(
884 884 repo, remote, heads=commit_ids, force=None).cgresult
885 885
886 886 @reraise_safe_exceptions
887 887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
888 888 repo = self._factory.repo(wire)
889 889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
890 890
891 891 # Mercurial internally has a lot of logic that checks ONLY if
892 892 # option is defined, we just pass those if they are defined then
893 893 opts = {}
894 894 if bookmark:
895 895 opts['bookmark'] = bookmark
896 896 if branch:
897 897 opts['branch'] = branch
898 898 if revision:
899 899 opts['rev'] = revision
900 900
901 901 commands.pull(baseui, repo, source, **opts)
902 902
903 903 @reraise_safe_exceptions
904 904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
905 905 repo = self._factory.repo(wire)
906 906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
907 907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
908 908 new_branch=push_branches)
909 909
910 910 @reraise_safe_exceptions
911 911 def strip(self, wire, revision, update, backup):
912 912 repo = self._factory.repo(wire)
913 913 ctx = self._get_ctx(repo, revision)
914 914 hgext_strip(
915 915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
916 916
917 917 @reraise_safe_exceptions
918 918 def get_unresolved_files(self, wire):
919 919 repo = self._factory.repo(wire)
920 920
921 921 log.debug('Calculating unresolved files for repo: %s', repo)
922 922 output = io.BytesIO()
923 923
924 924 def write(data, **unused_kwargs):
925 925 output.write(data)
926 926
927 927 baseui = self._factory._create_config(wire['config'])
928 928 baseui.write = write
929 929
930 930 commands.resolve(baseui, repo, list=True)
931 931 unresolved = output.getvalue().splitlines(0)
932 932 return unresolved
933 933
934 934 @reraise_safe_exceptions
935 935 def merge(self, wire, revision):
936 936 repo = self._factory.repo(wire)
937 937 baseui = self._factory._create_config(wire['config'])
938 938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
939 939
940 940 # In case of sub repositories are used mercurial prompts the user in
941 941 # case of merge conflicts or different sub repository sources. By
942 942 # setting the interactive flag to `False` mercurial doesn't prompt the
943 943 # used but instead uses a default value.
944 944 repo.ui.setconfig('ui', 'interactive', False)
945 945 commands.merge(baseui, repo, rev=revision)
946 946
947 947 @reraise_safe_exceptions
948 948 def merge_state(self, wire):
949 949 repo = self._factory.repo(wire)
950 950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
951 951
952 952 # In case of sub repositories are used mercurial prompts the user in
953 953 # case of merge conflicts or different sub repository sources. By
954 954 # setting the interactive flag to `False` mercurial doesn't prompt the
955 955 # used but instead uses a default value.
956 956 repo.ui.setconfig('ui', 'interactive', False)
957 957 ms = hg_merge.mergestate(repo)
958 958 return [x for x in ms.unresolved()]
959 959
960 960 @reraise_safe_exceptions
961 961 def commit(self, wire, message, username, close_branch=False):
962 962 repo = self._factory.repo(wire)
963 963 baseui = self._factory._create_config(wire['config'])
964 964 repo.ui.setconfig('ui', 'username', username)
965 965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
966 966
967 967 @reraise_safe_exceptions
968 968 def rebase(self, wire, source=None, dest=None, abort=False):
969 969 repo = self._factory.repo(wire)
970 970 baseui = self._factory._create_config(wire['config'])
971 971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
972 972 # In case of sub repositories are used mercurial prompts the user in
973 973 # case of merge conflicts or different sub repository sources. By
974 974 # setting the interactive flag to `False` mercurial doesn't prompt the
975 975 # used but instead uses a default value.
976 976 repo.ui.setconfig('ui', 'interactive', False)
977 977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
978 978
979 979 @reraise_safe_exceptions
980 980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
981 981 repo = self._factory.repo(wire)
982 982 ctx = self._get_ctx(repo, revision)
983 983 node = ctx.node()
984 984
985 985 date = (tag_time, tag_timezone)
986 986 try:
987 987 hg_tag.tag(repo, name, node, message, local, user, date)
988 988 except Abort as e:
989 989 log.exception("Tag operation aborted")
990 990 # Exception can contain unicode which we convert
991 991 raise exceptions.AbortException(e)(repr(e))
992 992
993 993 @reraise_safe_exceptions
994 994 def bookmark(self, wire, bookmark, revision=None):
995 995 repo = self._factory.repo(wire)
996 996 baseui = self._factory._create_config(wire['config'])
997 997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
998 998
999 999 @reraise_safe_exceptions
1000 1000 def install_hooks(self, wire, force=False):
1001 1001 # we don't need any special hooks for Mercurial
1002 1002 pass
1003 1003
1004 1004 @reraise_safe_exceptions
1005 1005 def get_hooks_info(self, wire):
1006 1006 return {
1007 1007 'pre_version': vcsserver.__version__,
1008 1008 'post_version': vcsserver.__version__,
1009 1009 }
@@ -1,134 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
31 31 def patch_largefiles_capabilities():
32 32 """
33 33 Patches the capabilities function in the largefiles extension.
34 34 """
35 35 from vcsserver import hgcompat
36 36 lfproto = hgcompat.largefiles.proto
37 37 wrapper = _dynamic_capabilities_wrapper(
38 38 lfproto, hgcompat.extensions.extensions)
39 39 lfproto._capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 44 wrapped_capabilities = lfproto._capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 47 def _dynamic_capabilities(orig, repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 55 return calc_capabilities(orig, repo, proto)
56 56 else:
57 57 logger.debug('Extension largefiles disabled')
58 58 return orig(repo, proto)
59 59
60 60 return _dynamic_capabilities
61 61
62 62
63 63 def patch_subrepo_type_mapping():
64 64 from collections import defaultdict
65 from hgcompat import subrepo, subrepoutil
65 from .hgcompat import subrepo, subrepoutil
66 66 from vcsserver.exceptions import SubrepoMergeException
67 67
68 68 class NoOpSubrepo(subrepo.abstractsubrepo):
69 69
70 70 def __init__(self, ctx, path, *args, **kwargs):
71 71 """Initialize abstractsubrepo part
72 72
73 73 ``ctx`` is the context referring this subrepository in the
74 74 parent repository.
75 75
76 76 ``path`` is the path to this subrepository as seen from
77 77 innermost repository.
78 78 """
79 79 self.ui = ctx.repo().ui
80 80 self._ctx = ctx
81 81 self._path = path
82 82
83 83 def storeclean(self, path):
84 84 """
85 85 returns true if the repository has not changed since it was last
86 86 cloned from or pushed to a given repository.
87 87 """
88 88 return True
89 89
90 90 def dirty(self, ignoreupdate=False, missing=False):
91 91 """returns true if the dirstate of the subrepo is dirty or does not
92 92 match current stored state. If ignoreupdate is true, only check
93 93 whether the subrepo has uncommitted changes in its dirstate.
94 94 """
95 95 return False
96 96
97 97 def basestate(self):
98 98 """current working directory base state, disregarding .hgsubstate
99 99 state and working directory modifications"""
100 100 substate = subrepoutil.state(self._ctx, self.ui)
101 101 file_system_path, rev, repotype = substate.get(self._path)
102 102 return rev
103 103
104 104 def remove(self):
105 105 """remove the subrepo
106 106
107 107 (should verify the dirstate is not dirty first)
108 108 """
109 109 pass
110 110
111 111 def get(self, state, overwrite=False):
112 112 """run whatever commands are needed to put the subrepo into
113 113 this state
114 114 """
115 115 pass
116 116
117 117 def merge(self, state):
118 118 """merge currently-saved state with the new state."""
119 119 raise SubrepoMergeException()()
120 120
121 121 def push(self, opts):
122 122 """perform whatever action is analogous to 'hg push'
123 123
124 124 This may be a no-op on some systems.
125 125 """
126 126 pass
127 127
128 128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 129 # whenever a subrepo class is looked up.
130 130 subrepo.types = {
131 131 'hg': NoOpSubrepo,
132 132 'git': NoOpSubrepo,
133 133 'svn': NoOpSubrepo
134 134 }
@@ -1,729 +1,729 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 from httplib import HTTPConnection
28 from http.client import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 49 try:
50 50 connection.request('POST', '/', body)
51 51 except Exception:
52 52 log.error('Connection failed on %s', connection)
53 53 raise
54 54 response = connection.getresponse()
55 55
56 56 response_data = response.read()
57 57
58 58 try:
59 59 return json.loads(response_data)
60 60 except Exception:
61 61 log.exception('Failed to decode hook response json data. '
62 62 'response_code:%s, raw_data:%s',
63 63 response.status, response_data)
64 64 raise
65 65
66 66 def _serialize(self, hook_name, extras):
67 67 data = {
68 68 'method': hook_name,
69 69 'extras': extras
70 70 }
71 71 return json.dumps(data)
72 72
73 73
74 74 class HooksDummyClient(object):
75 75 def __init__(self, hooks_module):
76 76 self._hooks_module = importlib.import_module(hooks_module)
77 77
78 78 def __call__(self, hook_name, extras):
79 79 with self._hooks_module.Hooks() as hooks:
80 80 return getattr(hooks, hook_name)(extras)
81 81
82 82
83 83 class HooksShadowRepoClient(object):
84 84
85 85 def __call__(self, hook_name, extras):
86 86 return {'output': '', 'status': 0}
87 87
88 88
89 89 class RemoteMessageWriter(object):
90 90 """Writer base class."""
91 91 def write(self, message):
92 92 raise NotImplementedError()
93 93
94 94
95 95 class HgMessageWriter(RemoteMessageWriter):
96 96 """Writer that knows how to send messages to mercurial clients."""
97 97
98 98 def __init__(self, ui):
99 99 self.ui = ui
100 100
101 101 def write(self, message):
102 102 # TODO: Check why the quiet flag is set by default.
103 103 old = self.ui.quiet
104 104 self.ui.quiet = False
105 105 self.ui.status(message.encode('utf-8'))
106 106 self.ui.quiet = old
107 107
108 108
109 109 class GitMessageWriter(RemoteMessageWriter):
110 110 """Writer that knows how to send messages to git clients."""
111 111
112 112 def __init__(self, stdout=None):
113 113 self.stdout = stdout or sys.stdout
114 114
115 115 def write(self, message):
116 116 self.stdout.write(message.encode('utf-8'))
117 117
118 118
119 119 class SvnMessageWriter(RemoteMessageWriter):
120 120 """Writer that knows how to send messages to svn clients."""
121 121
122 122 def __init__(self, stderr=None):
123 123 # SVN needs data sent to stderr for back-to-client messaging
124 124 self.stderr = stderr or sys.stderr
125 125
126 126 def write(self, message):
127 127 self.stderr.write(message.encode('utf-8'))
128 128
129 129
130 130 def _handle_exception(result):
131 131 exception_class = result.get('exception')
132 132 exception_traceback = result.get('exception_traceback')
133 133
134 134 if exception_traceback:
135 135 log.error('Got traceback from remote call:%s', exception_traceback)
136 136
137 137 if exception_class == 'HTTPLockedRC':
138 138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 139 elif exception_class == 'HTTPBranchProtected':
140 140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 141 elif exception_class == 'RepositoryError':
142 142 raise exceptions.VcsException()(*result['exception_args'])
143 143 elif exception_class:
144 144 raise Exception('Got remote exception "%s" with args "%s"' %
145 145 (exception_class, result['exception_args']))
146 146
147 147
148 148 def _get_hooks_client(extras):
149 149 hooks_uri = extras.get('hooks_uri')
150 150 is_shadow_repo = extras.get('is_shadow_repo')
151 151 if hooks_uri:
152 152 return HooksHttpClient(extras['hooks_uri'])
153 153 elif is_shadow_repo:
154 154 return HooksShadowRepoClient()
155 155 else:
156 156 return HooksDummyClient(extras['hooks_module'])
157 157
158 158
159 159 def _call_hook(hook_name, extras, writer):
160 160 hooks_client = _get_hooks_client(extras)
161 161 log.debug('Hooks, using client:%s', hooks_client)
162 162 result = hooks_client(hook_name, extras)
163 163 log.debug('Hooks got result: %s', result)
164 164
165 165 _handle_exception(result)
166 166 writer.write(result['output'])
167 167
168 168 return result['status']
169 169
170 170
171 171 def _extras_from_ui(ui):
172 172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 173 if not hook_data:
174 174 # maybe it's inside environ ?
175 175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 176 if env_hook_data:
177 177 hook_data = env_hook_data
178 178
179 179 extras = {}
180 180 if hook_data:
181 181 extras = json.loads(hook_data)
182 182 return extras
183 183
184 184
185 185 def _rev_range_hash(repo, node, check_heads=False):
186 186 from vcsserver.hgcompat import get_ctx
187 187
188 188 commits = []
189 189 revs = []
190 190 start = get_ctx(repo, node).rev()
191 191 end = len(repo)
192 192 for rev in range(start, end):
193 193 revs.append(rev)
194 194 ctx = get_ctx(repo, rev)
195 195 commit_id = mercurial.node.hex(ctx.node())
196 196 branch = ctx.branch()
197 197 commits.append((commit_id, branch))
198 198
199 199 parent_heads = []
200 200 if check_heads:
201 201 parent_heads = _check_heads(repo, start, end, revs)
202 202 return commits, parent_heads
203 203
204 204
205 205 def _check_heads(repo, start, end, commits):
206 206 from vcsserver.hgcompat import get_ctx
207 207 changelog = repo.changelog
208 208 parents = set()
209 209
210 210 for new_rev in commits:
211 211 for p in changelog.parentrevs(new_rev):
212 212 if p == mercurial.node.nullrev:
213 213 continue
214 214 if p < start:
215 215 parents.add(p)
216 216
217 217 for p in parents:
218 218 branch = get_ctx(repo, p).branch()
219 219 # The heads descending from that parent, on the same branch
220 220 parent_heads = set([p])
221 221 reachable = set([p])
222 222 for x in range(p + 1, end):
223 223 if get_ctx(repo, x).branch() != branch:
224 224 continue
225 225 for pp in changelog.parentrevs(x):
226 226 if pp in reachable:
227 227 reachable.add(x)
228 228 parent_heads.discard(pp)
229 229 parent_heads.add(x)
230 230 # More than one head? Suggest merging
231 231 if len(parent_heads) > 1:
232 232 return list(parent_heads)
233 233
234 234 return []
235 235
236 236
237 237 def _get_git_env():
238 238 env = {}
239 239 for k, v in os.environ.items():
240 240 if k.startswith('GIT'):
241 241 env[k] = v
242 242
243 243 # serialized version
244 244 return [(k, v) for k, v in env.items()]
245 245
246 246
247 247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 248 env = {}
249 249 for k, v in os.environ.items():
250 250 if k.startswith('HG'):
251 251 env[k] = v
252 252
253 253 env['HG_NODE'] = old_rev
254 254 env['HG_NODE_LAST'] = new_rev
255 255 env['HG_TXNID'] = txnid
256 256 env['HG_PENDING'] = repo_path
257 257
258 258 return [(k, v) for k, v in env.items()]
259 259
260 260
261 261 def repo_size(ui, repo, **kwargs):
262 262 extras = _extras_from_ui(ui)
263 263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264 264
265 265
266 266 def pre_pull(ui, repo, **kwargs):
267 267 extras = _extras_from_ui(ui)
268 268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269 269
270 270
271 271 def pre_pull_ssh(ui, repo, **kwargs):
272 272 extras = _extras_from_ui(ui)
273 273 if extras and extras.get('SSH'):
274 274 return pre_pull(ui, repo, **kwargs)
275 275 return 0
276 276
277 277
278 278 def post_pull(ui, repo, **kwargs):
279 279 extras = _extras_from_ui(ui)
280 280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281 281
282 282
283 283 def post_pull_ssh(ui, repo, **kwargs):
284 284 extras = _extras_from_ui(ui)
285 285 if extras and extras.get('SSH'):
286 286 return post_pull(ui, repo, **kwargs)
287 287 return 0
288 288
289 289
290 290 def pre_push(ui, repo, node=None, **kwargs):
291 291 """
292 292 Mercurial pre_push hook
293 293 """
294 294 extras = _extras_from_ui(ui)
295 295 detect_force_push = extras.get('detect_force_push')
296 296
297 297 rev_data = []
298 298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 299 branches = collections.defaultdict(list)
300 300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 301 for commit_id, branch in commits:
302 302 branches[branch].append(commit_id)
303 303
304 304 for branch, commits in branches.items():
305 305 old_rev = kwargs.get('node_last') or commits[0]
306 306 rev_data.append({
307 307 'total_commits': len(commits),
308 308 'old_rev': old_rev,
309 309 'new_rev': commits[-1],
310 310 'ref': '',
311 311 'type': 'branch',
312 312 'name': branch,
313 313 })
314 314
315 315 for push_ref in rev_data:
316 316 push_ref['multiple_heads'] = _heads
317 317
318 318 repo_path = os.path.join(
319 319 extras.get('repo_store', ''), extras.get('repository', ''))
320 320 push_ref['hg_env'] = _get_hg_env(
321 321 old_rev=push_ref['old_rev'],
322 322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 323 repo_path=repo_path)
324 324
325 325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 326 extras['commit_ids'] = rev_data
327 327
328 328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329 329
330 330
331 331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 332 extras = _extras_from_ui(ui)
333 333 if extras.get('SSH'):
334 334 return pre_push(ui, repo, node, **kwargs)
335 335
336 336 return 0
337 337
338 338
339 339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
340 340 """
341 341 Mercurial pre_push hook for SSH
342 342 """
343 343 extras = _extras_from_ui(ui)
344 344 if extras.get('SSH'):
345 345 permission = extras['SSH_PERMISSIONS']
346 346
347 347 if 'repository.write' == permission or 'repository.admin' == permission:
348 348 return 0
349 349
350 350 # non-zero ret code
351 351 return 1
352 352
353 353 return 0
354 354
355 355
356 356 def post_push(ui, repo, node, **kwargs):
357 357 """
358 358 Mercurial post_push hook
359 359 """
360 360 extras = _extras_from_ui(ui)
361 361
362 362 commit_ids = []
363 363 branches = []
364 364 bookmarks = []
365 365 tags = []
366 366
367 367 commits, _heads = _rev_range_hash(repo, node)
368 368 for commit_id, branch in commits:
369 369 commit_ids.append(commit_id)
370 370 if branch not in branches:
371 371 branches.append(branch)
372 372
373 373 if hasattr(ui, '_rc_pushkey_branches'):
374 374 bookmarks = ui._rc_pushkey_branches
375 375
376 376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
377 377 extras['commit_ids'] = commit_ids
378 378 extras['new_refs'] = {
379 379 'branches': branches,
380 380 'bookmarks': bookmarks,
381 381 'tags': tags
382 382 }
383 383
384 384 return _call_hook('post_push', extras, HgMessageWriter(ui))
385 385
386 386
387 387 def post_push_ssh(ui, repo, node, **kwargs):
388 388 """
389 389 Mercurial post_push hook for SSH
390 390 """
391 391 if _extras_from_ui(ui).get('SSH'):
392 392 return post_push(ui, repo, node, **kwargs)
393 393 return 0
394 394
395 395
396 396 def key_push(ui, repo, **kwargs):
397 397 from vcsserver.hgcompat import get_ctx
398 398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
399 399 # store new bookmarks in our UI object propagated later to post_push
400 400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
401 401 return
402 402
403 403
404 404 # backward compat
405 405 log_pull_action = post_pull
406 406
407 407 # backward compat
408 408 log_push_action = post_push
409 409
410 410
411 411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
412 412 """
413 413 Old hook name: keep here for backward compatibility.
414 414
415 415 This is only required when the installed git hooks are not upgraded.
416 416 """
417 417 pass
418 418
419 419
420 420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
421 421 """
422 422 Old hook name: keep here for backward compatibility.
423 423
424 424 This is only required when the installed git hooks are not upgraded.
425 425 """
426 426 pass
427 427
428 428
429 429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430 430
431 431
432 432 def git_pre_pull(extras):
433 433 """
434 434 Pre pull hook.
435 435
436 436 :param extras: dictionary containing the keys defined in simplevcs
437 437 :type extras: dict
438 438
439 439 :return: status code of the hook. 0 for success.
440 440 :rtype: int
441 441 """
442 442 if 'pull' not in extras['hooks']:
443 443 return HookResponse(0, '')
444 444
445 445 stdout = io.BytesIO()
446 446 try:
447 447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
448 448 except Exception as error:
449 449 status = 128
450 450 stdout.write('ERROR: %s\n' % str(error))
451 451
452 452 return HookResponse(status, stdout.getvalue())
453 453
454 454
455 455 def git_post_pull(extras):
456 456 """
457 457 Post pull hook.
458 458
459 459 :param extras: dictionary containing the keys defined in simplevcs
460 460 :type extras: dict
461 461
462 462 :return: status code of the hook. 0 for success.
463 463 :rtype: int
464 464 """
465 465 if 'pull' not in extras['hooks']:
466 466 return HookResponse(0, '')
467 467
468 468 stdout = io.BytesIO()
469 469 try:
470 470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 471 except Exception as error:
472 472 status = 128
473 473 stdout.write('ERROR: %s\n' % error)
474 474
475 475 return HookResponse(status, stdout.getvalue())
476 476
477 477
478 478 def _parse_git_ref_lines(revision_lines):
479 479 rev_data = []
480 480 for revision_line in revision_lines or []:
481 481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 482 ref_data = ref.split('/', 2)
483 483 if ref_data[1] in ('tags', 'heads'):
484 484 rev_data.append({
485 485 # NOTE(marcink):
486 486 # we're unable to tell total_commits for git at this point
487 487 # but we set the variable for consistency with GIT
488 488 'total_commits': -1,
489 489 'old_rev': old_rev,
490 490 'new_rev': new_rev,
491 491 'ref': ref,
492 492 'type': ref_data[1],
493 493 'name': ref_data[2],
494 494 })
495 495 return rev_data
496 496
497 497
498 498 def git_pre_receive(unused_repo_path, revision_lines, env):
499 499 """
500 500 Pre push hook.
501 501
502 502 :param extras: dictionary containing the keys defined in simplevcs
503 503 :type extras: dict
504 504
505 505 :return: status code of the hook. 0 for success.
506 506 :rtype: int
507 507 """
508 508 extras = json.loads(env['RC_SCM_DATA'])
509 509 rev_data = _parse_git_ref_lines(revision_lines)
510 510 if 'push' not in extras['hooks']:
511 511 return 0
512 512 empty_commit_id = '0' * 40
513 513
514 514 detect_force_push = extras.get('detect_force_push')
515 515
516 516 for push_ref in rev_data:
517 517 # store our git-env which holds the temp store
518 518 push_ref['git_env'] = _get_git_env()
519 519 push_ref['pruned_sha'] = ''
520 520 if not detect_force_push:
521 521 # don't check for forced-push when we don't need to
522 522 continue
523 523
524 524 type_ = push_ref['type']
525 525 new_branch = push_ref['old_rev'] == empty_commit_id
526 526 delete_branch = push_ref['new_rev'] == empty_commit_id
527 527 if type_ == 'heads' and not (new_branch or delete_branch):
528 528 old_rev = push_ref['old_rev']
529 529 new_rev = push_ref['new_rev']
530 530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
531 531 stdout, stderr = subprocessio.run_command(
532 532 cmd, env=os.environ.copy())
533 533 # means we're having some non-reachable objects, this forced push was used
534 534 if stdout:
535 535 push_ref['pruned_sha'] = stdout.splitlines()
536 536
537 537 extras['hook_type'] = 'pre_receive'
538 538 extras['commit_ids'] = rev_data
539 539 return _call_hook('pre_push', extras, GitMessageWriter())
540 540
541 541
542 542 def git_post_receive(unused_repo_path, revision_lines, env):
543 543 """
544 544 Post push hook.
545 545
546 546 :param extras: dictionary containing the keys defined in simplevcs
547 547 :type extras: dict
548 548
549 549 :return: status code of the hook. 0 for success.
550 550 :rtype: int
551 551 """
552 552 extras = json.loads(env['RC_SCM_DATA'])
553 553 if 'push' not in extras['hooks']:
554 554 return 0
555 555
556 556 rev_data = _parse_git_ref_lines(revision_lines)
557 557
558 558 git_revs = []
559 559
560 560 # N.B.(skreft): it is ok to just call git, as git before calling a
561 561 # subcommand sets the PATH environment variable so that it point to the
562 562 # correct version of the git executable.
563 563 empty_commit_id = '0' * 40
564 564 branches = []
565 565 tags = []
566 566 for push_ref in rev_data:
567 567 type_ = push_ref['type']
568 568
569 569 if type_ == 'heads':
570 570 if push_ref['old_rev'] == empty_commit_id:
571 571 # starting new branch case
572 572 if push_ref['name'] not in branches:
573 573 branches.append(push_ref['name'])
574 574
575 575 # Fix up head revision if needed
576 576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
577 577 try:
578 578 subprocessio.run_command(cmd, env=os.environ.copy())
579 579 except Exception:
580 580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
581 581 'refs/heads/%s' % push_ref['name']]
582 582 print("Setting default branch to %s" % push_ref['name'])
583 583 subprocessio.run_command(cmd, env=os.environ.copy())
584 584
585 585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
586 586 '--format=%(refname)', 'refs/heads/*']
587 587 stdout, stderr = subprocessio.run_command(
588 588 cmd, env=os.environ.copy())
589 589 heads = stdout
590 590 heads = heads.replace(push_ref['ref'], '')
591 591 heads = ' '.join(head for head
592 592 in heads.splitlines() if head) or '.'
593 593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
594 594 '--pretty=format:%H', '--', push_ref['new_rev'],
595 595 '--not', heads]
596 596 stdout, stderr = subprocessio.run_command(
597 597 cmd, env=os.environ.copy())
598 598 git_revs.extend(stdout.splitlines())
599 599 elif push_ref['new_rev'] == empty_commit_id:
600 600 # delete branch case
601 601 git_revs.append('delete_branch=>%s' % push_ref['name'])
602 602 else:
603 603 if push_ref['name'] not in branches:
604 604 branches.append(push_ref['name'])
605 605
606 606 cmd = [settings.GIT_EXECUTABLE, 'log',
607 607 '{old_rev}..{new_rev}'.format(**push_ref),
608 608 '--reverse', '--pretty=format:%H']
609 609 stdout, stderr = subprocessio.run_command(
610 610 cmd, env=os.environ.copy())
611 611 git_revs.extend(stdout.splitlines())
612 612 elif type_ == 'tags':
613 613 if push_ref['name'] not in tags:
614 614 tags.append(push_ref['name'])
615 615 git_revs.append('tag=>%s' % push_ref['name'])
616 616
617 617 extras['hook_type'] = 'post_receive'
618 618 extras['commit_ids'] = git_revs
619 619 extras['new_refs'] = {
620 620 'branches': branches,
621 621 'bookmarks': [],
622 622 'tags': tags,
623 623 }
624 624
625 625 if 'repo_size' in extras['hooks']:
626 626 try:
627 627 _call_hook('repo_size', extras, GitMessageWriter())
628 628 except:
629 629 pass
630 630
631 631 return _call_hook('post_push', extras, GitMessageWriter())
632 632
633 633
634 634 def _get_extras_from_txn_id(path, txn_id):
635 635 extras = {}
636 636 try:
637 637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
638 638 '-t', txn_id,
639 639 '--revprop', path, 'rc-scm-extras']
640 640 stdout, stderr = subprocessio.run_command(
641 641 cmd, env=os.environ.copy())
642 642 extras = json.loads(base64.urlsafe_b64decode(stdout))
643 643 except Exception:
644 644 log.exception('Failed to extract extras info from txn_id')
645 645
646 646 return extras
647 647
648 648
649 649 def _get_extras_from_commit_id(commit_id, path):
650 650 extras = {}
651 651 try:
652 652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
653 653 '-r', commit_id,
654 654 '--revprop', path, 'rc-scm-extras']
655 655 stdout, stderr = subprocessio.run_command(
656 656 cmd, env=os.environ.copy())
657 657 extras = json.loads(base64.urlsafe_b64decode(stdout))
658 658 except Exception:
659 659 log.exception('Failed to extract extras info from commit_id')
660 660
661 661 return extras
662 662
663 663
664 664 def svn_pre_commit(repo_path, commit_data, env):
665 665 path, txn_id = commit_data
666 666 branches = []
667 667 tags = []
668 668
669 669 if env.get('RC_SCM_DATA'):
670 670 extras = json.loads(env['RC_SCM_DATA'])
671 671 else:
672 672 # fallback method to read from TXN-ID stored data
673 673 extras = _get_extras_from_txn_id(path, txn_id)
674 674 if not extras:
675 675 return 0
676 676
677 677 extras['hook_type'] = 'pre_commit'
678 678 extras['commit_ids'] = [txn_id]
679 679 extras['txn_id'] = txn_id
680 680 extras['new_refs'] = {
681 681 'total_commits': 1,
682 682 'branches': branches,
683 683 'bookmarks': [],
684 684 'tags': tags,
685 685 }
686 686
687 687 return _call_hook('pre_push', extras, SvnMessageWriter())
688 688
689 689
690 690 def svn_post_commit(repo_path, commit_data, env):
691 691 """
692 692 commit_data is path, rev, txn_id
693 693 """
694 694 if len(commit_data) == 3:
695 695 path, commit_id, txn_id = commit_data
696 696 elif len(commit_data) == 2:
697 697 log.error('Failed to extract txn_id from commit_data using legacy method. '
698 698 'Some functionality might be limited')
699 699 path, commit_id = commit_data
700 700 txn_id = None
701 701
702 702 branches = []
703 703 tags = []
704 704
705 705 if env.get('RC_SCM_DATA'):
706 706 extras = json.loads(env['RC_SCM_DATA'])
707 707 else:
708 708 # fallback method to read from TXN-ID stored data
709 709 extras = _get_extras_from_commit_id(commit_id, path)
710 710 if not extras:
711 711 return 0
712 712
713 713 extras['hook_type'] = 'post_commit'
714 714 extras['commit_ids'] = [commit_id]
715 715 extras['txn_id'] = txn_id
716 716 extras['new_refs'] = {
717 717 'branches': branches,
718 718 'bookmarks': [],
719 719 'tags': tags,
720 720 'total_commits': 1,
721 721 }
722 722
723 723 if 'repo_size' in extras['hooks']:
724 724 try:
725 725 _call_hook('repo_size', extras, SvnMessageWriter())
726 726 except Exception:
727 727 pass
728 728
729 729 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,791 +1,791 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 from __future__ import absolute_import
18
19 19
20 20 import os
21 21 import subprocess
22 from urllib2 import URLError
23 import urlparse
22 from urllib.error import URLError
23 import urllib.parse
24 24 import logging
25 25 import posixpath as vcspath
26 import StringIO
27 import urllib
26 import io
27 import urllib.request, urllib.parse, urllib.error
28 28 import traceback
29 29
30 30 import svn.client
31 31 import svn.core
32 32 import svn.delta
33 33 import svn.diff
34 34 import svn.fs
35 35 import svn.repos
36 36
37 37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 38 from vcsserver.base import RepoFactory, raise_from_original
39 39 from vcsserver.vcs_base import RemoteBase
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 svn_compatible_versions_map = {
45 45 'pre-1.4-compatible': '1.3',
46 46 'pre-1.5-compatible': '1.4',
47 47 'pre-1.6-compatible': '1.5',
48 48 'pre-1.8-compatible': '1.7',
49 49 'pre-1.9-compatible': '1.8',
50 50 }
51 51
52 52 current_compatible_version = '1.12'
53 53
54 54
55 55 def reraise_safe_exceptions(func):
56 56 """Decorator for converting svn exceptions to something neutral."""
57 57 def wrapper(*args, **kwargs):
58 58 try:
59 59 return func(*args, **kwargs)
60 60 except Exception as e:
61 61 if not hasattr(e, '_vcs_kind'):
62 62 log.exception("Unhandled exception in svn remote call")
63 63 raise_from_original(exceptions.UnhandledException(e))
64 64 raise
65 65 return wrapper
66 66
67 67
68 68 class SubversionFactory(RepoFactory):
69 69 repo_type = 'svn'
70 70
71 71 def _create_repo(self, wire, create, compatible_version):
72 72 path = svn.core.svn_path_canonicalize(wire['path'])
73 73 if create:
74 74 fs_config = {'compatible-version': current_compatible_version}
75 75 if compatible_version:
76 76
77 77 compatible_version_string = \
78 78 svn_compatible_versions_map.get(compatible_version) \
79 79 or compatible_version
80 80 fs_config['compatible-version'] = compatible_version_string
81 81
82 82 log.debug('Create SVN repo with config "%s"', fs_config)
83 83 repo = svn.repos.create(path, "", "", None, fs_config)
84 84 else:
85 85 repo = svn.repos.open(path)
86 86
87 87 log.debug('Got SVN object: %s', repo)
88 88 return repo
89 89
90 90 def repo(self, wire, create=False, compatible_version=None):
91 91 """
92 92 Get a repository instance for the given path.
93 93 """
94 94 return self._create_repo(wire, create, compatible_version)
95 95
96 96
97 97 NODE_TYPE_MAPPING = {
98 98 svn.core.svn_node_file: 'file',
99 99 svn.core.svn_node_dir: 'dir',
100 100 }
101 101
102 102
103 103 class SvnRemote(RemoteBase):
104 104
105 105 def __init__(self, factory, hg_factory=None):
106 106 self._factory = factory
107 107 # TODO: Remove once we do not use internal Mercurial objects anymore
108 108 # for subversion
109 109 self._hg_factory = hg_factory
110 110
111 111 @reraise_safe_exceptions
112 112 def discover_svn_version(self):
113 113 try:
114 114 import svn.core
115 115 svn_ver = svn.core.SVN_VERSION
116 116 except ImportError:
117 117 svn_ver = None
118 118 return svn_ver
119 119
120 120 @reraise_safe_exceptions
121 121 def is_empty(self, wire):
122 122
123 123 try:
124 124 return self.lookup(wire, -1) == 0
125 125 except Exception:
126 126 log.exception("failed to read object_store")
127 127 return False
128 128
129 129 def check_url(self, url, config_items):
130 130 # this can throw exception if not installed, but we detect this
131 131 from hgsubversion import svnrepo
132 132
133 133 baseui = self._hg_factory._create_config(config_items)
134 134 # uuid function get's only valid UUID from proper repo, else
135 135 # throws exception
136 136 try:
137 137 svnrepo.svnremoterepo(baseui, url).svn.uuid
138 138 except Exception:
139 139 tb = traceback.format_exc()
140 140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
141 141 raise URLError(
142 142 '"%s" is not a valid Subversion source url.' % (url, ))
143 143 return True
144 144
145 145 def is_path_valid_repository(self, wire, path):
146 146
147 147 # NOTE(marcink): short circuit the check for SVN repo
148 148 # the repos.open might be expensive to check, but we have one cheap
149 149 # pre condition that we can use, to check for 'format' file
150 150
151 151 if not os.path.isfile(os.path.join(path, 'format')):
152 152 return False
153 153
154 154 try:
155 155 svn.repos.open(path)
156 156 except svn.core.SubversionException:
157 157 tb = traceback.format_exc()
158 158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
159 159 return False
160 160 return True
161 161
162 162 @reraise_safe_exceptions
163 163 def verify(self, wire,):
164 164 repo_path = wire['path']
165 165 if not self.is_path_valid_repository(wire, repo_path):
166 166 raise Exception(
167 167 "Path %s is not a valid Subversion repository." % repo_path)
168 168
169 169 cmd = ['svnadmin', 'info', repo_path]
170 170 stdout, stderr = subprocessio.run_command(cmd)
171 171 return stdout
172 172
173 173 def lookup(self, wire, revision):
174 174 if revision not in [-1, None, 'HEAD']:
175 175 raise NotImplementedError
176 176 repo = self._factory.repo(wire)
177 177 fs_ptr = svn.repos.fs(repo)
178 178 head = svn.fs.youngest_rev(fs_ptr)
179 179 return head
180 180
181 181 def lookup_interval(self, wire, start_ts, end_ts):
182 182 repo = self._factory.repo(wire)
183 183 fsobj = svn.repos.fs(repo)
184 184 start_rev = None
185 185 end_rev = None
186 186 if start_ts:
187 187 start_ts_svn = apr_time_t(start_ts)
188 188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
189 189 else:
190 190 start_rev = 1
191 191 if end_ts:
192 192 end_ts_svn = apr_time_t(end_ts)
193 193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
194 194 else:
195 195 end_rev = svn.fs.youngest_rev(fsobj)
196 196 return start_rev, end_rev
197 197
198 198 def revision_properties(self, wire, revision):
199 199
200 200 cache_on, context_uid, repo_id = self._cache_on(wire)
201 201 @self.region.conditional_cache_on_arguments(condition=cache_on)
202 202 def _revision_properties(_repo_id, _revision):
203 203 repo = self._factory.repo(wire)
204 204 fs_ptr = svn.repos.fs(repo)
205 205 return svn.fs.revision_proplist(fs_ptr, revision)
206 206 return _revision_properties(repo_id, revision)
207 207
208 208 def revision_changes(self, wire, revision):
209 209
210 210 repo = self._factory.repo(wire)
211 211 fsobj = svn.repos.fs(repo)
212 212 rev_root = svn.fs.revision_root(fsobj, revision)
213 213
214 214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
215 215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
216 216 base_dir = ""
217 217 send_deltas = False
218 218 svn.repos.replay2(
219 219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
220 220 editor_ptr, editor_baton, None)
221 221
222 222 added = []
223 223 changed = []
224 224 removed = []
225 225
226 226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
227 for path, change in editor.changes.iteritems():
227 for path, change in editor.changes.items():
228 228 # TODO: Decide what to do with directory nodes. Subversion can add
229 229 # empty directories.
230 230
231 231 if change.item_kind == svn.core.svn_node_dir:
232 232 continue
233 233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
234 234 added.append(path)
235 235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
236 236 svn.repos.CHANGE_ACTION_REPLACE]:
237 237 changed.append(path)
238 238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
239 239 removed.append(path)
240 240 else:
241 241 raise NotImplementedError(
242 242 "Action %s not supported on path %s" % (
243 243 change.action, path))
244 244
245 245 changes = {
246 246 'added': added,
247 247 'changed': changed,
248 248 'removed': removed,
249 249 }
250 250 return changes
251 251
252 252 @reraise_safe_exceptions
253 253 def node_history(self, wire, path, revision, limit):
254 254 cache_on, context_uid, repo_id = self._cache_on(wire)
255 255 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
257 257 cross_copies = False
258 258 repo = self._factory.repo(wire)
259 259 fsobj = svn.repos.fs(repo)
260 260 rev_root = svn.fs.revision_root(fsobj, revision)
261 261
262 262 history_revisions = []
263 263 history = svn.fs.node_history(rev_root, path)
264 264 history = svn.fs.history_prev(history, cross_copies)
265 265 while history:
266 266 __, node_revision = svn.fs.history_location(history)
267 267 history_revisions.append(node_revision)
268 268 if limit and len(history_revisions) >= limit:
269 269 break
270 270 history = svn.fs.history_prev(history, cross_copies)
271 271 return history_revisions
272 272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
273 273
274 274 def node_properties(self, wire, path, revision):
275 275 cache_on, context_uid, repo_id = self._cache_on(wire)
276 276 @self.region.conditional_cache_on_arguments(condition=cache_on)
277 277 def _node_properties(_repo_id, _path, _revision):
278 278 repo = self._factory.repo(wire)
279 279 fsobj = svn.repos.fs(repo)
280 280 rev_root = svn.fs.revision_root(fsobj, revision)
281 281 return svn.fs.node_proplist(rev_root, path)
282 282 return _node_properties(repo_id, path, revision)
283 283
284 284 def file_annotate(self, wire, path, revision):
285 abs_path = 'file://' + urllib.pathname2url(
285 abs_path = 'file://' + urllib.request.pathname2url(
286 286 vcspath.join(wire['path'], path))
287 287 file_uri = svn.core.svn_path_canonicalize(abs_path)
288 288
289 289 start_rev = svn_opt_revision_value_t(0)
290 290 peg_rev = svn_opt_revision_value_t(revision)
291 291 end_rev = peg_rev
292 292
293 293 annotations = []
294 294
295 295 def receiver(line_no, revision, author, date, line, pool):
296 296 annotations.append((line_no, revision, line))
297 297
298 298 # TODO: Cannot use blame5, missing typemap function in the swig code
299 299 try:
300 300 svn.client.blame2(
301 301 file_uri, peg_rev, start_rev, end_rev,
302 302 receiver, svn.client.create_context())
303 303 except svn.core.SubversionException as exc:
304 304 log.exception("Error during blame operation.")
305 305 raise Exception(
306 306 "Blame not supported or file does not exist at path %s. "
307 307 "Error %s." % (path, exc))
308 308
309 309 return annotations
310 310
311 311 def get_node_type(self, wire, path, revision=None):
312 312
313 313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 315 def _get_node_type(_repo_id, _path, _revision):
316 316 repo = self._factory.repo(wire)
317 317 fs_ptr = svn.repos.fs(repo)
318 318 if _revision is None:
319 319 _revision = svn.fs.youngest_rev(fs_ptr)
320 320 root = svn.fs.revision_root(fs_ptr, _revision)
321 321 node = svn.fs.check_path(root, path)
322 322 return NODE_TYPE_MAPPING.get(node, None)
323 323 return _get_node_type(repo_id, path, revision)
324 324
325 325 def get_nodes(self, wire, path, revision=None):
326 326
327 327 cache_on, context_uid, repo_id = self._cache_on(wire)
328 328 @self.region.conditional_cache_on_arguments(condition=cache_on)
329 329 def _get_nodes(_repo_id, _path, _revision):
330 330 repo = self._factory.repo(wire)
331 331 fsobj = svn.repos.fs(repo)
332 332 if _revision is None:
333 333 _revision = svn.fs.youngest_rev(fsobj)
334 334 root = svn.fs.revision_root(fsobj, _revision)
335 335 entries = svn.fs.dir_entries(root, path)
336 336 result = []
337 for entry_path, entry_info in entries.iteritems():
337 for entry_path, entry_info in entries.items():
338 338 result.append(
339 339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 340 return result
341 341 return _get_nodes(repo_id, path, revision)
342 342
343 343 def get_file_content(self, wire, path, rev=None):
344 344 repo = self._factory.repo(wire)
345 345 fsobj = svn.repos.fs(repo)
346 346 if rev is None:
347 347 rev = svn.fs.youngest_revision(fsobj)
348 348 root = svn.fs.revision_root(fsobj, rev)
349 349 content = svn.core.Stream(svn.fs.file_contents(root, path))
350 350 return content.read()
351 351
352 352 def get_file_size(self, wire, path, revision=None):
353 353
354 354 cache_on, context_uid, repo_id = self._cache_on(wire)
355 355 @self.region.conditional_cache_on_arguments(condition=cache_on)
356 356 def _get_file_size(_repo_id, _path, _revision):
357 357 repo = self._factory.repo(wire)
358 358 fsobj = svn.repos.fs(repo)
359 359 if _revision is None:
360 360 _revision = svn.fs.youngest_revision(fsobj)
361 361 root = svn.fs.revision_root(fsobj, _revision)
362 362 size = svn.fs.file_length(root, path)
363 363 return size
364 364 return _get_file_size(repo_id, path, revision)
365 365
366 366 def create_repository(self, wire, compatible_version=None):
367 367 log.info('Creating Subversion repository in path "%s"', wire['path'])
368 368 self._factory.repo(wire, create=True,
369 369 compatible_version=compatible_version)
370 370
371 371 def get_url_and_credentials(self, src_url):
372 obj = urlparse.urlparse(src_url)
372 obj = urllib.parse.urlparse(src_url)
373 373 username = obj.username or None
374 374 password = obj.password or None
375 375 return username, password, src_url
376 376
377 377 def import_remote_repository(self, wire, src_url):
378 378 repo_path = wire['path']
379 379 if not self.is_path_valid_repository(wire, repo_path):
380 380 raise Exception(
381 381 "Path %s is not a valid Subversion repository." % repo_path)
382 382
383 383 username, password, src_url = self.get_url_and_credentials(src_url)
384 384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
385 385 '--trust-server-cert-failures=unknown-ca']
386 386 if username and password:
387 387 rdump_cmd += ['--username', username, '--password', password]
388 388 rdump_cmd += [src_url]
389 389
390 390 rdump = subprocess.Popen(
391 391 rdump_cmd,
392 392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
393 393 load = subprocess.Popen(
394 394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
395 395
396 396 # TODO: johbo: This can be a very long operation, might be better
397 397 # to track some kind of status and provide an api to check if the
398 398 # import is done.
399 399 rdump.wait()
400 400 load.wait()
401 401
402 402 log.debug('Return process ended with code: %s', rdump.returncode)
403 403 if rdump.returncode != 0:
404 404 errors = rdump.stderr.read()
405 405 log.error('svnrdump dump failed: statuscode %s: message: %s',
406 406 rdump.returncode, errors)
407 407 reason = 'UNKNOWN'
408 408 if 'svnrdump: E230001:' in errors:
409 409 reason = 'INVALID_CERTIFICATE'
410 410
411 411 if reason == 'UNKNOWN':
412 412 reason = 'UNKNOWN:{}'.format(errors)
413 413 raise Exception(
414 414 'Failed to dump the remote repository from %s. Reason:%s' % (
415 415 src_url, reason))
416 416 if load.returncode != 0:
417 417 raise Exception(
418 418 'Failed to load the dump of remote repository from %s.' %
419 419 (src_url, ))
420 420
421 421 def commit(self, wire, message, author, timestamp, updated, removed):
422 422 assert isinstance(message, str)
423 423 assert isinstance(author, str)
424 424
425 425 repo = self._factory.repo(wire)
426 426 fsobj = svn.repos.fs(repo)
427 427
428 428 rev = svn.fs.youngest_rev(fsobj)
429 429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
430 430 txn_root = svn.fs.txn_root(txn)
431 431
432 432 for node in updated:
433 433 TxnNodeProcessor(node, txn_root).update()
434 434 for node in removed:
435 435 TxnNodeProcessor(node, txn_root).remove()
436 436
437 437 commit_id = svn.repos.fs_commit_txn(repo, txn)
438 438
439 439 if timestamp:
440 440 apr_time = apr_time_t(timestamp)
441 441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
442 442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
443 443
444 444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
445 445 return commit_id
446 446
447 447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
448 448 ignore_whitespace=False, context=3):
449 449
450 450 wire.update(cache=False)
451 451 repo = self._factory.repo(wire)
452 452 diff_creator = SvnDiffer(
453 453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
454 454 try:
455 455 return diff_creator.generate_diff()
456 456 except svn.core.SubversionException as e:
457 457 log.exception(
458 458 "Error during diff operation operation. "
459 459 "Path might not exist %s, %s" % (path1, path2))
460 460 return ""
461 461
462 462 @reraise_safe_exceptions
463 463 def is_large_file(self, wire, path):
464 464 return False
465 465
466 466 @reraise_safe_exceptions
467 467 def is_binary(self, wire, rev, path):
468 468 cache_on, context_uid, repo_id = self._cache_on(wire)
469 469
470 470 @self.region.conditional_cache_on_arguments(condition=cache_on)
471 471 def _is_binary(_repo_id, _rev, _path):
472 472 raw_bytes = self.get_file_content(wire, path, rev)
473 473 return raw_bytes and '\0' in raw_bytes
474 474
475 475 return _is_binary(repo_id, rev, path)
476 476
477 477 @reraise_safe_exceptions
478 478 def run_svn_command(self, wire, cmd, **opts):
479 479 path = wire.get('path', None)
480 480
481 481 if path and os.path.isdir(path):
482 482 opts['cwd'] = path
483 483
484 484 safe_call = False
485 485 if '_safe' in opts:
486 486 safe_call = True
487 487
488 488 svnenv = os.environ.copy()
489 489 svnenv.update(opts.pop('extra_env', {}))
490 490
491 491 _opts = {'env': svnenv, 'shell': False}
492 492
493 493 try:
494 494 _opts.update(opts)
495 495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
496 496
497 497 return ''.join(p), ''.join(p.error)
498 498 except (EnvironmentError, OSError) as err:
499 499 cmd = ' '.join(cmd) # human friendly CMD
500 500 tb_err = ("Couldn't run svn command (%s).\n"
501 501 "Original error was:%s\n"
502 502 "Call options:%s\n"
503 503 % (cmd, err, _opts))
504 504 log.exception(tb_err)
505 505 if safe_call:
506 506 return '', err
507 507 else:
508 508 raise exceptions.VcsException()(tb_err)
509 509
510 510 @reraise_safe_exceptions
511 511 def install_hooks(self, wire, force=False):
512 512 from vcsserver.hook_utils import install_svn_hooks
513 513 repo_path = wire['path']
514 514 binary_dir = settings.BINARY_DIR
515 515 executable = None
516 516 if binary_dir:
517 517 executable = os.path.join(binary_dir, 'python')
518 518 return install_svn_hooks(
519 519 repo_path, executable=executable, force_create=force)
520 520
521 521 @reraise_safe_exceptions
522 522 def get_hooks_info(self, wire):
523 523 from vcsserver.hook_utils import (
524 524 get_svn_pre_hook_version, get_svn_post_hook_version)
525 525 repo_path = wire['path']
526 526 return {
527 527 'pre_version': get_svn_pre_hook_version(repo_path),
528 528 'post_version': get_svn_post_hook_version(repo_path),
529 529 }
530 530
531 531
class SvnDiffer(object):
    """
    Utility to create git-style unified diffs based on difflib and the
    Subversion api.
    """

    # Set per-node while rendering; when True, hunk generation is skipped.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # An absent source path defaults to comparing the same path
        # across the two revisions.
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Diffing a file against a directory makes no sense; both sides
        # must have the same node kind unless one of them is missing.
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Render the configured src/tgt comparison into a diff string."""
        buf = io.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Collect the per-path changes between the two trees, then emit
        # one node diff per change, sorted for deterministic output.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # A missing node on one side means the file was added/deleted.
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write the git-style header and hunks for one node into buf."""
        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            # Non-text mime type: emit a placeholder instead of hunks.
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')

        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # Fall back to the source tree for deleted nodes.
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Read node content as a list of lines (keeping line endings);
        empty for binary content and non-file nodes."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
692 692
class DiffChangeEditor(svn.delta.Editor):
    """
    Delta editor that records the changes between two revisions as
    ``(path, item_kind, action)`` tuples in ``self.changes``.

    Only the editor callbacks relevant for change collection are
    implemented; signatures follow the svn.delta.Editor protocol.
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
711 711
712 712
def authorization_callback_allow_all(root, path, pool):
    """Svn authz callback granting read access to every path."""
    return True
715 715
716 716
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the
    method `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # `node` is a dict carrying at least 'path'; updates additionally
        # use 'content' and an optional 'properties' mapping.
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create the node if needed and sync its content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk upwards collecting missing ancestors, then create them
        # top-down so every make_dir call has an existing parent.
        missing = []
        current = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(current):
            missing.append(current)
            current = vcspath.dirname(current)

        for dirname in reversed(missing):
            log.debug('Creating missing directory "%s"', dirname)
            svn.fs.make_dir(self.txn_root, dirname)

    def _svn_path_exists(self, path):
        return svn.fs.check_path(
            self.txn_root, path) != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        for key, value in self.node.get('properties', {}).items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
773 773
774 774
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds since epoch, int or float) into
    an APR timestamp (apr_time_t, microseconds since epoch).

    Returns an int: apr_time_t is a 64-bit integer type, and the SWIG
    bindings expect an integer value rather than the float that a bare
    ``timestamp * 1E6`` produces. Sub-microsecond precision is truncated.
    """
    return int(timestamp * 1E6)
780 780
781 781
def svn_opt_revision_value_t(num):
    """
    Wrap revision number ``num`` into an `svn_opt_revision_t` of kind
    `svn_opt_revision_number` (via an `svn_opt_revision_value_t`).
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value = rev_value
    return rev
@@ -1,241 +1,241 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import contextlib
19 19 import io
20 20 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
22 from SocketServer import TCPServer
21 from http.server import BaseHTTPRequestHandler
22 from socketserver import TCPServer
23 23
24 24 import mercurial.ui
25 25 import mock
26 26 import pytest
27 27 import simplejson as json
28 28
29 29 from vcsserver import hooks
30 30
31 31
def get_hg_ui(extras=None):
    """Create a mercurial ui object carrying a valid RC_SCM_DATA entry,
    with ``extras`` overriding the required defaults."""
    scm_data = {
        'username': '',
        'repository': '',
        'locked_by': '',
        'scm': '',
        'make_lock': '',
        'action': '',
        'ip': '',
        'hooks_uri': 'fake_hooks_uri',
    }
    scm_data.update(extras or {})
    hg_ui = mercurial.ui.ui()
    hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(scm_data))

    return hg_ui
50 50
51 51
def test_git_pre_receive_is_disabled():
    # When 'push' is not among the enabled hooks, pre-receive is a no-op
    # and reports success (0).
    env = {'RC_SCM_DATA': json.dumps({'hooks': ['pull']})}
    assert hooks.git_pre_receive(None, None, env) == 0
58 58
59 59
def test_git_post_receive_is_disabled():
    # When 'push' is not among the enabled hooks, post-receive is a no-op
    # and reports success (0).
    env = {'RC_SCM_DATA': json.dumps({'hooks': ['pull']})}
    assert hooks.git_post_receive(None, '', env) == 0
66 66
67 67
def test_git_post_receive_calls_repo_size():
    # With 'repo_size' enabled, post-receive must fire the repo_size hook
    # before the regular post_push hook.
    extras = {'hooks': ['push', 'repo_size']}
    with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
        hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
    extras.update({'commit_ids': [], 'hook_type': 'post_receive',
                   'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
    assert call_hook_mock.call_args_list == [
        mock.call('repo_size', extras, mock.ANY),
        mock.call('post_push', extras, mock.ANY),
    ]
80 80
81 81
def test_git_post_receive_does_not_call_disabled_repo_size():
    # Without 'repo_size' in the enabled hooks, only post_push must run.
    extras = {'hooks': ['push']}
    with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
        hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
    extras.update({'commit_ids': [], 'hook_type': 'post_receive',
                   'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
    assert call_hook_mock.call_args_list == [
        mock.call('post_push', extras, mock.ANY),
    ]
93 93
94 94
def test_repo_size_exception_does_not_affect_git_post_receive():
    # A failing repo_size hook must not change the overall post-receive
    # status; the result of the remaining hooks wins.
    extras = {'hooks': ['push', 'repo_size']}
    status = 0

    def fake_call_hook(name, *args, **kwargs):
        if name == 'repo_size':
            raise Exception('Fake exception')
        return status

    with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
        call_hook_mock.side_effect = fake_call_hook
        result = hooks.git_post_receive(
            None, '', {'RC_SCM_DATA': json.dumps(extras)})
    assert result == status
110 110
111 111
def test_git_pre_pull_is_disabled():
    # 'pull' hook not enabled -> neutral HookResponse(0, '').
    result = hooks.git_pre_pull({'hooks': ['push']})
    assert result == hooks.HookResponse(0, '')
114 114
115 115
def test_git_post_pull_is_disabled():
    # 'pull' hook not enabled -> neutral HookResponse(0, '').
    result = hooks.git_post_pull({'hooks': ['push']})
    assert result == hooks.HookResponse(0, '')
119 119
120 120
class TestGetHooksClient(object):
    """Tests for the client selection logic in hooks._get_hooks_client."""

    def test_returns_http_client_when_protocol_matches(self):
        hooks_uri = 'localhost:8000'
        client = hooks._get_hooks_client({
            'hooks_uri': hooks_uri,
            'hooks_protocol': 'http'
        })
        assert isinstance(client, hooks.HooksHttpClient)
        assert client.hooks_uri == hooks_uri

    def test_returns_dummy_client_when_hooks_uri_not_specified(self):
        # Without a hooks_uri the factory falls back to the dummy client,
        # importing the configured hooks module.
        fake_module = mock.Mock()
        fake_module_name = 'fake.module'
        import_patcher = mock.patch.object(
            hooks.importlib, 'import_module', return_value=fake_module)
        with import_patcher as import_mock:
            client = hooks._get_hooks_client(
                {'hooks_module': fake_module_name})

        import_mock.assert_called_once_with(fake_module_name)
        assert isinstance(client, hooks.HooksDummyClient)
        assert client._hooks_module == fake_module
144 144
145 145
class TestHooksHttpClient(object):
    """Tests for hooks.HooksHttpClient serialization and transport."""

    def test_init_sets_hooks_uri(self):
        uri = 'localhost:3000'
        assert hooks.HooksHttpClient(uri).hooks_uri == uri

    def test_serialize_returns_json_string(self):
        client = hooks.HooksHttpClient('localhost:3000')
        hook_name = 'test'
        extras = {
            'first': 1,
            'second': 'two'
        }
        serialized = client._serialize(hook_name, extras)
        assert serialized == json.dumps({
            'method': hook_name,
            'extras': extras
        })

    def test_call_queries_http_server(self, http_mirror):
        # The mirror server echoes the request, so the deserialized
        # response must equal the payload that was sent.
        client = hooks.HooksHttpClient(http_mirror.uri)
        hook_name = 'test'
        extras = {
            'first': 1,
            'second': 'two'
        }
        result = client(hook_name, extras)
        assert result == {
            'method': hook_name,
            'extras': extras
        }
179 179
180 180
class TestHooksDummyClient(object):
    """Tests for hooks.HooksDummyClient module loading and dispatch."""

    def test_init_imports_hooks_module(self):
        hooks_module_name = 'rhodecode.fake.module'
        hooks_module = mock.MagicMock()

        import_patcher = mock.patch.object(
            hooks.importlib, 'import_module', return_value=hooks_module)
        with import_patcher as import_mock:
            client = hooks.HooksDummyClient(hooks_module_name)
        import_mock.assert_called_once_with(hooks_module_name)
        assert client._hooks_module == hooks_module

    def test_call_returns_hook_result(self):
        # Calling the client proxies to Hooks().<method>() on the module.
        hooks_module_name = 'rhodecode.fake.module'
        hooks_module = mock.MagicMock()
        import_patcher = mock.patch.object(
            hooks.importlib, 'import_module', return_value=hooks_module)
        with import_patcher:
            client = hooks.HooksDummyClient(hooks_module_name)

        result = client('post_push', {})
        hooks_module.Hooks.assert_called_once_with()
        assert result == hooks_module.Hooks().__enter__().post_push()
204 204
205 205
@pytest.fixture
def http_mirror(request):
    """Start a MirrorHttpServer for the test; stopped at teardown."""
    server = MirrorHttpServer()
    request.addfinalizer(server.stop)
    return server
211 211
212 212
class MirrorHttpHandler(BaseHTTPRequestHandler):
    """HTTP handler that echoes each POST body back to the client."""

    def do_POST(self):
        length = int(self.headers['Content-Length'])
        body = self.rfile.read(length).decode('utf-8')
        self.send_response(200)
        self.end_headers()
        # Fix: wfile is a binary stream under Python 3 — writing the
        # decoded str raises TypeError; encode the echoed body back.
        self.wfile.write(body.encode('utf-8'))
220 220
221 221
class MirrorHttpServer(object):
    """Background-threaded echo HTTP server for the http_mirror fixture."""

    ip_address = '127.0.0.1'
    port = 0

    def __init__(self):
        # Bind to port 0 so the OS assigns a free port, then record it.
        self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
        _, self.port = self._daemon.server_address
        self._thread = threading.Thread(target=self._daemon.serve_forever)
        self._thread.daemon = True
        self._thread.start()

    def stop(self):
        """Shut the server down and join its worker thread."""
        self._daemon.shutdown()
        self._thread.join()
        self._daemon = None
        self._thread = None

    @property
    def uri(self):
        """Return the 'host:port' address of the running server."""
        return '{}:{}'.format(self.ip_address, self.port)
General Comments 0
You need to be logged in to leave comments. Login now