##// END OF EJS Templates
caches: replaced beaker with dogpile cache.
marcink -
r483:80e9ab60 default
parent child Browse files
Show More
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19 from dogpile.cache import register_backend
20
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
24
25 log = logging.getLogger(__name__)
26
27 from . import region_meta
28 from .util import key_generator, get_default_cache_settings, make_region
29
30
def configure_dogpile_cache(settings):
    """
    Discover ``rc_cache.*`` regions in *settings*, build a dogpile region for
    each and register it in ``region_meta.dogpile_cache_regions``.
    """
    custom_cache_dir = settings.get('cache_dir')
    if custom_cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = custom_cache_dir

    cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # inspect available namespaces: the first dotted segment of every
    # stripped option name is a region name
    region_names = {option.split('.', 1)[0] for option in cache_data}
    log.debug('dogpile: found following cache regions: %s', region_names)

    # register them into namespace
    for name in region_names:
        region = make_region(
            name=name,
            function_key_generator=key_generator)
        region.configure_from_config(settings, 'rc_cache.{}.'.format(name))
        log.debug('dogpile: registering a new region %s[%s]',
                  name, region.__dict__)
        region_meta.dogpile_cache_regions[name] = region
57
58
def includeme(config):
    """Pyramid inclusion hook: configure dogpile cache regions from the app settings."""
    configure_dogpile_cache(config.registry.settings)
@@ -0,0 +1,51 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19
20 from dogpile.cache.backends import memory as memory_backend
21 from lru import LRU as LRUDict
22
23
24 _default_max_size = 1024
25
26 log = logging.getLogger(__name__)
27
28
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """
    In-memory dogpile.cache backend whose store is a size-bounded LRU dict:
    once ``max_size`` entries are held, the least-recently-used one is evicted.

    Arguments (popped from ``arguments`` before they reach the base backend):
      * ``max_size`` -- LRU capacity, defaults to ``_default_max_size``
      * ``log_max_size_reached`` -- truthy to log each eviction
    """
    # values are stored as-is; no pickling round-trip needed for in-process memory
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        callback = None
        if arguments.pop('log_max_size_reached', None):
            def evicted(key, value):
                # invoked by LRUDict whenever it drops an entry to stay <= max_size
                log.debug(
                    'LRU: evicting key `%s` due to max size %s reached', key, max_size)
            callback = evicted

        arguments['cache_dict'] = LRUDict(max_size, callback=callback)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        # `in` instead of py2-only dict.has_key(); deleting a missing key is a no-op
        if key in self._cache:
            del self._cache[key]

    def delete_multi(self, keys):
        # reuse delete() so the missing-key handling lives in one place
        for key in keys:
            self.delete(key)
@@ -0,0 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import tempfile
20
21 dogpile_config_defaults = {
22 'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
23 }
24
25 # GLOBAL TO STORE ALL REGISTERED REGIONS
26 dogpile_cache_regions = {}
@@ -0,0 +1,136 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import logging
20 import functools
21
22 from vcsserver.utils import safe_str, sha1
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
25
26 log = logging.getLogger(__name__)
27
28
class RhodeCodeCacheRegion(CacheRegion):
    # CacheRegion subclass that adds a cheaper, conditionally-enabled variant
    # of dogpile's cache_on_arguments decorator.

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator that will not touch any dogpile internals if
        condition isn't met. This works a bit differently than should_cache_fn,
        and it's faster in cases where we don't ever want to compute cached values.
        """
        # resolve once at decoration time whether expiration_time is a callable
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def decorator(fn):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, fn)
            else:
                key_generator = function_key_generator(namespace, fn, to_str=to_str)

            @functools.wraps(fn)
            def decorate(*arg, **kw):
                key = key_generator(*arg, **kw)

                @functools.wraps(fn)
                def creator():
                    # deferred computation of the real value, run by dogpile
                    # only on a cache miss
                    return fn(*arg, **kw)

                # condition is False: bypass dogpile entirely -- no key lookup,
                # no locking, no store; just compute and return the value
                if not condition:
                    return creator()

                timeout = expiration_time() if expiration_time_is_callable \
                    else expiration_time

                return self.get_or_create(key, creator, timeout, should_cache_fn)

            def invalidate(*arg, **kw):
                # drop the cached value for these call arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # store *value* under the key for these call arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch the cached value (if any) for these call arguments
                key = key_generator(*arg, **kw)
                return self.get(key)

            def refresh(*arg, **kw):
                # recompute eagerly and overwrite whatever is cached
                key = key_generator(*arg, **kw)
                value = fn(*arg, **kw)
                self.set(key, value)
                return value

            # expose cache-management helpers on the decorated function,
            # mirroring dogpile's cache_on_arguments API
            decorate.set = set_
            decorate.invalidate = invalidate
            decorate.refresh = refresh
            decorate.get = get
            decorate.original = fn
            decorate.key_generator = key_generator

            return decorate

        return decorator
99
100
def make_region(*arg, **kw):
    """Factory for :class:`RhodeCodeCacheRegion`; all arguments are passed through."""
    return RhodeCodeCacheRegion(*arg, **kw)
103
104
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache options from a settings mapping.

    For every key that starts with one of *prefixes*, the matched prefix is
    stripped and the remaining (whitespace-stripped) name maps to the value;
    string values are also whitespace-stripped.

    :param settings: mapping of configuration keys to values
    :param prefixes: list of key prefixes to match; ``None``/empty matches nothing
    :return: dict of prefix-stripped option names to values
    """
    # `basestring` exists only on Python 2; fall back to `str` on Python 3
    # so this helper works on both interpreters
    try:
        string_types = basestring  # noqa
    except NameError:
        string_types = str

    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                val = settings[key]
                if isinstance(val, string_types):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
117
118
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    normalized = [safe_str(arg) for arg in args]
    return sha1("_".join(normalized))
124
125
def key_generator(namespace, fn):
    """
    Build a dogpile key generator bound to *namespace* and function *fn*:
    keys look like ``<namespace>:<fn name>_<sha1 of the args>``.
    """
    fname = fn.__name__

    def generate_key(*args):
        prefix = namespace or 'default'
        return "{}:{}_{}".format(prefix, fname, compute_key_from_params(*args))

    return generate_key
@@ -1,1 +1,79 b''
1 development_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 0.0.0.0
10 port = 9900
11
12 use = egg:waitress#main
13
14
15 [app:main]
16 use = egg:rhodecode-vcsserver
17
18 pyramid.default_locale_name = en
19 pyramid.includes =
20
21 ## default locale used by VCS systems
22 locale = en_US.UTF-8
23
24
25 ## path to binaries for vcsserver, it should be set by the installer
26 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
27 core.binary_dir = ""
28
29 ## cache region for storing repo_objects cache
30 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
31 ## cache auto-expires after N seconds
32 rc_cache.repo_object.expiration_time = 300
33 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
34 rc_cache.repo_object.max_size = 100
35
36
37 ################################
38 ### LOGGING CONFIGURATION ####
39 ################################
40 [loggers]
41 keys = root, vcsserver
42
43 [handlers]
44 keys = console
45
46 [formatters]
47 keys = generic
48
49 #############
50 ## LOGGERS ##
51 #############
52 [logger_root]
53 level = NOTSET
54 handlers = console
55
56 [logger_vcsserver]
57 level = DEBUG
58 handlers =
59 qualname = vcsserver
60 propagate = 1
61
62
63 ##############
64 ## HANDLERS ##
65 ##############
66
67 [handler_console]
68 class = StreamHandler
69 args = (sys.stderr,)
70 level = DEBUG
71 formatter = generic
72
73 ################
74 ## FORMATTERS ##
75 ################
76
77 [formatter_generic]
78 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
79 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1 +1,100 b''
1 production_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 127.0.0.1
10 port = 9900
11
12
13 ##########################
14 ## GUNICORN WSGI SERVER ##
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 workers = 2
21 ## process name
22 proc_name = rhodecode_vcsserver
23 ## type of worker class, currently `sync` is the only option allowed.
24 worker_class = sync
25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 #worker_connections = 10
27 ## max number of requests that worker will handle before being gracefully
28 ## restarted, could prevent memory leaks
29 max_requests = 1000
30 max_requests_jitter = 30
31 ## amount of time a worker can spend with handling a request before it
32 ## gets killed and restarted. Set to 6hrs
33 timeout = 21600
34
35
36 [app:main]
37 use = egg:rhodecode-vcsserver
38
39 pyramid.default_locale_name = en
40 pyramid.includes =
41
42 ## default locale used by VCS systems
43 locale = en_US.UTF-8
44
45
46 ## path to binaries for vcsserver, it should be set by the installer
47 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
48 core.binary_dir = ""
49
50 ## cache region for storing repo_objects cache
51 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
52 ## cache auto-expires after N seconds
53 rc_cache.repo_object.expiration_time = 300
54 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
55 rc_cache.repo_object.max_size = 100
56
57
58 ################################
59 ### LOGGING CONFIGURATION ####
60 ################################
61 [loggers]
62 keys = root, vcsserver
63
64 [handlers]
65 keys = console
66
67 [formatters]
68 keys = generic
69
70 #############
71 ## LOGGERS ##
72 #############
73 [logger_root]
74 level = NOTSET
75 handlers = console
76
77 [logger_vcsserver]
78 level = DEBUG
79 handlers =
80 qualname = vcsserver
81 propagate = 1
82
83
84 ##############
85 ## HANDLERS ##
86 ##############
87
88 [handler_console]
89 class = StreamHandler
90 args = (sys.stderr,)
91 level = DEBUG
92 formatter = generic
93
94 ################
95 ## FORMATTERS ##
96 ################
97
98 [formatter_generic]
99 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
100 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,60 +1,53 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 7 { pkgs
8 8 , basePythonPackages
9 9 }:
10 10
11 11 let
12 12 sed = "sed -i";
13 13
14 14 in
15 15
16 16 self: super: {
17 17
18 "beaker" = super."beaker".override (attrs: {
19 patches = [
20 ./patch_beaker/patch-beaker-lock-func-debug.diff
21 ./patch_beaker/patch-beaker-metadata-reuse.diff
22 ];
23 });
24
25 18 "gevent" = super."gevent".override (attrs: {
26 19 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
27 20 # NOTE: (marcink) odd requirements from gevent aren't set properly,
28 21 # thus we need to inject psutil manually
29 22 self."psutil"
30 23 ];
31 24 });
32 25
33 26 "hgsubversion" = super."hgsubversion".override (attrs: {
34 27 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
35 28 pkgs.sqlite
36 29 #basePythonPackages.sqlite3
37 30 self.mercurial
38 31 ];
39 32 });
40 33
41 34 "subvertpy" = super."subvertpy".override (attrs: {
42 35 SVN_PREFIX = "${pkgs.subversion.dev}";
43 36 propagatedBuildInputs = [
44 37 pkgs.apr.dev
45 38 pkgs.aprutil
46 39 pkgs.subversion
47 40 ];
48 41 });
49 42
50 43 "mercurial" = super."mercurial".override (attrs: {
51 44 propagatedBuildInputs = [
52 45 # self.python.modules.curses
53 46 ];
54 47 });
55 48
56 49 # Avoid that base packages screw up the build process
57 50 inherit (basePythonPackages)
58 51 setuptools;
59 52
60 53 }
@@ -1,931 +1,936 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.1.5";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
12 12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-18.1.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
23 23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 "beaker" = super.buildPythonPackage {
41 name = "beaker-1.9.1";
42 doCheck = false;
43 propagatedBuildInputs = [
44 self."funcsigs"
45 ];
46 src = fetchurl {
47 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
48 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
49 };
50 meta = {
51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 };
53 };
54 40 "beautifulsoup4" = super.buildPythonPackage {
55 41 name = "beautifulsoup4-4.6.0";
56 42 doCheck = false;
57 43 src = fetchurl {
58 44 url = "https://files.pythonhosted.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
59 45 sha256 = "12cf0ygpz9srpfh9gx2f9ba0swa1rzypv3sm4r0hmjyw6b4nm2w0";
60 46 };
61 47 meta = {
62 48 license = [ pkgs.lib.licenses.mit ];
63 49 };
64 50 };
65 51 "configobj" = super.buildPythonPackage {
66 52 name = "configobj-5.0.6";
67 53 doCheck = false;
68 54 propagatedBuildInputs = [
69 55 self."six"
70 56 ];
71 57 src = fetchurl {
72 58 url = "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
73 59 sha256 = "00h9rcmws03xvdlfni11yb60bz3kxfvsj6dg6nrpzj71f03nbxd2";
74 60 };
75 61 meta = {
76 62 license = [ pkgs.lib.licenses.bsdOriginal ];
77 63 };
78 64 };
79 65 "cov-core" = super.buildPythonPackage {
80 66 name = "cov-core-1.15.0";
81 67 doCheck = false;
82 68 propagatedBuildInputs = [
83 69 self."coverage"
84 70 ];
85 71 src = fetchurl {
86 72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
87 73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
88 74 };
89 75 meta = {
90 76 license = [ pkgs.lib.licenses.mit ];
91 77 };
92 78 };
93 79 "coverage" = super.buildPythonPackage {
94 80 name = "coverage-3.7.1";
95 81 doCheck = false;
96 82 src = fetchurl {
97 83 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
98 84 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
99 85 };
100 86 meta = {
101 87 license = [ pkgs.lib.licenses.bsdOriginal ];
102 88 };
103 89 };
104 90 "decorator" = super.buildPythonPackage {
105 91 name = "decorator-4.1.2";
106 92 doCheck = false;
107 93 src = fetchurl {
108 94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
109 95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
110 96 };
111 97 meta = {
112 98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
113 99 };
114 100 };
101 "dogpile.cache" = super.buildPythonPackage {
102 name = "dogpile.cache-0.6.6";
103 doCheck = false;
104 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
106 sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
107 };
108 meta = {
109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 };
111 };
112 "dogpile.core" = super.buildPythonPackage {
113 name = "dogpile.core-0.4.1";
114 doCheck = false;
115 src = fetchurl {
116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 };
119 meta = {
120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 };
122 };
115 123 "dulwich" = super.buildPythonPackage {
116 124 name = "dulwich-0.13.0";
117 125 doCheck = false;
118 126 src = fetchurl {
119 127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
120 128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
121 129 };
122 130 meta = {
123 131 license = [ pkgs.lib.licenses.gpl2Plus ];
124 132 };
125 133 };
126 134 "enum34" = super.buildPythonPackage {
127 135 name = "enum34-1.1.6";
128 136 doCheck = false;
129 137 src = fetchurl {
130 138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
131 139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
132 140 };
133 141 meta = {
134 142 license = [ pkgs.lib.licenses.bsdOriginal ];
135 143 };
136 144 };
137 145 "funcsigs" = super.buildPythonPackage {
138 146 name = "funcsigs-1.0.2";
139 147 doCheck = false;
140 148 src = fetchurl {
141 149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
142 150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
143 151 };
144 152 meta = {
145 153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
146 154 };
147 155 };
148 156 "gevent" = super.buildPythonPackage {
149 157 name = "gevent-1.3.4";
150 158 doCheck = false;
151 159 propagatedBuildInputs = [
152 160 self."greenlet"
153 161 ];
154 162 src = fetchurl {
155 163 url = "https://files.pythonhosted.org/packages/f8/85/f92a8f43c9f15ffad49d743d929863a042ce3e8de5746c63bb4d6ce51a02/gevent-1.3.4.tar.gz";
156 164 sha256 = "0x2gm3iba4cprclnbkcq2i14m6br2hfqns8yv3sjil46b1qdri2k";
157 165 };
158 166 meta = {
159 167 license = [ pkgs.lib.licenses.mit ];
160 168 };
161 169 };
162 170 "gprof2dot" = super.buildPythonPackage {
163 171 name = "gprof2dot-2017.9.19";
164 172 doCheck = false;
165 173 src = fetchurl {
166 174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
167 175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
168 176 };
169 177 meta = {
170 178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
171 179 };
172 180 };
173 181 "greenlet" = super.buildPythonPackage {
174 182 name = "greenlet-0.4.13";
175 183 doCheck = false;
176 184 src = fetchurl {
177 185 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
178 186 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
179 187 };
180 188 meta = {
181 189 license = [ pkgs.lib.licenses.mit ];
182 190 };
183 191 };
184 192 "gunicorn" = super.buildPythonPackage {
185 193 name = "gunicorn-19.9.0";
186 194 doCheck = false;
187 195 src = fetchurl {
188 196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
189 197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
190 198 };
191 199 meta = {
192 200 license = [ pkgs.lib.licenses.mit ];
193 201 };
194 202 };
195 203 "hg-evolve" = super.buildPythonPackage {
196 204 name = "hg-evolve-8.0.1";
197 205 doCheck = false;
198 206 src = fetchurl {
199 207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
200 208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
201 209 };
202 210 meta = {
203 211 license = [ { fullName = "GPLv2+"; } ];
204 212 };
205 213 };
206 214 "hgsubversion" = super.buildPythonPackage {
207 215 name = "hgsubversion-1.9.2";
208 216 doCheck = false;
209 217 propagatedBuildInputs = [
210 218 self."mercurial"
211 219 self."subvertpy"
212 220 ];
213 221 src = fetchurl {
214 222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
215 223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
216 224 };
217 225 meta = {
218 226 license = [ pkgs.lib.licenses.gpl1 ];
219 227 };
220 228 };
221 229 "hupper" = super.buildPythonPackage {
222 230 name = "hupper-1.3";
223 231 doCheck = false;
224 232 src = fetchurl {
225 233 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
226 234 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
227 235 };
228 236 meta = {
229 237 license = [ pkgs.lib.licenses.mit ];
230 238 };
231 239 };
232 "infrae.cache" = super.buildPythonPackage {
233 name = "infrae.cache-1.0.1";
234 doCheck = false;
235 propagatedBuildInputs = [
236 self."beaker"
237 self."repoze.lru"
238 ];
239 src = fetchurl {
240 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
241 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
242 };
243 meta = {
244 license = [ pkgs.lib.licenses.zpl21 ];
245 };
246 };
247 240 "ipdb" = super.buildPythonPackage {
248 241 name = "ipdb-0.11";
249 242 doCheck = false;
250 243 propagatedBuildInputs = [
251 244 self."setuptools"
252 245 self."ipython"
253 246 ];
254 247 src = fetchurl {
255 248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
256 249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
257 250 };
258 251 meta = {
259 252 license = [ pkgs.lib.licenses.bsdOriginal ];
260 253 };
261 254 };
262 255 "ipython" = super.buildPythonPackage {
263 256 name = "ipython-5.1.0";
264 257 doCheck = false;
265 258 propagatedBuildInputs = [
266 259 self."setuptools"
267 260 self."decorator"
268 261 self."pickleshare"
269 262 self."simplegeneric"
270 263 self."traitlets"
271 264 self."prompt-toolkit"
272 265 self."pygments"
273 266 self."pexpect"
274 267 self."backports.shutil-get-terminal-size"
275 268 self."pathlib2"
276 269 self."pexpect"
277 270 ];
278 271 src = fetchurl {
279 272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
280 273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
281 274 };
282 275 meta = {
283 276 license = [ pkgs.lib.licenses.bsdOriginal ];
284 277 };
285 278 };
286 279 "ipython-genutils" = super.buildPythonPackage {
287 280 name = "ipython-genutils-0.2.0";
288 281 doCheck = false;
289 282 src = fetchurl {
290 283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
291 284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
292 285 };
293 286 meta = {
294 287 license = [ pkgs.lib.licenses.bsdOriginal ];
295 288 };
296 289 };
290 "lru-dict" = super.buildPythonPackage {
291 name = "lru-dict-1.1.6";
292 doCheck = false;
293 src = fetchurl {
294 url = "https://files.pythonhosted.org/packages/00/a5/32ed6e10246cd341ca8cc205acea5d208e4053f48a4dced2b1b31d45ba3f/lru-dict-1.1.6.tar.gz";
295 sha256 = "1k2lhd4dpl6xa6iialbwx4l6bkdzxmzhygms39pvf19x1rk5fm1n";
296 };
297 meta = {
298 license = [ pkgs.lib.licenses.mit ];
299 };
300 };
297 301 "mako" = super.buildPythonPackage {
298 302 name = "mako-1.0.7";
299 303 doCheck = false;
300 304 propagatedBuildInputs = [
301 305 self."markupsafe"
302 306 ];
303 307 src = fetchurl {
304 308 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
305 309 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
306 310 };
307 311 meta = {
308 312 license = [ pkgs.lib.licenses.mit ];
309 313 };
310 314 };
311 315 "markupsafe" = super.buildPythonPackage {
312 316 name = "markupsafe-1.0";
313 317 doCheck = false;
314 318 src = fetchurl {
315 319 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
316 320 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
317 321 };
318 322 meta = {
319 323 license = [ pkgs.lib.licenses.bsdOriginal ];
320 324 };
321 325 };
322 326 "mercurial" = super.buildPythonPackage {
323 327 name = "mercurial-4.6.1";
324 328 doCheck = false;
325 329 src = fetchurl {
326 330 url = "https://files.pythonhosted.org/packages/12/e7/46894628ed3d6b0ae1e324523b09fdb8a90f0720bebe43cab88e0ea91b39/mercurial-4.6.1.tar.gz";
327 331 sha256 = "138h46k4rhr8gd0a5nwm8896f4x97dla20wqizllhvmar35qxyl9";
328 332 };
329 333 meta = {
330 334 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
331 335 };
332 336 };
333 337 "mock" = super.buildPythonPackage {
334 338 name = "mock-1.0.1";
335 339 doCheck = false;
336 340 src = fetchurl {
337 341 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
338 342 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
339 343 };
340 344 meta = {
341 345 license = [ pkgs.lib.licenses.bsdOriginal ];
342 346 };
343 347 };
344 348 "more-itertools" = super.buildPythonPackage {
345 349 name = "more-itertools-4.2.0";
346 350 doCheck = false;
347 351 propagatedBuildInputs = [
348 352 self."six"
349 353 ];
350 354 src = fetchurl {
351 355 url = "https://files.pythonhosted.org/packages/c0/2f/6773347277d76c5ade4414a6c3f785ef27e7f5c4b0870ec7e888e66a8d83/more-itertools-4.2.0.tar.gz";
352 356 sha256 = "1s6qhl7a7jy8gqw8p545rxfp7rwz1hmjr9p6prk93zbv6f9rhsrb";
353 357 };
354 358 meta = {
355 359 license = [ pkgs.lib.licenses.mit ];
356 360 };
357 361 };
358 362 "msgpack-python" = super.buildPythonPackage {
359 363 name = "msgpack-python-0.5.6";
360 364 doCheck = false;
361 365 src = fetchurl {
362 366 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
363 367 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
364 368 };
365 369 meta = {
366 370 license = [ pkgs.lib.licenses.asl20 ];
367 371 };
368 372 };
369 373 "pastedeploy" = super.buildPythonPackage {
370 374 name = "pastedeploy-1.5.2";
371 375 doCheck = false;
372 376 src = fetchurl {
373 377 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
374 378 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
375 379 };
376 380 meta = {
377 381 license = [ pkgs.lib.licenses.mit ];
378 382 };
379 383 };
380 384 "pathlib2" = super.buildPythonPackage {
381 385 name = "pathlib2-2.3.0";
382 386 doCheck = false;
383 387 propagatedBuildInputs = [
384 388 self."six"
385 389 self."scandir"
386 390 ];
387 391 src = fetchurl {
388 392 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
389 393 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
390 394 };
391 395 meta = {
392 396 license = [ pkgs.lib.licenses.mit ];
393 397 };
394 398 };
395 399 "pexpect" = super.buildPythonPackage {
396 400 name = "pexpect-4.6.0";
397 401 doCheck = false;
398 402 propagatedBuildInputs = [
399 403 self."ptyprocess"
400 404 ];
401 405 src = fetchurl {
402 406 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
403 407 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
404 408 };
405 409 meta = {
406 410 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
407 411 };
408 412 };
409 413 "pickleshare" = super.buildPythonPackage {
410 414 name = "pickleshare-0.7.4";
411 415 doCheck = false;
412 416 propagatedBuildInputs = [
413 417 self."pathlib2"
414 418 ];
415 419 src = fetchurl {
416 420 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
417 421 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
418 422 };
419 423 meta = {
420 424 license = [ pkgs.lib.licenses.mit ];
421 425 };
422 426 };
423 427 "plaster" = super.buildPythonPackage {
424 428 name = "plaster-1.0";
425 429 doCheck = false;
426 430 propagatedBuildInputs = [
427 431 self."setuptools"
428 432 ];
429 433 src = fetchurl {
430 434 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
431 435 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
432 436 };
433 437 meta = {
434 438 license = [ pkgs.lib.licenses.mit ];
435 439 };
436 440 };
437 441 "plaster-pastedeploy" = super.buildPythonPackage {
438 442 name = "plaster-pastedeploy-0.5";
439 443 doCheck = false;
440 444 propagatedBuildInputs = [
441 445 self."pastedeploy"
442 446 self."plaster"
443 447 ];
444 448 src = fetchurl {
445 449 url = "https://files.pythonhosted.org/packages/e7/05/cc12d9d3efaa10046b6ec5de91b16486c95de4847dc57599bf58021a3d5c/plaster_pastedeploy-0.5.tar.gz";
446 450 sha256 = "1aavz3vbh7m9m6hfidwh6gqlrs1mrxl7k6794rm9jdik59dii8vh";
447 451 };
448 452 meta = {
449 453 license = [ pkgs.lib.licenses.mit ];
450 454 };
451 455 };
452 456 "pluggy" = super.buildPythonPackage {
453 457 name = "pluggy-0.6.0";
454 458 doCheck = false;
455 459 src = fetchurl {
456 460 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
457 461 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
458 462 };
459 463 meta = {
460 464 license = [ pkgs.lib.licenses.mit ];
461 465 };
462 466 };
463 467 "prompt-toolkit" = super.buildPythonPackage {
464 468 name = "prompt-toolkit-1.0.15";
465 469 doCheck = false;
466 470 propagatedBuildInputs = [
467 471 self."six"
468 472 self."wcwidth"
469 473 ];
470 474 src = fetchurl {
471 475 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
472 476 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
473 477 };
474 478 meta = {
475 479 license = [ pkgs.lib.licenses.bsdOriginal ];
476 480 };
477 481 };
478 482 "psutil" = super.buildPythonPackage {
479 483 name = "psutil-5.4.6";
480 484 doCheck = false;
481 485 src = fetchurl {
482 486 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
483 487 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
484 488 };
485 489 meta = {
486 490 license = [ pkgs.lib.licenses.bsdOriginal ];
487 491 };
488 492 };
489 493 "ptyprocess" = super.buildPythonPackage {
490 494 name = "ptyprocess-0.6.0";
491 495 doCheck = false;
492 496 src = fetchurl {
493 497 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
494 498 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
495 499 };
496 500 meta = {
497 501 license = [ ];
498 502 };
499 503 };
500 504 "py" = super.buildPythonPackage {
501 505 name = "py-1.5.3";
502 506 doCheck = false;
503 507 src = fetchurl {
504 508 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
505 509 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
506 510 };
507 511 meta = {
508 512 license = [ pkgs.lib.licenses.mit ];
509 513 };
510 514 };
511 515 "pygments" = super.buildPythonPackage {
512 516 name = "pygments-2.2.0";
513 517 doCheck = false;
514 518 src = fetchurl {
515 519 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
516 520 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
517 521 };
518 522 meta = {
519 523 license = [ pkgs.lib.licenses.bsdOriginal ];
520 524 };
521 525 };
522 526 "pyramid" = super.buildPythonPackage {
523 527 name = "pyramid-1.9.2";
524 528 doCheck = false;
525 529 propagatedBuildInputs = [
526 530 self."setuptools"
527 531 self."webob"
528 532 self."repoze.lru"
529 533 self."zope.interface"
530 534 self."zope.deprecation"
531 535 self."venusian"
532 536 self."translationstring"
533 537 self."pastedeploy"
534 538 self."plaster"
535 539 self."plaster-pastedeploy"
536 540 self."hupper"
537 541 ];
538 542 src = fetchurl {
539 543 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
540 544 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
541 545 };
542 546 meta = {
543 547 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
544 548 };
545 549 };
546 550 "pyramid-mako" = super.buildPythonPackage {
547 551 name = "pyramid-mako-1.0.2";
548 552 doCheck = false;
549 553 propagatedBuildInputs = [
550 554 self."pyramid"
551 555 self."mako"
552 556 ];
553 557 src = fetchurl {
554 558 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
555 559 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
556 560 };
557 561 meta = {
558 562 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
559 563 };
560 564 };
561 565 "pytest" = super.buildPythonPackage {
562 566 name = "pytest-3.6.0";
563 567 doCheck = false;
564 568 propagatedBuildInputs = [
565 569 self."py"
566 570 self."six"
567 571 self."setuptools"
568 572 self."attrs"
569 573 self."more-itertools"
570 574 self."atomicwrites"
571 575 self."pluggy"
572 576 self."funcsigs"
573 577 ];
574 578 src = fetchurl {
575 579 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
576 580 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
577 581 };
578 582 meta = {
579 583 license = [ pkgs.lib.licenses.mit ];
580 584 };
581 585 };
582 586 "pytest-cov" = super.buildPythonPackage {
583 587 name = "pytest-cov-2.5.1";
584 588 doCheck = false;
585 589 propagatedBuildInputs = [
586 590 self."pytest"
587 591 self."coverage"
588 592 ];
589 593 src = fetchurl {
590 594 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
591 595 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
592 596 };
593 597 meta = {
594 598 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
595 599 };
596 600 };
597 601 "pytest-profiling" = super.buildPythonPackage {
598 602 name = "pytest-profiling-1.3.0";
599 603 doCheck = false;
600 604 propagatedBuildInputs = [
601 605 self."six"
602 606 self."pytest"
603 607 self."gprof2dot"
604 608 ];
605 609 src = fetchurl {
606 610 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
607 611 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
608 612 };
609 613 meta = {
610 614 license = [ pkgs.lib.licenses.mit ];
611 615 };
612 616 };
613 617 "pytest-runner" = super.buildPythonPackage {
614 618 name = "pytest-runner-4.2";
615 619 doCheck = false;
616 620 src = fetchurl {
617 621 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
618 622 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
619 623 };
620 624 meta = {
621 625 license = [ pkgs.lib.licenses.mit ];
622 626 };
623 627 };
624 628 "pytest-sugar" = super.buildPythonPackage {
625 629 name = "pytest-sugar-0.9.1";
626 630 doCheck = false;
627 631 propagatedBuildInputs = [
628 632 self."pytest"
629 633 self."termcolor"
630 634 ];
631 635 src = fetchurl {
632 636 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
633 637 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
634 638 };
635 639 meta = {
636 640 license = [ pkgs.lib.licenses.bsdOriginal ];
637 641 };
638 642 };
639 643 "pytest-timeout" = super.buildPythonPackage {
640 644 name = "pytest-timeout-1.2.1";
641 645 doCheck = false;
642 646 propagatedBuildInputs = [
643 647 self."pytest"
644 648 ];
645 649 src = fetchurl {
646 650 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
647 651 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
648 652 };
649 653 meta = {
650 654 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
651 655 };
652 656 };
653 657 "repoze.lru" = super.buildPythonPackage {
654 658 name = "repoze.lru-0.7";
655 659 doCheck = false;
656 660 src = fetchurl {
657 661 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
658 662 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
659 663 };
660 664 meta = {
661 665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
662 666 };
663 667 };
664 668 "rhodecode-vcsserver" = super.buildPythonPackage {
665 669 name = "rhodecode-vcsserver-4.13.0";
666 670 buildInputs = [
667 671 self."pytest"
668 672 self."py"
669 673 self."pytest-cov"
670 674 self."pytest-sugar"
671 675 self."pytest-runner"
672 676 self."pytest-profiling"
673 677 self."gprof2dot"
674 678 self."pytest-timeout"
675 679 self."mock"
676 680 self."webtest"
677 681 self."cov-core"
678 682 self."coverage"
679 683 self."configobj"
680 684 ];
681 685 doCheck = true;
682 686 propagatedBuildInputs = [
683 self."beaker"
684 687 self."configobj"
688 self."dogpile.cache"
689 self."dogpile.core"
685 690 self."decorator"
686 691 self."dulwich"
687 692 self."hgsubversion"
688 693 self."hg-evolve"
689 self."infrae.cache"
694 self."lru-dict"
690 695 self."mako"
691 696 self."markupsafe"
692 697 self."mercurial"
693 698 self."msgpack-python"
694 699 self."pastedeploy"
695 700 self."psutil"
696 701 self."pyramid"
697 702 self."pyramid-mako"
698 703 self."pygments"
699 704 self."pathlib2"
700 705 self."repoze.lru"
701 706 self."simplejson"
702 707 self."subprocess32"
703 708 self."subvertpy"
704 709 self."six"
705 710 self."translationstring"
706 711 self."webob"
707 712 self."zope.deprecation"
708 713 self."zope.interface"
709 714 self."gevent"
710 715 self."greenlet"
711 716 self."gunicorn"
712 717 self."waitress"
713 718 self."ipdb"
714 719 self."ipython"
715 720 self."pytest"
716 721 self."py"
717 722 self."pytest-cov"
718 723 self."pytest-sugar"
719 724 self."pytest-runner"
720 725 self."pytest-profiling"
721 726 self."gprof2dot"
722 727 self."pytest-timeout"
723 728 self."mock"
724 729 self."webtest"
725 730 self."cov-core"
726 731 self."coverage"
727 732 ];
728 733 src = ./.;
729 734 meta = {
730 735 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
731 736 };
732 737 };
733 738 "scandir" = super.buildPythonPackage {
734 739 name = "scandir-1.7";
735 740 doCheck = false;
736 741 src = fetchurl {
737 742 url = "https://files.pythonhosted.org/packages/13/bb/e541b74230bbf7a20a3949a2ee6631be299378a784f5445aa5d0047c192b/scandir-1.7.tar.gz";
738 743 sha256 = "0gbnhjzg42rj87ljv9kb648rfxph69ly3c8r9841dxy4d7l5pmdj";
739 744 };
740 745 meta = {
741 746 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
742 747 };
743 748 };
744 749 "simplegeneric" = super.buildPythonPackage {
745 750 name = "simplegeneric-0.8.1";
746 751 doCheck = false;
747 752 src = fetchurl {
748 753 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
749 754 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
750 755 };
751 756 meta = {
752 757 license = [ pkgs.lib.licenses.zpl21 ];
753 758 };
754 759 };
755 760 "simplejson" = super.buildPythonPackage {
756 761 name = "simplejson-3.11.1";
757 762 doCheck = false;
758 763 src = fetchurl {
759 764 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
760 765 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
761 766 };
762 767 meta = {
763 768 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
764 769 };
765 770 };
766 771 "six" = super.buildPythonPackage {
767 772 name = "six-1.11.0";
768 773 doCheck = false;
769 774 src = fetchurl {
770 775 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
771 776 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
772 777 };
773 778 meta = {
774 779 license = [ pkgs.lib.licenses.mit ];
775 780 };
776 781 };
777 782 "subprocess32" = super.buildPythonPackage {
778 783 name = "subprocess32-3.5.1";
779 784 doCheck = false;
780 785 src = fetchurl {
781 786 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
782 787 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
783 788 };
784 789 meta = {
785 790 license = [ pkgs.lib.licenses.psfl ];
786 791 };
787 792 };
788 793 "subvertpy" = super.buildPythonPackage {
789 794 name = "subvertpy-0.10.1";
790 795 doCheck = false;
791 796 src = fetchurl {
792 797 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
793 798 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
794 799 };
795 800 meta = {
796 801 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
797 802 };
798 803 };
799 804 "termcolor" = super.buildPythonPackage {
800 805 name = "termcolor-1.1.0";
801 806 doCheck = false;
802 807 src = fetchurl {
803 808 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
804 809 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
805 810 };
806 811 meta = {
807 812 license = [ pkgs.lib.licenses.mit ];
808 813 };
809 814 };
810 815 "traitlets" = super.buildPythonPackage {
811 816 name = "traitlets-4.3.2";
812 817 doCheck = false;
813 818 propagatedBuildInputs = [
814 819 self."ipython-genutils"
815 820 self."six"
816 821 self."decorator"
817 822 self."enum34"
818 823 ];
819 824 src = fetchurl {
820 825 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
821 826 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
822 827 };
823 828 meta = {
824 829 license = [ pkgs.lib.licenses.bsdOriginal ];
825 830 };
826 831 };
827 832 "translationstring" = super.buildPythonPackage {
828 833 name = "translationstring-1.3";
829 834 doCheck = false;
830 835 src = fetchurl {
831 836 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
832 837 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
833 838 };
834 839 meta = {
835 840 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
836 841 };
837 842 };
838 843 "venusian" = super.buildPythonPackage {
839 844 name = "venusian-1.1.0";
840 845 doCheck = false;
841 846 src = fetchurl {
842 847 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
843 848 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
844 849 };
845 850 meta = {
846 851 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
847 852 };
848 853 };
849 854 "waitress" = super.buildPythonPackage {
850 855 name = "waitress-1.1.0";
851 856 doCheck = false;
852 857 src = fetchurl {
853 858 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
854 859 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
855 860 };
856 861 meta = {
857 862 license = [ pkgs.lib.licenses.zpl21 ];
858 863 };
859 864 };
860 865 "wcwidth" = super.buildPythonPackage {
861 866 name = "wcwidth-0.1.7";
862 867 doCheck = false;
863 868 src = fetchurl {
864 869 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
865 870 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
866 871 };
867 872 meta = {
868 873 license = [ pkgs.lib.licenses.mit ];
869 874 };
870 875 };
871 876 "webob" = super.buildPythonPackage {
872 877 name = "webob-1.7.4";
873 878 doCheck = false;
874 879 src = fetchurl {
875 880 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
876 881 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
877 882 };
878 883 meta = {
879 884 license = [ pkgs.lib.licenses.mit ];
880 885 };
881 886 };
882 887 "webtest" = super.buildPythonPackage {
883 888 name = "webtest-2.0.29";
884 889 doCheck = false;
885 890 propagatedBuildInputs = [
886 891 self."six"
887 892 self."webob"
888 893 self."waitress"
889 894 self."beautifulsoup4"
890 895 ];
891 896 src = fetchurl {
892 897 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
893 898 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
894 899 };
895 900 meta = {
896 901 license = [ pkgs.lib.licenses.mit ];
897 902 };
898 903 };
899 904 "zope.deprecation" = super.buildPythonPackage {
900 905 name = "zope.deprecation-4.3.0";
901 906 doCheck = false;
902 907 propagatedBuildInputs = [
903 908 self."setuptools"
904 909 ];
905 910 src = fetchurl {
906 911 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
907 912 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
908 913 };
909 914 meta = {
910 915 license = [ pkgs.lib.licenses.zpl21 ];
911 916 };
912 917 };
913 918 "zope.interface" = super.buildPythonPackage {
914 919 name = "zope.interface-4.5.0";
915 920 doCheck = false;
916 921 propagatedBuildInputs = [
917 922 self."setuptools"
918 923 ];
919 924 src = fetchurl {
920 925 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
921 926 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
922 927 };
923 928 meta = {
924 929 license = [ pkgs.lib.licenses.zpl21 ];
925 930 };
926 931 };
927 932
928 933 ### Test requirements
929 934
930 935
931 936 }
@@ -1,45 +1,46 b''
1 1 ## dependencies
2 2
3 beaker==1.9.1
4 3 configobj==5.0.6
4 dogpile.cache==0.6.6
5 dogpile.core==0.4.1
5 6 decorator==4.1.2
6 7 dulwich==0.13.0
7 8 hgsubversion==1.9.2
8 9 hg-evolve==8.0.1
9 infrae.cache==1.0.1
10 lru-dict==1.1.6
10 11 mako==1.0.7
11 12 markupsafe==1.0.0
12 13 mercurial==4.6.1
13 14 msgpack-python==0.5.6
14 15
15 16 pastedeploy==1.5.2
16 17 psutil==5.4.6
17 18 pyramid==1.9.2
18 19 pyramid-mako==1.0.2
19 20
20 21 pygments==2.2.0
21 22 pathlib2==2.3.0
22 23 repoze.lru==0.7
23 24 simplejson==3.11.1
24 25 subprocess32==3.5.1
25 26
26 27 subvertpy==0.10.1
27 28
28 29 six==1.11.0
29 30 translationstring==1.3
30 31 webob==1.7.4
31 32 zope.deprecation==4.3.0
32 33 zope.interface==4.5.0
33 34
34 35 ## http servers
35 36 gevent==1.3.4
36 37 greenlet==0.4.13
37 38 gunicorn==19.9.0
38 39 waitress==1.1.0
39 40
40 41 ## debug
41 42 ipdb==0.11.0
42 43 ipython==5.1.0
43 44
44 45 ## test related requirements
45 46 -r requirements_test.txt
@@ -1,98 +1,93 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 from vcsserver.lib.rc_cache import region_meta
23 24 log = logging.getLogger(__name__)
24 25
25 26
26 27 class RepoFactory(object):
27 28 """
28 29 Utility to create instances of repository
29 30
30 31 It provides internal caching of the `repo` object based on
31 32 the :term:`call context`.
32 33 """
34 repo_type = None
33 35
34 def __init__(self, repo_cache):
35 self._cache = repo_cache
36 def __init__(self):
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
36 38
37 39 def _create_config(self, path, config):
38 40 config = {}
39 41 return config
40 42
41 43 def _create_repo(self, wire, create):
42 44 raise NotImplementedError()
43 45
44 46 def repo(self, wire, create=False):
45 47 """
46 48 Get a repository instance for the given path.
47 49
48 50 Uses internally the low level beaker API since the decorators introduce
49 51 significant overhead.
50 52 """
51 def create_new_repo():
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
52 62 return self._create_repo(wire, create)
53 63
54 return self._repo(wire, create_new_repo)
55
56 def _repo(self, wire, createfunc):
57 context = wire.get('context', None)
58 cache = wire.get('cache', True)
59
60 if context and cache:
61 cache_key = (context, wire['path'])
62 log.debug(
63 'FETCH %s@%s repo object from cache. Context: %s',
64 self.__class__.__name__, wire['path'], context)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
66 else:
67 log.debug(
68 'INIT %s@%s repo object based on wire %s. Context: %s',
69 self.__class__.__name__, wire['path'], wire, context)
70 return createfunc()
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
65 return repo
71 66
72 67
73 68 def obfuscate_qs(query_string):
74 69 if query_string is None:
75 70 return None
76 71
77 72 parsed = []
78 73 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
79 74 if k in ['auth_token', 'api_key']:
80 75 v = "*****"
81 76 parsed.append((k, v))
82 77
83 78 return '&'.join('{}{}'.format(
84 79 k, '={}'.format(v) if v else '') for k, v in parsed)
85 80
86 81
87 82 def raise_from_original(new_type):
88 83 """
89 84 Raise a new exception type with original args and traceback.
90 85 """
91 86 exc_type, exc_value, exc_traceback = sys.exc_info()
92 87
93 88 traceback.format_exception(exc_type, exc_value, exc_traceback)
94 89
95 90 try:
96 91 raise new_type(*exc_value.args), None, exc_traceback
97 92 finally:
98 93 del exc_traceback
@@ -1,670 +1,671 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""
    lookup_errors = (
        ChecksumMismatch, WrongObjectException, MissingCommitError,
        ObjectMissing)

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except lookup_errors as e:
            raise exceptions.LookupException(e.message)
        except (HangupException, UnexpectedCommandError) as e:
            raise exceptions.VcsException(e.message)
        except Exception as e:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
71 71
72 72
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.

    TODO: mikhail: please check if we need this wrapper after updating dulwich
    to 0.12.0 +
    """
    def __del__(self):
        # ``object_store`` may be absent if __init__ failed part-way; guard
        # so interpreter shutdown never raises from inside __del__
        if hasattr(self, 'object_store'):
            self.close()
87 87
88 88
class GitFactory(RepoFactory):
    # backend identifier; participates in the repo-object cache key used by
    # the base factory
    repo_type = 'git'

    def _create_repo(self, wire, create):
        """Instantiate a dulwich-backed Repo for the path carried in *wire*."""
        # dulwich requires unicode paths; decode using the wire encoding
        repo_path = str_to_dulwich(wire['path'])
        return Repo(repo_path)
94 95
95 96
96 97 class GitRemote(object):
97 98
    def __init__(self, factory):
        # factory producing (and caching) repo objects from wire dicts
        self._factory = factory

        # maps attribute names accepted by bulk_request() to the method that
        # resolves each of them
        self._bulk_methods = {
            "author": self.commit_attribute,
            "date": self.get_object_attrs,
            "message": self.commit_attribute,
            "parents": self.commit_attribute,
            "_commit": self.revision,
        }
108 109
109 110 def _wire_to_config(self, wire):
110 111 if 'config' in wire:
111 112 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 113 return {}
113 114
114 115 def _assign_ref(self, wire, ref, commit_id):
115 116 repo = self._factory.repo(wire)
116 117 repo[ref] = commit_id
117 118
118 119 @reraise_safe_exceptions
119 120 def add_object(self, wire, content):
120 121 repo = self._factory.repo(wire)
121 122 blob = objects.Blob()
122 123 blob.set_raw_string(content)
123 124 repo.object_store.add_object(blob)
124 125 return blob.id
125 126
126 127 @reraise_safe_exceptions
127 128 def assert_correct_path(self, wire):
128 129 path = wire.get('path')
129 130 try:
130 131 self._factory.repo(wire)
131 132 except NotGitRepository as e:
132 133 tb = traceback.format_exc()
133 134 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 135 return False
135 136
136 137 return True
137 138
138 139 @reraise_safe_exceptions
139 140 def bare(self, wire):
140 141 repo = self._factory.repo(wire)
141 142 return repo.bare
142 143
143 144 @reraise_safe_exceptions
144 145 def blob_as_pretty_string(self, wire, sha):
145 146 repo = self._factory.repo(wire)
146 147 return repo[sha].as_pretty_string()
147 148
148 149 @reraise_safe_exceptions
149 150 def blob_raw_length(self, wire, sha):
150 151 repo = self._factory.repo(wire)
151 152 blob = repo[sha]
152 153 return blob.raw_length()
153 154
    def _parse_lfs_pointer(self, raw_content):
        """
        Parse *raw_content* as a git-lfs pointer file.

        Returns a dict with ``spec_ver``, ``oid_hash`` and ``oid_size`` keys
        when the content matches the lfs pointer format, otherwise an empty
        dict.
        """

        # cheap prefix check before running the full regex
        spec_string = 'version https://git-lfs.github.com/spec'
        if raw_content and raw_content.startswith(spec_string):
            pattern = re.compile(r"""
            (?:\n)?
            ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
            ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
            ^size[ ](?P<oid_size>[0-9]+)\n
            (?:\n)?
            """, re.VERBOSE | re.MULTILINE)
            match = pattern.match(raw_content)
            if match:
                return match.groupdict()

        return {}
170 171
171 172 @reraise_safe_exceptions
172 173 def is_large_file(self, wire, sha):
173 174 repo = self._factory.repo(wire)
174 175 blob = repo[sha]
175 176 return self._parse_lfs_pointer(blob.as_raw_string())
176 177
177 178 @reraise_safe_exceptions
178 179 def in_largefiles_store(self, wire, oid):
179 180 repo = self._factory.repo(wire)
180 181 conf = self._wire_to_config(wire)
181 182
182 183 store_location = conf.get('vcs_git_lfs_store_location')
183 184 if store_location:
184 185 repo_name = repo.path
185 186 store = LFSOidStore(
186 187 oid=oid, repo=repo_name, store_location=store_location)
187 188 return store.has_oid()
188 189
189 190 return False
190 191
191 192 @reraise_safe_exceptions
192 193 def store_path(self, wire, oid):
193 194 repo = self._factory.repo(wire)
194 195 conf = self._wire_to_config(wire)
195 196
196 197 store_location = conf.get('vcs_git_lfs_store_location')
197 198 if store_location:
198 199 repo_name = repo.path
199 200 store = LFSOidStore(
200 201 oid=oid, repo=repo_name, store_location=store_location)
201 202 return store.oid_path
202 203 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203 204
    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        """
        Resolve several commit attributes of *rev* in a single call.

        ``pre_load`` is an iterable of attribute names registered in
        ``self._bulk_methods``; an unknown name raises
        ``exceptions.VcsException``.
        """
        result = {}
        for attr in pre_load:
            try:
                method = self._bulk_methods[attr]
                args = [wire, rev]
                # "date" resolves via get_object_attrs and needs the raw
                # commit fields; author/message/parents are looked up by name
                if attr == "date":
                    args.extend(["commit_time", "commit_timezone"])
                elif attr in ["author", "message", "parents"]:
                    args.append(attr)
                result[attr] = method(*args)
            except KeyError:
                raise exceptions.VcsException(
                    "Unknown bulk attribute: %s" % attr)
        return result
220 221
221 222 def _build_opener(self, url):
222 223 handlers = []
223 224 url_obj = url_parser(url)
224 225 _, authinfo = url_obj.authinfo()
225 226
226 227 if authinfo:
227 228 # create a password manager
228 229 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 230 passmgr.add_password(*authinfo)
230 231
231 232 handlers.extend((httpbasicauthhandler(passmgr),
232 233 httpdigestauthhandler(passmgr)))
233 234
234 235 return urllib2.build_opener(*handlers)
235 236
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """
        Probe *url* for a reachable git "smart HTTP" endpoint.

        Returns True when the remote responds like a git repository; raises
        ``exceptions.URLError`` otherwise. ``config`` is currently unused.
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # mask credentials before the URL is logged or put into error messages
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        # probe the smart-HTTP discovery endpoint
        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
278 279
279 280 @reraise_safe_exceptions
280 281 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 282 remote_refs = self.fetch(wire, url, apply_refs=False)
282 283 repo = self._factory.repo(wire)
283 284 if isinstance(valid_refs, list):
284 285 valid_refs = tuple(valid_refs)
285 286
286 287 for k in remote_refs:
287 288 # only parse heads/tags and skip so called deferred tags
288 289 if k.startswith(valid_refs) and not k.endswith(deferred):
289 290 repo[k] = remote_refs[k]
290 291
291 292 if update_after_clone:
292 293 # we want to checkout HEAD
293 294 repo["HEAD"] = remote_refs["HEAD"]
294 295 index.build_index_from_tree(repo.path, repo.index_path(),
295 296 repo.object_store, repo["HEAD"].tree)
296 297
297 298 # TODO: this is quite complex, check if that can be simplified
298 299 @reraise_safe_exceptions
299 300 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 301 repo = self._factory.repo(wire)
301 302 object_store = repo.object_store
302 303
303 304 # Create tree and populates it with blobs
304 305 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305 306
306 307 for node in updated:
307 308 # Compute subdirs if needed
308 309 dirpath, nodename = vcspath.split(node['path'])
309 310 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 311 parent = commit_tree
311 312 ancestors = [('', parent)]
312 313
313 314 # Tries to dig for the deepest existing tree
314 315 while dirnames:
315 316 curdir = dirnames.pop(0)
316 317 try:
317 318 dir_id = parent[curdir][1]
318 319 except KeyError:
319 320 # put curdir back into dirnames and stops
320 321 dirnames.insert(0, curdir)
321 322 break
322 323 else:
323 324 # If found, updates parent
324 325 parent = repo[dir_id]
325 326 ancestors.append((curdir, parent))
326 327 # Now parent is deepest existing tree and we need to create
327 328 # subtrees for dirnames (in reverse order)
328 329 # [this only applies for nodes from added]
329 330 new_trees = []
330 331
331 332 blob = objects.Blob.from_string(node['content'])
332 333
333 334 if dirnames:
334 335 # If there are trees which should be created we need to build
335 336 # them now (in reverse order)
336 337 reversed_dirnames = list(reversed(dirnames))
337 338 curtree = objects.Tree()
338 339 curtree[node['node_path']] = node['mode'], blob.id
339 340 new_trees.append(curtree)
340 341 for dirname in reversed_dirnames[:-1]:
341 342 newtree = objects.Tree()
342 343 newtree[dirname] = (DIR_STAT, curtree.id)
343 344 new_trees.append(newtree)
344 345 curtree = newtree
345 346 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 347 else:
347 348 parent.add(
348 349 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349 350
350 351 new_trees.append(parent)
351 352 # Update ancestors
352 353 reversed_ancestors = reversed(
353 354 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 355 for parent, tree, path in reversed_ancestors:
355 356 parent[path] = (DIR_STAT, tree.id)
356 357 object_store.add_object(tree)
357 358
358 359 object_store.add_object(blob)
359 360 for tree in new_trees:
360 361 object_store.add_object(tree)
361 362
362 363 for node_path in removed:
363 364 paths = node_path.split('/')
364 365 tree = commit_tree
365 366 trees = [tree]
366 367 # Traverse deep into the forest...
367 368 for path in paths:
368 369 try:
369 370 obj = repo[tree[path][1]]
370 371 if isinstance(obj, objects.Tree):
371 372 trees.append(obj)
372 373 tree = obj
373 374 except KeyError:
374 375 break
375 376 # Cut down the blob and all rotten trees on the way back...
376 377 for path, tree in reversed(zip(paths, trees)):
377 378 del tree[path]
378 379 if tree:
379 380 # This tree still has elements - don't remove it or any
380 381 # of it's parents
381 382 break
382 383
383 384 object_store.add_object(commit_tree)
384 385
385 386 # Create commit
386 387 commit = objects.Commit()
387 388 commit.tree = commit_tree.id
388 389 for k, v in commit_data.iteritems():
389 390 setattr(commit, k, v)
390 391 object_store.add_object(commit)
391 392
392 393 ref = 'refs/heads/%s' % branch
393 394 repo.refs[ref] = commit.id
394 395
395 396 return commit.id
396 397
397 398 @reraise_safe_exceptions
398 399 def fetch(self, wire, url, apply_refs=True, refs=None):
399 400 if url != 'default' and '://' not in url:
400 401 client = LocalGitClient(url)
401 402 else:
402 403 url_obj = url_parser(url)
403 404 o = self._build_opener(url)
404 405 url, _ = url_obj.authinfo()
405 406 client = HttpGitClient(base_url=url, opener=o)
406 407 repo = self._factory.repo(wire)
407 408
408 409 determine_wants = repo.object_store.determine_wants_all
409 410 if refs:
410 411 def determine_wants_requested(references):
411 412 return [references[r] for r in references if r in refs]
412 413 determine_wants = determine_wants_requested
413 414
414 415 try:
415 416 remote_refs = client.fetch(
416 417 path=url, target=repo, determine_wants=determine_wants)
417 418 except NotGitRepository as e:
418 419 log.warning(
419 420 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 421 # Exception can contain unicode which we convert
421 422 raise exceptions.AbortException(repr(e))
422 423
423 424 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 425 # refs filtered by `determine_wants` function. We need to filter result
425 426 # as well
426 427 if refs:
427 428 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428 429
429 430 if apply_refs:
430 431 # TODO: johbo: Needs proper test coverage with a git repository
431 432 # that contains a tag object, so that we would end up with
432 433 # a peeled ref at this point.
433 434 PEELED_REF_MARKER = '^{}'
434 435 for k in remote_refs:
435 436 if k.endswith(PEELED_REF_MARKER):
436 437 log.info("Skipping peeled reference %s", k)
437 438 continue
438 439 repo[k] = remote_refs[k]
439 440
440 441 if refs:
441 442 # mikhail: explicitly set the head to the last ref.
442 443 repo['HEAD'] = remote_refs[refs[-1]]
443 444
444 445 # TODO: mikhail: should we return remote_refs here to be
445 446 # consistent?
446 447 else:
447 448 return remote_refs
448 449
449 450 @reraise_safe_exceptions
450 451 def sync_push(self, wire, url, refs=None):
451 452 if self.check_url(url, wire):
452 453 repo = self._factory.repo(wire)
453 454 self.run_git_command(
454 455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 456 _copts=['-c', 'core.askpass=""'],
456 457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
457 458
458 459 @reraise_safe_exceptions
459 460 def get_remote_refs(self, wire, url):
460 461 repo = Repo(url)
461 462 return repo.get_refs()
462 463
463 464 @reraise_safe_exceptions
464 465 def get_description(self, wire):
465 466 repo = self._factory.repo(wire)
466 467 return repo.get_description()
467 468
468 469 @reraise_safe_exceptions
469 470 def get_file_history(self, wire, file_path, commit_id, limit):
470 471 repo = self._factory.repo(wire)
471 472 include = [commit_id]
472 473 paths = [file_path]
473 474
474 475 walker = repo.get_walker(include, paths=paths, max_entries=limit)
475 476 return [x.commit.id for x in walker]
476 477
477 478 @reraise_safe_exceptions
478 479 def get_missing_revs(self, wire, rev1, rev2, path2):
479 480 repo = self._factory.repo(wire)
480 481 LocalGitClient(thin_packs=False).fetch(path2, repo)
481 482
482 483 wire_remote = wire.copy()
483 484 wire_remote['path'] = path2
484 485 repo_remote = self._factory.repo(wire_remote)
485 486 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
486 487
487 488 revs = [
488 489 x.commit.id
489 490 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
490 491 return revs
491 492
492 493 @reraise_safe_exceptions
493 494 def get_object(self, wire, sha):
494 495 repo = self._factory.repo(wire)
495 496 obj = repo.get_object(sha)
496 497 commit_id = obj.id
497 498
498 499 if isinstance(obj, Tag):
499 500 commit_id = obj.object[1]
500 501
501 502 return {
502 503 'id': obj.id,
503 504 'type': obj.type_name,
504 505 'commit_id': commit_id
505 506 }
506 507
507 508 @reraise_safe_exceptions
508 509 def get_object_attrs(self, wire, sha, *attrs):
509 510 repo = self._factory.repo(wire)
510 511 obj = repo.get_object(sha)
511 512 return list(getattr(obj, a) for a in attrs)
512 513
513 514 @reraise_safe_exceptions
514 515 def get_refs(self, wire):
515 516 repo = self._factory.repo(wire)
516 517 result = {}
517 518 for ref, sha in repo.refs.as_dict().items():
518 519 peeled_sha = repo.get_peeled(ref)
519 520 result[ref] = peeled_sha
520 521 return result
521 522
522 523 @reraise_safe_exceptions
523 524 def get_refs_path(self, wire):
524 525 repo = self._factory.repo(wire)
525 526 return repo.refs.path
526 527
527 528 @reraise_safe_exceptions
528 529 def head(self, wire):
529 530 repo = self._factory.repo(wire)
530 531 return repo.head()
531 532
532 533 @reraise_safe_exceptions
533 534 def init(self, wire):
534 535 repo_path = str_to_dulwich(wire['path'])
535 536 self.repo = Repo.init(repo_path)
536 537
537 538 @reraise_safe_exceptions
538 539 def init_bare(self, wire):
539 540 repo_path = str_to_dulwich(wire['path'])
540 541 self.repo = Repo.init_bare(repo_path)
541 542
542 543 @reraise_safe_exceptions
543 544 def revision(self, wire, rev):
544 545 repo = self._factory.repo(wire)
545 546 obj = repo[rev]
546 547 obj_data = {
547 548 'id': obj.id,
548 549 }
549 550 try:
550 551 obj_data['tree'] = obj.tree
551 552 except AttributeError:
552 553 pass
553 554 return obj_data
554 555
555 556 @reraise_safe_exceptions
556 557 def commit_attribute(self, wire, rev, attr):
557 558 repo = self._factory.repo(wire)
558 559 obj = repo[rev]
559 560 return getattr(obj, attr)
560 561
561 562 @reraise_safe_exceptions
562 563 def set_refs(self, wire, key, value):
563 564 repo = self._factory.repo(wire)
564 565 repo.refs[key] = value
565 566
566 567 @reraise_safe_exceptions
567 568 def remove_ref(self, wire, key):
568 569 repo = self._factory.repo(wire)
569 570 del repo.refs[key]
570 571
571 572 @reraise_safe_exceptions
572 573 def tree_changes(self, wire, source_id, target_id):
573 574 repo = self._factory.repo(wire)
574 575 source = repo[source_id].tree if source_id else None
575 576 target = repo[target_id].tree
576 577 result = repo.object_store.tree_changes(source, target)
577 578 return list(result)
578 579
579 580 @reraise_safe_exceptions
580 581 def tree_items(self, wire, tree_id):
581 582 repo = self._factory.repo(wire)
582 583 tree = repo[tree_id]
583 584
584 585 result = []
585 586 for item in tree.iteritems():
586 587 item_sha = item.sha
587 588 item_mode = item.mode
588 589
589 590 if FILE_MODE(item_mode) == GIT_LINK:
590 591 item_type = "link"
591 592 else:
592 593 item_type = repo[item_sha].type_name
593 594
594 595 result.append((item.path, item_mode, item_sha, item_type))
595 596 return result
596 597
597 598 @reraise_safe_exceptions
598 599 def update_server_info(self, wire):
599 600 repo = self._factory.repo(wire)
600 601 update_server_info(repo)
601 602
602 603 @reraise_safe_exceptions
603 604 def discover_git_version(self):
604 605 stdout, _ = self.run_git_command(
605 606 {}, ['--version'], _bare=True, _safe=True)
606 607 prefix = 'git version'
607 608 if stdout.startswith(prefix):
608 609 stdout = stdout[len(prefix):]
609 610 return stdout.strip()
610 611
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """
        Execute the git binary with *cmd* arguments and return
        ``(stdout, stderr)``.

        Recognised pseudo-options (consumed here, not passed to subprocess):
          ``_bare``     -- skip the default ``-c core.quotepath=false`` option
          ``_safe``     -- on spawn failure return ``('', err)`` instead of
                           raising
          ``_copts``    -- extra ``git -c``-style options prepended to *cmd*
          ``extra_env`` -- mapping merged into the subprocess environment
        Remaining ``opts`` are forwarded to SubprocessIOChunker.
        """
        path = wire.get('path', None)

        # run inside the repository when the wire points at a real directory
        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd

        try:
            _opts = {'env': gitenv, 'shell': False}
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            # joining the chunker drains stdout; .error holds captured stderr
            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n" % (cmd, err))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException(tb_err)
658 659
659 660 @reraise_safe_exceptions
660 661 def install_hooks(self, wire, force=False):
661 662 from vcsserver.hook_utils import install_git_hooks
662 663 repo = self._factory.repo(wire)
663 664 return install_git_hooks(repo.path, repo.bare, force_create=force)
664 665
665 666
def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.

    :param value: byte-string path encoded with the wire encoding.
    """
    return value.decode(settings.WIRE_ENCODING)
@@ -1,791 +1,792 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
def make_ui_from_config(repo_config):
    """
    Build a fresh mercurial ``ui`` object primed with *repo_config*, an
    iterable of ``(section, option, value)`` triplets.
    """
    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    baseui.setconfig('ui', 'paginate', 'never')
    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
70 70
71 71
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""
    # function-scope import keeps this block self-contained
    from functools import wraps

    # preserve the wrapped function's metadata (__name__, __doc__), making
    # this decorator consistent with the git backend's equivalent which
    # already applies @wraps
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired):
            raise_from_original(exceptions.AbortException)
        except RepoLookupError:
            # NOTE: must stay before RepoError (it is the more specific case)
            raise_from_original(exceptions.LookupException)
        except RequirementError:
            raise_from_original(exceptions.RequirementException)
        except RepoError:
            raise_from_original(exceptions.VcsException)
        except LookupError:
            raise_from_original(exceptions.LookupException)
        except Exception as e:
            # errors already translated by vcsserver carry _vcs_kind and are
            # re-raised untouched; anything else is wrapped as unhandled
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
93 93
94 94
class MercurialFactory(RepoFactory):
    # backend identifier; participates in the repo-object cache key used by
    # the base factory
    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """
        Turn wire config triplets into a mercurial ``ui`` object.

        With ``hooks=False`` the rhodecode push/pull hook entries are
        stripped from the config before the ui object is built.
        """
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            new_config = []
            for section, option, value in config:
                if section == 'hooks' and option in hooks_to_clean:
                    continue
                new_config.append((section, option, value))
            config = new_config

        baseui = make_ui_from_config(config)
        return baseui

    def _create_repo(self, wire, create):
        """Open (or create) the local mercurial repository named by *wire*."""
        baseui = self._create_config(wire["config"])
        return localrepository(baseui, wire["path"], create)
115 116
116 117
117 118 class HgRemote(object):
118 119
    def __init__(self, factory):
        # factory producing (and caching) repo objects from wire dicts
        self._factory = factory

        # maps attribute names requested via bulk_request() to the method
        # that resolves each of them
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }
136 137
137 138 @reraise_safe_exceptions
138 139 def discover_hg_version(self):
139 140 from mercurial import util
140 141 return util.version()
141 142
    @reraise_safe_exceptions
    def archive_repo(self, archive_path, mtime, file_info, kind):
        """
        Stream ``file_info`` entries into an archive written at
        *archive_path*.

        :param kind: one of ``'tgz'``, ``'tbz2'`` or ``'zip'``; any other
            value raises ``exceptions.ArchiveException``.
        :param file_info: iterable of ``(path, mode, is_link, content)``
            tuples.
        """
        if kind == "tgz":
            archiver = archival.tarit(archive_path, mtime, "gz")
        elif kind == "tbz2":
            archiver = archival.tarit(archive_path, mtime, "bz2")
        elif kind == 'zip':
            archiver = archival.zipit(archive_path, mtime)
        else:
            raise exceptions.ArchiveException(
                'Remote does not support: "%s".' % kind)

        for f_path, f_mode, f_is_link, f_content in file_info:
            archiver.addfile(f_path, f_mode, f_is_link, f_content)
        archiver.done()
157 158
158 159 @reraise_safe_exceptions
159 160 def bookmarks(self, wire):
160 161 repo = self._factory.repo(wire)
161 162 return dict(repo._bookmarks)
162 163
163 164 @reraise_safe_exceptions
164 165 def branches(self, wire, normal, closed):
165 166 repo = self._factory.repo(wire)
166 167 iter_branches = repo.branchmap().iterbranches()
167 168 bt = {}
168 169 for branch_name, _heads, tip, is_closed in iter_branches:
169 170 if normal and not is_closed:
170 171 bt[branch_name] = tip
171 172 if closed and is_closed:
172 173 bt[branch_name] = tip
173 174
174 175 return bt
175 176
176 177 @reraise_safe_exceptions
177 178 def bulk_request(self, wire, rev, pre_load):
178 179 result = {}
179 180 for attr in pre_load:
180 181 try:
181 182 method = self._bulk_methods[attr]
182 183 result[attr] = method(wire, rev)
183 184 except KeyError:
184 185 raise exceptions.VcsException(
185 186 'Unknown bulk attribute: "%s"' % attr)
186 187 return result
187 188
188 189 @reraise_safe_exceptions
189 190 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 191 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 192 clone(baseui, source, dest, noupdate=not update_after_clone)
192 193
    @reraise_safe_exceptions
    def commitctx(
            self, wire, message, parents, commit_time, commit_timezone,
            user, files, extra, removed, updated):
        """
        Create a new changeset from in-memory file data.

        ``updated`` is a list of ``{'path', 'content', 'mode'}`` dicts;
        ``removed`` a list of paths to delete. Returns the hex id of the
        newly committed changeset.
        """

        def _filectxfn(_repo, memctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        changectx=memctx,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        # executable bit derived from the posix file mode
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copied=False)

            raise exceptions.AbortException(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        repo = self._factory.repo(wire)

        commit_ctx = memctx(
            repo=repo,
            parents=parents,
            text=message,
            files=files,
            filectxfn=_filectxfn,
            user=user,
            date=(commit_time, commit_timezone),
            extra=extra)

        n = repo.commitctx(commit_ctx)
        new_id = hex(n)

        return new_id
241 242
242 243 @reraise_safe_exceptions
243 244 def ctx_branch(self, wire, revision):
244 245 repo = self._factory.repo(wire)
245 246 ctx = repo[revision]
246 247 return ctx.branch()
247 248
248 249 @reraise_safe_exceptions
249 250 def ctx_children(self, wire, revision):
250 251 repo = self._factory.repo(wire)
251 252 ctx = repo[revision]
252 253 return [child.rev() for child in ctx.children()]
253 254
254 255 @reraise_safe_exceptions
255 256 def ctx_date(self, wire, revision):
256 257 repo = self._factory.repo(wire)
257 258 ctx = repo[revision]
258 259 return ctx.date()
259 260
260 261 @reraise_safe_exceptions
261 262 def ctx_description(self, wire, revision):
262 263 repo = self._factory.repo(wire)
263 264 ctx = repo[revision]
264 265 return ctx.description()
265 266
266 267 @reraise_safe_exceptions
267 268 def ctx_diff(
268 269 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 270 repo = self._factory.repo(wire)
270 271 ctx = repo[revision]
271 272 result = ctx.diff(
272 273 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 274 return list(result)
274 275
275 276 @reraise_safe_exceptions
276 277 def ctx_files(self, wire, revision):
277 278 repo = self._factory.repo(wire)
278 279 ctx = repo[revision]
279 280 return ctx.files()
280 281
281 282 @reraise_safe_exceptions
282 283 def ctx_list(self, path, revision):
283 284 repo = self._factory.repo(path)
284 285 ctx = repo[revision]
285 286 return list(ctx)
286 287
287 288 @reraise_safe_exceptions
288 289 def ctx_parents(self, wire, revision):
289 290 repo = self._factory.repo(wire)
290 291 ctx = repo[revision]
291 292 return [parent.rev() for parent in ctx.parents()]
292 293
293 294 @reraise_safe_exceptions
294 295 def ctx_phase(self, wire, revision):
295 296 repo = self._factory.repo(wire)
296 297 ctx = repo[revision]
297 298 # public=0, draft=1, secret=3
298 299 return ctx.phase()
299 300
300 301 @reraise_safe_exceptions
301 302 def ctx_obsolete(self, wire, revision):
302 303 repo = self._factory.repo(wire)
303 304 ctx = repo[revision]
304 305 return ctx.obsolete()
305 306
306 307 @reraise_safe_exceptions
307 308 def ctx_hidden(self, wire, revision):
308 309 repo = self._factory.repo(wire)
309 310 ctx = repo[revision]
310 311 return ctx.hidden()
311 312
312 313 @reraise_safe_exceptions
313 314 def ctx_substate(self, wire, revision):
314 315 repo = self._factory.repo(wire)
315 316 ctx = repo[revision]
316 317 return ctx.substate
317 318
318 319 @reraise_safe_exceptions
319 320 def ctx_status(self, wire, revision):
320 321 repo = self._factory.repo(wire)
321 322 ctx = repo[revision]
322 323 status = repo[ctx.p1().node()].status(other=ctx.node())
323 324 # object of status (odd, custom named tuple in mercurial) is not
324 325 # correctly serializable, we make it a list, as the underling
325 326 # API expects this to be a list
326 327 return list(status)
327 328
328 329 @reraise_safe_exceptions
329 330 def ctx_user(self, wire, revision):
330 331 repo = self._factory.repo(wire)
331 332 ctx = repo[revision]
332 333 return ctx.user()
333 334
334 335 @reraise_safe_exceptions
335 336 def check_url(self, url, config):
336 337 _proto = None
337 338 if '+' in url[:url.find('://')]:
338 339 _proto = url[0:url.find('+')]
339 340 url = url[url.find('+') + 1:]
340 341 handlers = []
341 342 url_obj = url_parser(url)
342 343 test_uri, authinfo = url_obj.authinfo()
343 344 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 345 url_obj.query = obfuscate_qs(url_obj.query)
345 346
346 347 cleaned_uri = str(url_obj)
347 348 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348 349
349 350 if authinfo:
350 351 # create a password manager
351 352 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 353 passmgr.add_password(*authinfo)
353 354
354 355 handlers.extend((httpbasicauthhandler(passmgr),
355 356 httpdigestauthhandler(passmgr)))
356 357
357 358 o = urllib2.build_opener(*handlers)
358 359 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 360 ('Accept', 'application/mercurial-0.1')]
360 361
361 362 q = {"cmd": 'between'}
362 363 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 364 qs = '?%s' % urllib.urlencode(q)
364 365 cu = "%s%s" % (test_uri, qs)
365 366 req = urllib2.Request(cu, None, {})
366 367
367 368 try:
368 369 log.debug("Trying to open URL %s", cleaned_uri)
369 370 resp = o.open(req)
370 371 if resp.code != 200:
371 372 raise exceptions.URLError('Return Code is not 200')
372 373 except Exception as e:
373 374 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 375 # means it cannot be cloned
375 376 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
376 377
377 378 # now check if it's a proper hg repo, but don't do it for svn
378 379 try:
379 380 if _proto == 'svn':
380 381 pass
381 382 else:
382 383 # check for pure hg repos
383 384 log.debug(
384 385 "Verifying if URL is a Mercurial repository: %s",
385 386 cleaned_uri)
386 387 ui = make_ui_from_config(config)
387 388 peer_checker = makepeer(ui, url)
388 389 peer_checker.lookup('tip')
389 390 except Exception as e:
390 391 log.warning("URL is not a valid Mercurial repository: %s",
391 392 cleaned_uri)
392 393 raise exceptions.URLError(
393 394 "url [%s] does not look like an hg repo org_exc: %s"
394 395 % (cleaned_uri, e))
395 396
396 397 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
397 398 return True
398 399
399 400 @reraise_safe_exceptions
400 401 def diff(
401 402 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
402 403 context):
403 404 repo = self._factory.repo(wire)
404 405
405 406 if file_filter:
406 407 match_filter = match(file_filter[0], '', [file_filter[1]])
407 408 else:
408 409 match_filter = file_filter
409 410 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
410 411
411 412 try:
412 413 return "".join(patch.diff(
413 414 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
414 415 except RepoLookupError:
415 416 raise exceptions.LookupException()
416 417
417 418 @reraise_safe_exceptions
418 419 def file_history(self, wire, revision, path, limit):
419 420 repo = self._factory.repo(wire)
420 421
421 422 ctx = repo[revision]
422 423 fctx = ctx.filectx(path)
423 424
424 425 def history_iter():
425 426 limit_rev = fctx.rev()
426 427 for obj in reversed(list(fctx.filelog())):
427 428 obj = fctx.filectx(obj)
428 429 if limit_rev >= obj.rev():
429 430 yield obj
430 431
431 432 history = []
432 433 for cnt, obj in enumerate(history_iter()):
433 434 if limit and cnt >= limit:
434 435 break
435 436 history.append(hex(obj.node()))
436 437
437 438 return [x for x in history]
438 439
439 440 @reraise_safe_exceptions
440 441 def file_history_untill(self, wire, revision, path, limit):
441 442 repo = self._factory.repo(wire)
442 443 ctx = repo[revision]
443 444 fctx = ctx.filectx(path)
444 445
445 446 file_log = list(fctx.filelog())
446 447 if limit:
447 448 # Limit to the last n items
448 449 file_log = file_log[-limit:]
449 450
450 451 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
451 452
452 453 @reraise_safe_exceptions
453 454 def fctx_annotate(self, wire, revision, path):
454 455 repo = self._factory.repo(wire)
455 456 ctx = repo[revision]
456 457 fctx = ctx.filectx(path)
457 458
458 459 result = []
459 460 for i, annotate_obj in enumerate(fctx.annotate(), 1):
460 461 ln_no = i
461 462 sha = hex(annotate_obj.fctx.node())
462 463 content = annotate_obj.text
463 464 result.append((ln_no, sha, content))
464 465 return result
465 466
466 467 @reraise_safe_exceptions
467 468 def fctx_data(self, wire, revision, path):
468 469 repo = self._factory.repo(wire)
469 470 ctx = repo[revision]
470 471 fctx = ctx.filectx(path)
471 472 return fctx.data()
472 473
473 474 @reraise_safe_exceptions
474 475 def fctx_flags(self, wire, revision, path):
475 476 repo = self._factory.repo(wire)
476 477 ctx = repo[revision]
477 478 fctx = ctx.filectx(path)
478 479 return fctx.flags()
479 480
480 481 @reraise_safe_exceptions
481 482 def fctx_size(self, wire, revision, path):
482 483 repo = self._factory.repo(wire)
483 484 ctx = repo[revision]
484 485 fctx = ctx.filectx(path)
485 486 return fctx.size()
486 487
487 488 @reraise_safe_exceptions
488 489 def get_all_commit_ids(self, wire, name):
489 490 repo = self._factory.repo(wire)
490 491 revs = repo.filtered(name).changelog.index
491 492 return map(lambda x: hex(x[7]), revs)[:-1]
492 493
493 494 @reraise_safe_exceptions
494 495 def get_config_value(self, wire, section, name, untrusted=False):
495 496 repo = self._factory.repo(wire)
496 497 return repo.ui.config(section, name, untrusted=untrusted)
497 498
498 499 @reraise_safe_exceptions
499 500 def get_config_bool(self, wire, section, name, untrusted=False):
500 501 repo = self._factory.repo(wire)
501 502 return repo.ui.configbool(section, name, untrusted=untrusted)
502 503
503 504 @reraise_safe_exceptions
504 505 def get_config_list(self, wire, section, name, untrusted=False):
505 506 repo = self._factory.repo(wire)
506 507 return repo.ui.configlist(section, name, untrusted=untrusted)
507 508
508 509 @reraise_safe_exceptions
509 510 def is_large_file(self, wire, path):
510 511 return largefiles.lfutil.isstandin(path)
511 512
512 513 @reraise_safe_exceptions
513 514 def in_largefiles_store(self, wire, sha):
514 515 repo = self._factory.repo(wire)
515 516 return largefiles.lfutil.instore(repo, sha)
516 517
517 518 @reraise_safe_exceptions
518 519 def in_user_cache(self, wire, sha):
519 520 repo = self._factory.repo(wire)
520 521 return largefiles.lfutil.inusercache(repo.ui, sha)
521 522
522 523 @reraise_safe_exceptions
523 524 def store_path(self, wire, sha):
524 525 repo = self._factory.repo(wire)
525 526 return largefiles.lfutil.storepath(repo, sha)
526 527
527 528 @reraise_safe_exceptions
528 529 def link(self, wire, sha, path):
529 530 repo = self._factory.repo(wire)
530 531 largefiles.lfutil.link(
531 532 largefiles.lfutil.usercachepath(repo.ui, sha), path)
532 533
533 534 @reraise_safe_exceptions
534 535 def localrepository(self, wire, create=False):
535 536 self._factory.repo(wire, create=create)
536 537
537 538 @reraise_safe_exceptions
538 539 def lookup(self, wire, revision, both):
539 540
540 541 repo = self._factory.repo(wire)
541 542
542 543 if isinstance(revision, int):
543 544 # NOTE(marcink):
544 545 # since Mercurial doesn't support indexes properly
545 546 # we need to shift accordingly by one to get proper index, e.g
546 547 # repo[-1] => repo[-2]
547 548 # repo[0] => repo[-1]
548 549 # repo[1] => repo[2] we also never call repo[0] because
549 550 # it's actually second commit
550 551 if revision <= 0:
551 552 revision = revision + -1
552 553 else:
553 554 revision = revision + 1
554 555
555 556 try:
556 557 ctx = repo[revision]
557 558 except RepoLookupError:
558 559 raise exceptions.LookupException(revision)
559 560 except LookupError as e:
560 561 raise exceptions.LookupException(e.name)
561 562
562 563 if not both:
563 564 return ctx.hex()
564 565
565 566 ctx = repo[ctx.hex()]
566 567 return ctx.hex(), ctx.rev()
567 568
568 569 @reraise_safe_exceptions
569 570 def pull(self, wire, url, commit_ids=None):
570 571 repo = self._factory.repo(wire)
571 572 # Disable any prompts for this repo
572 573 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
573 574
574 575 remote = peer(repo, {}, url)
575 576 # Disable any prompts for this remote
576 577 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
577 578
578 579 if commit_ids:
579 580 commit_ids = [bin(commit_id) for commit_id in commit_ids]
580 581
581 582 return exchange.pull(
582 583 repo, remote, heads=commit_ids, force=None).cgresult
583 584
584 585 @reraise_safe_exceptions
585 586 def sync_push(self, wire, url):
586 587 if self.check_url(url, wire['config']):
587 588 repo = self._factory.repo(wire)
588 589
589 590 # Disable any prompts for this repo
590 591 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
591 592
592 593 bookmarks = dict(repo._bookmarks).keys()
593 594 remote = peer(repo, {}, url)
594 595 # Disable any prompts for this remote
595 596 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
596 597
597 598 return exchange.push(
598 599 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
599 600
600 601 @reraise_safe_exceptions
601 602 def revision(self, wire, rev):
602 603 repo = self._factory.repo(wire)
603 604 ctx = repo[rev]
604 605 return ctx.rev()
605 606
606 607 @reraise_safe_exceptions
607 608 def rev_range(self, wire, filter):
608 609 repo = self._factory.repo(wire)
609 610 revisions = [rev for rev in revrange(repo, filter)]
610 611 return revisions
611 612
612 613 @reraise_safe_exceptions
613 614 def rev_range_hash(self, wire, node):
614 615 repo = self._factory.repo(wire)
615 616
616 617 def get_revs(repo, rev_opt):
617 618 if rev_opt:
618 619 revs = revrange(repo, rev_opt)
619 620 if len(revs) == 0:
620 621 return (nullrev, nullrev)
621 622 return max(revs), min(revs)
622 623 else:
623 624 return len(repo) - 1, 0
624 625
625 626 stop, start = get_revs(repo, [node + ':'])
626 627 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
627 628 return revs
628 629
629 630 @reraise_safe_exceptions
630 631 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
631 632 other_path = kwargs.pop('other_path', None)
632 633
633 634 # case when we want to compare two independent repositories
634 635 if other_path and other_path != wire["path"]:
635 636 baseui = self._factory._create_config(wire["config"])
636 637 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
637 638 else:
638 639 repo = self._factory.repo(wire)
639 640 return list(repo.revs(rev_spec, *args))
640 641
641 642 @reraise_safe_exceptions
642 643 def strip(self, wire, revision, update, backup):
643 644 repo = self._factory.repo(wire)
644 645 ctx = repo[revision]
645 646 hgext_strip(
646 647 repo.baseui, repo, ctx.node(), update=update, backup=backup)
647 648
648 649 @reraise_safe_exceptions
649 650 def verify(self, wire,):
650 651 repo = self._factory.repo(wire)
651 652 baseui = self._factory._create_config(wire['config'])
652 653 baseui.setconfig('ui', 'quiet', 'false')
653 654 output = io.BytesIO()
654 655
655 656 def write(data, **unused_kwargs):
656 657 output.write(data)
657 658 baseui.write = write
658 659
659 660 repo.ui = baseui
660 661 verify.verify(repo)
661 662 return output.getvalue()
662 663
663 664 @reraise_safe_exceptions
664 665 def tag(self, wire, name, revision, message, local, user,
665 666 tag_time, tag_timezone):
666 667 repo = self._factory.repo(wire)
667 668 ctx = repo[revision]
668 669 node = ctx.node()
669 670
670 671 date = (tag_time, tag_timezone)
671 672 try:
672 673 hg_tag.tag(repo, name, node, message, local, user, date)
673 674 except Abort as e:
674 675 log.exception("Tag operation aborted")
675 676 # Exception can contain unicode which we convert
676 677 raise exceptions.AbortException(repr(e))
677 678
678 679 @reraise_safe_exceptions
679 680 def tags(self, wire):
680 681 repo = self._factory.repo(wire)
681 682 return repo.tags()
682 683
683 684 @reraise_safe_exceptions
684 685 def update(self, wire, node=None, clean=False):
685 686 repo = self._factory.repo(wire)
686 687 baseui = self._factory._create_config(wire['config'])
687 688 commands.update(baseui, repo, node=node, clean=clean)
688 689
689 690 @reraise_safe_exceptions
690 691 def identify(self, wire):
691 692 repo = self._factory.repo(wire)
692 693 baseui = self._factory._create_config(wire['config'])
693 694 output = io.BytesIO()
694 695 baseui.write = output.write
695 696 # This is required to get a full node id
696 697 baseui.debugflag = True
697 698 commands.identify(baseui, repo, id=True)
698 699
699 700 return output.getvalue()
700 701
701 702 @reraise_safe_exceptions
702 703 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
703 704 hooks=True):
704 705 repo = self._factory.repo(wire)
705 706 baseui = self._factory._create_config(wire['config'], hooks=hooks)
706 707
707 708 # Mercurial internally has a lot of logic that checks ONLY if
708 709 # option is defined, we just pass those if they are defined then
709 710 opts = {}
710 711 if bookmark:
711 712 opts['bookmark'] = bookmark
712 713 if branch:
713 714 opts['branch'] = branch
714 715 if revision:
715 716 opts['rev'] = revision
716 717
717 718 commands.pull(baseui, repo, source, **opts)
718 719
719 720 @reraise_safe_exceptions
720 721 def heads(self, wire, branch=None):
721 722 repo = self._factory.repo(wire)
722 723 baseui = self._factory._create_config(wire['config'])
723 724 output = io.BytesIO()
724 725
725 726 def write(data, **unused_kwargs):
726 727 output.write(data)
727 728
728 729 baseui.write = write
729 730 if branch:
730 731 args = [branch]
731 732 else:
732 733 args = []
733 734 commands.heads(baseui, repo, template='{node} ', *args)
734 735
735 736 return output.getvalue()
736 737
737 738 @reraise_safe_exceptions
738 739 def ancestor(self, wire, revision1, revision2):
739 740 repo = self._factory.repo(wire)
740 741 changelog = repo.changelog
741 742 lookup = repo.lookup
742 743 a = changelog.ancestor(lookup(revision1), lookup(revision2))
743 744 return hex(a)
744 745
745 746 @reraise_safe_exceptions
746 747 def push(self, wire, revisions, dest_path, hooks=True,
747 748 push_branches=False):
748 749 repo = self._factory.repo(wire)
749 750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
750 751 commands.push(baseui, repo, dest=dest_path, rev=revisions,
751 752 new_branch=push_branches)
752 753
753 754 @reraise_safe_exceptions
754 755 def merge(self, wire, revision):
755 756 repo = self._factory.repo(wire)
756 757 baseui = self._factory._create_config(wire['config'])
757 758 repo.ui.setconfig('ui', 'merge', 'internal:dump')
758 759
759 760 # In case of sub repositories are used mercurial prompts the user in
760 761 # case of merge conflicts or different sub repository sources. By
761 762 # setting the interactive flag to `False` mercurial doesn't prompt the
762 763 # used but instead uses a default value.
763 764 repo.ui.setconfig('ui', 'interactive', False)
764 765
765 766 commands.merge(baseui, repo, rev=revision)
766 767
767 768 @reraise_safe_exceptions
768 769 def commit(self, wire, message, username, close_branch=False):
769 770 repo = self._factory.repo(wire)
770 771 baseui = self._factory._create_config(wire['config'])
771 772 repo.ui.setconfig('ui', 'username', username)
772 773 commands.commit(baseui, repo, message=message, close_branch=close_branch)
773 774
774 775 @reraise_safe_exceptions
775 776 def rebase(self, wire, source=None, dest=None, abort=False):
776 777 repo = self._factory.repo(wire)
777 778 baseui = self._factory._create_config(wire['config'])
778 779 repo.ui.setconfig('ui', 'merge', 'internal:dump')
779 780 rebase.rebase(
780 781 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
781 782
782 783 @reraise_safe_exceptions
783 784 def bookmark(self, wire, bookmark, revision=None):
784 785 repo = self._factory.repo(wire)
785 786 baseui = self._factory._create_config(wire['config'])
786 787 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
787 788
788 789 @reraise_safe_exceptions
789 790 def install_hooks(self, wire, force=False):
790 791 # we don't need any special hooks for Mercurial
791 792 pass
@@ -1,490 +1,523 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import base64
20 20 import locale
21 21 import logging
22 22 import uuid
23 23 import wsgiref.util
24 24 import traceback
25 25 from itertools import chain
26 26
27 27 import simplejson as json
28 28 import msgpack
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
31 29 from pyramid.config import Configurator
30 from pyramid.settings import asbool, aslist
32 31 from pyramid.wsgi import wsgiapp
33 32 from pyramid.compat import configparser
34 33
35 34 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
36 35 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
37 36 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
38 37 from vcsserver.echo_stub.echo_app import EchoApp
39 38 from vcsserver.exceptions import HTTPRepoLocked
40 39 from vcsserver.server import VcsServer
41 40
42 41 try:
43 42 from vcsserver.git import GitFactory, GitRemote
44 43 except ImportError:
45 44 GitFactory = None
46 45 GitRemote = None
47 46
48 47 try:
49 48 from vcsserver.hg import MercurialFactory, HgRemote
50 49 except ImportError:
51 50 MercurialFactory = None
52 51 HgRemote = None
53 52
54 53 try:
55 54 from vcsserver.svn import SubversionFactory, SvnRemote
56 55 except ImportError:
57 56 SubversionFactory = None
58 57 SvnRemote = None
59 58
60 59 log = logging.getLogger(__name__)
61 60
62 61
63 62 def _is_request_chunked(environ):
64 63 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
65 64 return stream
66 65
67 66
67 def _int_setting(settings, name, default):
68 settings[name] = int(settings.get(name, default))
69
70
71 def _bool_setting(settings, name, default):
72 input_val = settings.get(name, default)
73 if isinstance(input_val, unicode):
74 input_val = input_val.encode('utf8')
75 settings[name] = asbool(input_val)
76
77
78 def _list_setting(settings, name, default):
79 raw_value = settings.get(name, default)
80
81 # Otherwise we assume it uses pyramids space/newline separation.
82 settings[name] = aslist(raw_value)
83
84
85 def _string_setting(settings, name, default, lower=True):
86 value = settings.get(name, default)
87 if lower:
88 value = value.lower()
89 settings[name] = value
90
91
68 92 class VCS(object):
69 93 def __init__(self, locale=None, cache_config=None):
70 94 self.locale = locale
71 95 self.cache_config = cache_config
72 96 self._configure_locale()
73 self._initialize_cache()
74 97
75 98 if GitFactory and GitRemote:
76 git_repo_cache = self.cache.get_cache_region(
77 'git', region='repo_object')
78 git_factory = GitFactory(git_repo_cache)
99 git_factory = GitFactory()
79 100 self._git_remote = GitRemote(git_factory)
80 101 else:
81 102 log.info("Git client import failed")
82 103
83 104 if MercurialFactory and HgRemote:
84 hg_repo_cache = self.cache.get_cache_region(
85 'hg', region='repo_object')
86 hg_factory = MercurialFactory(hg_repo_cache)
105 hg_factory = MercurialFactory()
87 106 self._hg_remote = HgRemote(hg_factory)
88 107 else:
89 108 log.info("Mercurial client import failed")
90 109
91 110 if SubversionFactory and SvnRemote:
92 svn_repo_cache = self.cache.get_cache_region(
93 'svn', region='repo_object')
94 svn_factory = SubversionFactory(svn_repo_cache)
111 svn_factory = SubversionFactory()
112
95 113 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
114 hg_factory = MercurialFactory()
99 115 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
100 116 else:
101 117 log.info("Subversion client import failed")
102 118
103 119 self._vcsserver = VcsServer()
104 120
105 def _initialize_cache(self):
106 cache_config = parse_cache_config_options(self.cache_config)
107 log.info('Initializing beaker cache: %s' % cache_config)
108 self.cache = CacheManager(**cache_config)
109
110 121 def _configure_locale(self):
111 122 if self.locale:
112 123 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
113 124 else:
114 125 log.info(
115 126 'Configuring locale subsystem based on environment variables')
116 127 try:
117 128 # If self.locale is the empty string, then the locale
118 129 # module will use the environment variables. See the
119 130 # documentation of the package `locale`.
120 131 locale.setlocale(locale.LC_ALL, self.locale)
121 132
122 133 language_code, encoding = locale.getlocale()
123 134 log.info(
124 135 'Locale set to language code "%s" with encoding "%s".',
125 136 language_code, encoding)
126 137 except locale.Error:
127 138 log.exception(
128 139 'Cannot set locale, not configuring the locale system')
129 140
130 141
131 142 class WsgiProxy(object):
132 143 def __init__(self, wsgi):
133 144 self.wsgi = wsgi
134 145
135 146 def __call__(self, environ, start_response):
136 147 input_data = environ['wsgi.input'].read()
137 148 input_data = msgpack.unpackb(input_data)
138 149
139 150 error = None
140 151 try:
141 152 data, status, headers = self.wsgi.handle(
142 153 input_data['environment'], input_data['input_data'],
143 154 *input_data['args'], **input_data['kwargs'])
144 155 except Exception as e:
145 156 data, status, headers = [], None, None
146 157 error = {
147 158 'message': str(e),
148 159 '_vcs_kind': getattr(e, '_vcs_kind', None)
149 160 }
150 161
151 162 start_response(200, {})
152 163 return self._iterator(error, status, headers, data)
153 164
154 165 def _iterator(self, error, status, headers, data):
155 166 initial_data = [
156 167 error,
157 168 status,
158 169 headers,
159 170 ]
160 171
161 172 for d in chain(initial_data, data):
162 173 yield msgpack.packb(d)
163 174
164 175
165 176 class HTTPApplication(object):
166 177 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
167 178
168 179 remote_wsgi = remote_wsgi
169 180 _use_echo_app = False
170 181
171 182 def __init__(self, settings=None, global_config=None):
183 self._sanitize_settings_and_apply_defaults(settings)
184
172 185 self.config = Configurator(settings=settings)
173 186 self.global_config = global_config
187 self.config.include('vcsserver.lib.rc_cache')
174 188
175 189 locale = settings.get('locale', '') or 'en_US.UTF-8'
176 190 vcs = VCS(locale=locale, cache_config=settings)
177 191 self._remotes = {
178 192 'hg': vcs._hg_remote,
179 193 'git': vcs._git_remote,
180 194 'svn': vcs._svn_remote,
181 195 'server': vcs._vcsserver,
182 196 }
183 197 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
184 198 self._use_echo_app = True
185 199 log.warning("Using EchoApp for VCS operations.")
186 200 self.remote_wsgi = remote_wsgi_stub
187 201 self._configure_settings(settings)
188 202 self._configure()
189 203
190 204 def _configure_settings(self, app_settings):
191 205 """
192 206 Configure the settings module.
193 207 """
194 208 git_path = app_settings.get('git_path', None)
195 209 if git_path:
196 210 settings.GIT_EXECUTABLE = git_path
197 211 binary_dir = app_settings.get('core.binary_dir', None)
198 212 if binary_dir:
199 213 settings.BINARY_DIR = binary_dir
200 214
215 def _sanitize_settings_and_apply_defaults(self, settings):
216 # repo_object cache
217 _string_setting(
218 settings,
219 'rc_cache.repo_object.backend',
220 'dogpile.cache.rc.memory_lru')
221 _int_setting(
222 settings,
223 'rc_cache.repo_object.expiration_time',
224 300)
225 _int_setting(
226 settings,
227 'rc_cache.repo_object.max_size',
228 1024)
229
201 230 def _configure(self):
202 231 self.config.add_renderer(
203 232 name='msgpack',
204 233 factory=self._msgpack_renderer_factory)
205 234
206 235 self.config.add_route('service', '/_service')
207 236 self.config.add_route('status', '/status')
208 237 self.config.add_route('hg_proxy', '/proxy/hg')
209 238 self.config.add_route('git_proxy', '/proxy/git')
210 239 self.config.add_route('vcs', '/{backend}')
211 240 self.config.add_route('stream_git', '/stream/git/*repo_name')
212 241 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
213 242
214 243 self.config.add_view(
215 244 self.status_view, route_name='status', renderer='json')
216 245 self.config.add_view(
217 246 self.service_view, route_name='service', renderer='msgpack')
218 247
219 248 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
220 249 self.config.add_view(self.git_proxy(), route_name='git_proxy')
221 250 self.config.add_view(
222 251 self.vcs_view, route_name='vcs', renderer='msgpack',
223 252 custom_predicates=[self.is_vcs_view])
224 253
225 254 self.config.add_view(self.hg_stream(), route_name='stream_hg')
226 255 self.config.add_view(self.git_stream(), route_name='stream_git')
227 256
228 257 def notfound(request):
229 258 return {'status': '404 NOT FOUND'}
230 259 self.config.add_notfound_view(notfound, renderer='json')
231 260
232 261 self.config.add_view(self.handle_vcs_exception, context=Exception)
233 262
234 263 self.config.add_tween(
235 264 'vcsserver.tweens.RequestWrapperTween',
236 265 )
237 266
238 267 def wsgi_app(self):
239 268 return self.config.make_wsgi_app()
240 269
241 270 def vcs_view(self, request):
242 271 remote = self._remotes[request.matchdict['backend']]
243 272 payload = msgpack.unpackb(request.body, use_list=True)
244 273 method = payload.get('method')
245 274 params = payload.get('params')
246 275 wire = params.get('wire')
247 276 args = params.get('args')
248 277 kwargs = params.get('kwargs')
278 context_uid = None
279
249 280 if wire:
250 281 try:
251 wire['context'] = uuid.UUID(wire['context'])
282 wire['context'] = context_uid = uuid.UUID(wire['context'])
252 283 except KeyError:
253 284 pass
254 285 args.insert(0, wire)
255 286
256 log.debug('method called:%s with kwargs:%s', method, kwargs)
287 log.debug('method called:%s with kwargs:%s context_uid: %s',
288 method, kwargs, context_uid)
257 289 try:
258 290 resp = getattr(remote, method)(*args, **kwargs)
259 291 except Exception as e:
260 292 tb_info = traceback.format_exc()
261 293
262 294 type_ = e.__class__.__name__
263 295 if type_ not in self.ALLOWED_EXCEPTIONS:
264 296 type_ = None
265 297
266 298 resp = {
267 299 'id': payload.get('id'),
268 300 'error': {
269 301 'message': e.message,
270 302 'traceback': tb_info,
271 303 'type': type_
272 304 }
273 305 }
274 306 try:
275 resp['error']['_vcs_kind'] = e._vcs_kind
307 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
276 308 except AttributeError:
277 309 pass
278 310 else:
279 311 resp = {
280 312 'id': payload.get('id'),
281 313 'result': resp
282 314 }
283 315
284 316 return resp
285 317
286 318 def status_view(self, request):
287 319 import vcsserver
288 320 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
289 321 'pid': os.getpid()}
290 322
291 323 def service_view(self, request):
292 324 import vcsserver
293 325
294 326 payload = msgpack.unpackb(request.body, use_list=True)
295 327
296 328 try:
297 329 path = self.global_config['__file__']
298 330 config = configparser.ConfigParser()
299 331 config.read(path)
300 332 parsed_ini = config
301 333 if parsed_ini.has_section('server:main'):
302 334 parsed_ini = dict(parsed_ini.items('server:main'))
303 335 except Exception:
304 336 log.exception('Failed to read .ini file for display')
305 337 parsed_ini = {}
306 338
307 339 resp = {
308 340 'id': payload.get('id'),
309 341 'result': dict(
310 342 version=vcsserver.__version__,
311 343 config=parsed_ini,
312 344 payload=payload,
313 345 )
314 346 }
315 347 return resp
316 348
317 349 def _msgpack_renderer_factory(self, info):
318 350 def _render(value, system):
319 351 value = msgpack.packb(value)
320 352 request = system.get('request')
321 353 if request is not None:
322 354 response = request.response
323 355 ct = response.content_type
324 356 if ct == response.default_content_type:
325 357 response.content_type = 'application/x-msgpack'
326 358 return value
327 359 return _render
328 360
329 361 def set_env_from_config(self, environ, config):
330 362 dict_conf = {}
331 363 try:
332 364 for elem in config:
333 365 if elem[0] == 'rhodecode':
334 366 dict_conf = json.loads(elem[2])
335 367 break
336 368 except Exception:
337 369 log.exception('Failed to fetch SCM CONFIG')
338 370 return
339 371
340 372 username = dict_conf.get('username')
341 373 if username:
342 374 environ['REMOTE_USER'] = username
343 375 # mercurial specific, some extension api rely on this
344 376 environ['HGUSER'] = username
345 377
346 378 ip = dict_conf.get('ip')
347 379 if ip:
348 380 environ['REMOTE_HOST'] = ip
349 381
350 382 if _is_request_chunked(environ):
351 383 # set the compatibility flag for webob
352 384 environ['wsgi.input_terminated'] = True
353 385
354 386 def hg_proxy(self):
355 387 @wsgiapp
356 388 def _hg_proxy(environ, start_response):
357 389 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
358 390 return app(environ, start_response)
359 391 return _hg_proxy
360 392
361 393 def git_proxy(self):
362 394 @wsgiapp
363 395 def _git_proxy(environ, start_response):
364 396 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
365 397 return app(environ, start_response)
366 398 return _git_proxy
367 399
368 400 def hg_stream(self):
369 401 if self._use_echo_app:
370 402 @wsgiapp
371 403 def _hg_stream(environ, start_response):
372 404 app = EchoApp('fake_path', 'fake_name', None)
373 405 return app(environ, start_response)
374 406 return _hg_stream
375 407 else:
376 408 @wsgiapp
377 409 def _hg_stream(environ, start_response):
378 410 log.debug('http-app: handling hg stream')
379 411 repo_path = environ['HTTP_X_RC_REPO_PATH']
380 412 repo_name = environ['HTTP_X_RC_REPO_NAME']
381 413 packed_config = base64.b64decode(
382 414 environ['HTTP_X_RC_REPO_CONFIG'])
383 415 config = msgpack.unpackb(packed_config)
384 416 app = scm_app.create_hg_wsgi_app(
385 417 repo_path, repo_name, config)
386 418
387 419 # Consistent path information for hgweb
388 420 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
389 421 environ['REPO_NAME'] = repo_name
390 422 self.set_env_from_config(environ, config)
391 423
392 424 log.debug('http-app: starting app handler '
393 425 'with %s and process request', app)
394 426 return app(environ, ResponseFilter(start_response))
395 427 return _hg_stream
396 428
397 429 def git_stream(self):
398 430 if self._use_echo_app:
399 431 @wsgiapp
400 432 def _git_stream(environ, start_response):
401 433 app = EchoApp('fake_path', 'fake_name', None)
402 434 return app(environ, start_response)
403 435 return _git_stream
404 436 else:
405 437 @wsgiapp
406 438 def _git_stream(environ, start_response):
407 439 log.debug('http-app: handling git stream')
408 440 repo_path = environ['HTTP_X_RC_REPO_PATH']
409 441 repo_name = environ['HTTP_X_RC_REPO_NAME']
410 442 packed_config = base64.b64decode(
411 443 environ['HTTP_X_RC_REPO_CONFIG'])
412 444 config = msgpack.unpackb(packed_config)
413 445
414 446 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
415 447 self.set_env_from_config(environ, config)
416 448
417 449 content_type = environ.get('CONTENT_TYPE', '')
418 450
419 451 path = environ['PATH_INFO']
420 452 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
421 453 log.debug(
422 454 'LFS: Detecting if request `%s` is LFS server path based '
423 455 'on content type:`%s`, is_lfs:%s',
424 456 path, content_type, is_lfs_request)
425 457
426 458 if not is_lfs_request:
427 459 # fallback detection by path
428 460 if GIT_LFS_PROTO_PAT.match(path):
429 461 is_lfs_request = True
430 462 log.debug(
431 463 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
432 464 path, is_lfs_request)
433 465
434 466 if is_lfs_request:
435 467 app = scm_app.create_git_lfs_wsgi_app(
436 468 repo_path, repo_name, config)
437 469 else:
438 470 app = scm_app.create_git_wsgi_app(
439 471 repo_path, repo_name, config)
440 472
441 473 log.debug('http-app: starting app handler '
442 474 'with %s and process request', app)
443 475
444 476 return app(environ, start_response)
445 477
446 478 return _git_stream
447 479
448 480 def is_vcs_view(self, context, request):
449 481 """
450 482 View predicate that returns true if given backend is supported by
451 483 defined remotes.
452 484 """
453 485 backend = request.matchdict.get('backend')
454 486 return backend in self._remotes
455 487
456 488 def handle_vcs_exception(self, exception, request):
457 489 _vcs_kind = getattr(exception, '_vcs_kind', '')
458 490 if _vcs_kind == 'repo_locked':
459 491 # Get custom repo-locked status code if present.
460 492 status_code = request.headers.get('X-RC-Locked-Status-Code')
461 493 return HTTPRepoLocked(
462 494 title=exception.message, status_code=status_code)
463 495 traceback_info = 'unavailable'
464 496 if request.exc_info:
465 497 traceback_info = traceback.format_exc(request.exc_info[2])
466 498
467 499 log.error(
468 500 'error occurred handling this request for path: %s, \n tb: %s',
469 501 request.path, traceback_info)
470 502 raise exception
471 503
472 504
class ResponseFilter(object):
    """Wraps a WSGI ``start_response`` callable and strips hop-by-hop
    headers before handing the headers upstream."""

    def __init__(self, start_response):
        self._start_response = start_response

    def __call__(self, status, response_headers, exc_info=None):
        end_to_end_headers = tuple(
            (name, value) for name, value in response_headers
            if not wsgiref.util.is_hop_by_hop(name))
        return self._start_response(status, end_to_end_headers, exc_info)
483 515
484 516
def main(global_config, **settings):
    """Paste/WSGI entry point: patch Mercurial (when its bindings are
    available) and build the HTTP application."""
    if MercurialFactory:
        hgpatches.patch_largefiles_capabilities()
        hgpatches.patch_subrepo_type_mapping()

    http_app = HTTPApplication(
        settings=settings, global_config=global_config)
    return http_app.wsgi_app()
@@ -1,689 +1,705 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 from urllib2 import URLError
22 22 import logging
23 23 import posixpath as vcspath
24 24 import StringIO
25 25 import urllib
26 26 import traceback
27 27
28 28 import svn.client
29 29 import svn.core
30 30 import svn.delta
31 31 import svn.diff
32 32 import svn.fs
33 33 import svn.repos
34 34
35 35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 36 from vcsserver.base import RepoFactory, raise_from_original
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
# Mapping of svn compatibility flags to the minimum svn version they select.
# Compare with subversion/svnadmin/svnadmin.c
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Set of svn compatible version flags, derived from the map above so the
# two collections can never drift apart.
svn_compatible_versions = set(svn_compatible_versions_map)
58 58
59 59
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral.

    Exceptions carrying a ``_vcs_kind`` marker are considered already safe
    and re-raised as-is; anything else is logged and re-raised as
    ``exceptions.UnhandledException`` with the original traceback.
    """
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                # fixed: this module wraps svn calls, not hg ones
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
71 71
72 72
class SubversionFactory(RepoFactory):
    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        """Open (or create) the svn repository at ``wire['path']``.

        ``compatible_version`` must be one of ``svn_compatible_versions``;
        it is translated into the fs ``compatible-version`` setting.
        """
        path = svn.core.svn_path_canonicalize(wire['path'])
        if create:
            fs_config = {'compatible-version': '1.9'}
            if compatible_version:
                if compatible_version not in svn_compatible_versions:
                    raise Exception('Unknown SVN compatible version "{}"'
                                    .format(compatible_version))
                fs_config['compatible-version'] = \
                    svn_compatible_versions_map[compatible_version]

            log.debug('Create SVN repo with config "%s"', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)
        else:
            repo = svn.repos.open(path)

        log.debug('Got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.

        Uses the low level dogpile cache API directly since the wrapping
        decorators introduce significant overhead.
        """
        region = self._cache_region
        context = wire.get('context', None)
        repo_path = wire.get('path', '')
        context_uid = '{}'.format(context)
        cache = wire.get('cache', True)
        # only cache when a call context is present and caching is enabled
        cache_on = context and cache

        @region.conditional_cache_on_arguments(condition=cache_on)
        def create_new_repo(_repo_type, _repo_path, _context_uid,
                            compatible_version_id):
            return self._create_repo(wire, create, compatible_version)

        return create_new_repo(self.repo_type, repo_path, context_uid,
                               compatible_version)
99 115
100 116
# Translate svn node-kind constants into the string names used on the wire;
# lookups go through .get() so unknown kinds resolve to None.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
105 121
106 122
class SvnRemote(object):
    """Exposes Subversion repository operations to the vcsserver wire
    protocol. Every public method receives a ``wire`` dict describing the
    repository (at minimum its ``path``)."""

    def __init__(self, factory, hg_factory=None):
        self._factory = factory
        # TODO: Remove once we do not use internal Mercurial objects anymore
        # for subversion
        self._hg_factory = hg_factory

    @reraise_safe_exceptions
    def discover_svn_version(self):
        """Return the version string of the svn bindings, or None when the
        bindings cannot be imported."""
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None
        return svn_ver

    def check_url(self, url, config_items):
        """Check that *url* is a reachable Subversion source.

        Raises URLError when the url cannot be opened as an svn repo.
        """
        # this can throw exception if not installed, but we detect this
        from hgsubversion import svnrepo

        baseui = self._hg_factory._create_config(config_items)
        # uuid function gets only a valid UUID from a proper repo, else
        # throws exception
        try:
            svnrepo.svnremoterepo(baseui, url).svn.uuid
        except Exception:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
            raise URLError(
                '"%s" is not a valid Subversion source url.' % (url, ))
        return True

    def is_path_valid_repository(self, wire, path):
        """Cheaply verify that *path* holds an svn repository."""

        # NOTE(marcink): short circuit the check for SVN repo
        # the repos.open might be expensive to check, but we have one cheap
        # pre condition that we can use, to check for 'format' file
        if not os.path.isfile(os.path.join(path, 'format')):
            return False

        try:
            svn.repos.open(path)
        except svn.core.SubversionException:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
            return False
        return True

    @reraise_safe_exceptions
    def verify(self, wire):
        """Run ``svnadmin info`` against the repository; return its stdout."""
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        cmd = ['svnadmin', 'info', repo_path]
        stdout, stderr = subprocessio.run_command(cmd)
        return stdout

    def lookup(self, wire, revision):
        """Resolve a HEAD-like revision (-1/None/'HEAD') to the youngest
        revision number; other inputs are not supported."""
        if revision not in [-1, None, 'HEAD']:
            raise NotImplementedError
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        head = svn.fs.youngest_rev(fs_ptr)
        return head

    def lookup_interval(self, wire, start_ts, end_ts):
        """Map a timestamp interval to an inclusive revision range."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if start_ts:
            start_ts_svn = apr_time_t(start_ts)
            # +1: dated_revision returns the last revision *before* the date
            start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
        else:
            start_rev = 1
        if end_ts:
            end_ts_svn = apr_time_t(end_ts)
            end_rev = svn.repos.dated_revision(repo, end_ts_svn)
        else:
            end_rev = svn.fs.youngest_rev(fsobj)
        return start_rev, end_rev

    def revision_properties(self, wire, revision):
        """Return the revision property list for *revision*."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        return svn.fs.revision_proplist(fs_ptr, revision)

    def revision_changes(self, wire, revision):
        """Return dict of 'added'/'changed'/'removed' file paths for a rev."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.iteritems():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.
            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action in [svn.repos.CHANGE_ACTION_ADD]:
                added.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
                                   svn.repos.CHANGE_ACTION_REPLACE]:
                changed.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action %s not supported on path %s" % (
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes

    def node_history(self, wire, path, revision, limit):
        """Return up to *limit* revision numbers in which *path* changed."""
        cross_copies = False
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        history_revisions = []
        history = svn.fs.node_history(rev_root, path)
        history = svn.fs.history_prev(history, cross_copies)
        while history:
            __, node_revision = svn.fs.history_location(history)
            history_revisions.append(node_revision)
            if limit and len(history_revisions) >= limit:
                break
            history = svn.fs.history_prev(history, cross_copies)
        return history_revisions

    def node_properties(self, wire, path, revision):
        """Return the node property list of *path* at *revision*."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)
        return svn.fs.node_proplist(rev_root, path)

    def file_annotate(self, wire, path, revision):
        """Blame *path* at *revision*.

        Returns a list of ``(line_no, revision, line)`` tuples.
        """
        abs_path = 'file://' + urllib.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                "Blame not supported or file does not exist at path %s. "
                "Error %s." % (path, exc))

        return annotations

    def get_node_type(self, wire, path, rev=None):
        """Return 'file'/'dir' for *path* (None for other node kinds)."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_rev(fs_ptr)
        root = svn.fs.revision_root(fs_ptr, rev)
        node = svn.fs.check_path(root, path)
        return NODE_TYPE_MAPPING.get(node, None)

    def get_nodes(self, wire, path, revision=None):
        """List directory entries of *path* as ``(name, type)`` tuples."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_rev(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        entries = svn.fs.dir_entries(root, path)
        result = []
        for entry_path, entry_info in entries.iteritems():
            result.append(
                (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
        return result

    def get_file_content(self, wire, path, rev=None):
        """Return the raw content of *path* at *rev* (default: youngest)."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, rev)
        content = svn.core.Stream(svn.fs.file_contents(root, path))
        return content.read()

    def get_file_size(self, wire, path, revision=None):
        """Return the size in bytes of *path* at *revision*."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        size = svn.fs.file_length(root, path)
        return size

    def create_repository(self, wire, compatible_version=None):
        """Create a new svn repository at ``wire['path']``."""
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)

    def import_remote_repository(self, wire, src_url):
        """Mirror *src_url* into the local repo via ``svnrdump | svnadmin``."""
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        # TODO: johbo: URL checks ?
        import subprocess
        rdump = subprocess.Popen(
            ['svnrdump', 'dump', '--non-interactive', src_url],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'
            raise Exception(
                'Failed to dump the remote repository from %s.' % src_url,
                reason)
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))

    def commit(self, wire, message, author, timestamp, updated, removed):
        """Commit *updated*/*removed* node dicts; return the new revision.

        ``updated``/``removed`` entries are processed by TxnNodeProcessor.
        """
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # backdate the commit to the requested timestamp
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id

    def diff(self, wire, rev1, rev2, path1=None, path2=None,
             ignore_whitespace=False, context=3):
        """Return a unified diff between two revisions ('' on svn errors)."""
        wire.update(cache=False)
        repo = self._factory.repo(wire)
        diff_creator = SvnDiffer(
            repo, rev1, path1, rev2, path2, ignore_whitespace, context)
        try:
            return diff_creator.generate_diff()
        except svn.core.SubversionException:
            # fixed: dropped unused exception variable, log message typo,
            # and eager %-interpolation in favour of lazy log args
            log.exception(
                "Error during diff operation. "
                "Path might not exist %s, %s", path1, path2)
            return ""

    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        # LFS-style large files are a git concept; svn never reports any.
        return False

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """Install the RhodeCode svn hooks into the repository."""
        from vcsserver.hook_utils import install_svn_hooks
        repo_path = wire['path']
        binary_dir = settings.BINARY_DIR
        executable = None
        if binary_dir:
            executable = os.path.join(binary_dir, 'python')
        return install_svn_hooks(
            repo_path, executable=executable, force_create=force)
428 444
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set per node in _generate_node_diff; when True, content reading and
    # the unified-diff body are skipped for the current node.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # default the source path to the target path when not given
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Diffing a file against a directory (or vice versa) is undefined;
        # either side may be svn_node_none (add/delete cases).
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Render the whole diff into a string and return it."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Collect per-node change records via a delta editor, then emit a
        # node diff for each change in sorted path order.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # Derive the change kind from the node kinds: missing in source
        # means "add", missing in target means "delete", else modification.
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        # Write one git-style diff section for a single node into *buf*.

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)

        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

                #TODO(marcink): intro to binary detection of svn patches
                # if self.binary_content:
                #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # Prefer the target tree; fall back to the source tree when the
        # node does not exist there (e.g. deleted files).
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        # Return the node content split into lines; empty for binary
        # content and for non file/symlink nodes.
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
589 605
590 606
591 607 class DiffChangeEditor(svn.delta.Editor):
592 608 """
593 609 Records changes between two given revisions
594 610 """
595 611
596 612 def __init__(self):
597 613 self.changes = []
598 614
599 615 def delete_entry(self, path, revision, parent_baton, pool=None):
600 616 self.changes.append((path, None, 'delete'))
601 617
602 618 def add_file(
603 619 self, path, parent_baton, copyfrom_path, copyfrom_revision,
604 620 file_pool=None):
605 621 self.changes.append((path, 'file', 'add'))
606 622
607 623 def open_file(self, path, parent_baton, base_revision, file_pool=None):
608 624 self.changes.append((path, 'file', 'change'))
609 625
610 626
def authorization_callback_allow_all(root, path, pool):
    # Authz callback for svn.repos.dir_delta2 that grants access to all paths.
    return True
613 629
614 630
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # node is a dict with at least a 'path' key; 'content' and
        # 'properties' are read by update()
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create or update the node: parents, existence, content, props."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk upwards collecting missing ancestors, then create them
        # top-down (reversed) so each make_dir has an existing parent.
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        # Replace the full content through svn's textdelta machinery.
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
671 687
672 688
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    # APR counts microseconds since the epoch.
    return timestamp * 1000000.0
678 694
679 695
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    # Wrap the plain revision number in the value union ...
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    # ... and tag the revision as a by-number revision.
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
@@ -1,58 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import socket
19 19
20 20 import pytest
21 21
22 22
def pytest_addoption(parser):
    """Register the ``--repeat`` command line option with pytest."""
    repeat_help = "Number of repetitions in performance tests."
    parser.addoption('--repeat', type=int, default=100, help=repeat_help)
28 28
@pytest.fixture(scope='session')
def repeat(request):
    """
    Session-wide repetition count for performance tests.

    Taken from the ``--repeat`` command line option; slower calls may
    divide it by 10 or 100 so the default suite stays reasonably fast.
    """
    return request.config.getoption('--repeat')
38 38
39 39
@pytest.fixture(scope='session')
def vcsserver_port(request):
    """Session-wide free TCP port for the test vcsserver instance."""
    free_port = get_available_port()
    print('Using vcsserver port %s' % (free_port, ))
    return free_port
45 45
46 46
def get_available_port():
    """
    Bind a TCP socket on 127.0.0.1 to an OS-chosen port, read the port
    number back, release the socket and return the number.

    NOTE: nothing keeps the port reserved afterwards, so there is a small
    race window before the caller binds it again.
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    probe.bind(('127.0.0.1', 0))
    chosen_port = probe.getsockname()[1]
    probe.close()
    del probe
    return chosen_port
58
@@ -1,162 +1,165 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
# Canned set of git refs (ref name -> commit sha) reused across the tests
# below as the fake remote's advertised refs.
SAMPLE_REFS = {
    'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
    'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
    'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
    'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
    'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
}
34 34
35 35
@pytest.fixture
def git_remote():
    """
    A GitRemote instance backed by a mocked repository factory.
    """
    return git.GitRemote(Mock())
44 44
45 45
def test_discover_git_version(git_remote):
    # The remote must report a non-empty git version.
    detected_version = git_remote.discover_git_version()
    assert detected_version
49 49
50 50
class TestGitFetch(object):
    """Behaviour of GitRemote.fetch and related ref operations,
    exercised against mocked dulwich repositories."""

    def setup(self):
        # pytest-style setup: a GitRemote whose factory returns a mock repo.
        self.mock_repo = Mock()
        factory = Mock()
        factory.repo = Mock(return_value=self.mock_repo)
        self.remote_git = git.GitRemote(factory)

    def test_fetches_all_when_no_commit_ids_specified(self):
        def side_effect(determine_wants, *args, **kwargs):
            # dulwich calls determine_wants with the remote's refs
            determine_wants(SAMPLE_REFS)

        with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
            mock_fetch.side_effect = side_effect
            self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
            # without explicit refs, fetch must want everything available
            determine_wants = self.mock_repo.object_store.determine_wants_all
            determine_wants.assert_called_once_with(SAMPLE_REFS)

    def test_fetches_specified_commits(self):
        selected_refs = {
            'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
            'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
        }

        def side_effect(determine_wants, *args, **kwargs):
            # only the explicitly requested refs should be wanted
            result = determine_wants(SAMPLE_REFS)
            assert sorted(result) == sorted(selected_refs.values())
            return result

        with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
            mock_fetch.side_effect = side_effect
            self.remote_git.fetch(
                wire=None, url='/tmp/', apply_refs=False,
                refs=selected_refs.keys())
            # determine_wants_all must not be consulted when refs were given
            determine_wants = self.mock_repo.object_store.determine_wants_all
            assert determine_wants.call_count == 0

    def test_get_remote_refs(self):
        factory = Mock()
        remote_git = git.GitRemote(factory)
        url = 'http://example.com/test/test.git'
        sample_refs = {
            'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
            'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
        }

        # patch the Repo class used by get_remote_refs so no real repo is hit
        with patch('vcsserver.git.Repo', create=False) as mock_repo:
            mock_repo().get_refs.return_value = sample_refs
            remote_refs = remote_git.get_remote_refs(wire=None, url=url)
            mock_repo().get_refs.assert_called_once_with()
            assert remote_refs == sample_refs

    def test_remove_ref(self):
        ref_to_remove = 'refs/tags/v0.1.9'
        self.mock_repo.refs = SAMPLE_REFS.copy()
        self.remote_git.remove_ref(None, ref_to_remove)
        assert ref_to_remove not in self.mock_repo.refs
107 107
108 108
class TestReraiseSafeExceptions(object):
    """The reraise_safe_exceptions decorator must wrap every public
    GitRemote method and translate known vcs errors into tagged
    generic exceptions."""

    def test_method_decorated_with_reraise_safe_exceptions(self):
        factory = Mock()
        git_remote = git.GitRemote(factory)

        def fake_function():
            return None

        decorator = git.reraise_safe_exceptions(fake_function)

        methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
        for method_name, method in methods:
            if not method_name.startswith('_'):
                # im_func is the py2 unbound function; equal __code__ proves
                # the decorator wrapper is in place on every public method
                assert method.im_func.__code__ == decorator.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
        (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
        (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
        (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
        (dulwich.errors.HangupException(), 'error'),
        (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        @git.reraise_safe_exceptions
        def fake_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            fake_method()
        # dulwich errors are re-raised as plain Exception, with the vcs
        # error category attached in _vcs_kind
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type
141 141
142 142
class TestDulwichRepoWrapper(object):
    def test_calls_close_on_delete(self):
        # Repo must release its dulwich resources when garbage collected
        isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
        with isdir_patcher:
            repo = git.Repo('/tmp/abcde')
        with patch.object(git.DulwichRepo, 'close') as close_mock:
            del repo
        close_mock.assert_called_once_with()
151 151
152 152
class TestGitFactory(object):
    def test_create_repo_returns_dulwich_wrapper(self):

        # NOTE(review): side_effect set to a dict looks suspicious -- the
        # intent is presumably to stub the dogpile cache regions with a
        # 'repo_objects' region; confirm mock.return_value was not meant.
        with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
            mock.side_effect = {'repo_objects': ''}
            factory = git.GitFactory()
        wire = {
            'path': '/tmp/abcde'
        }
        isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
        with isdir_patcher:
            result = factory._create_repo(wire, True)
        assert isinstance(result, git.Repo)
@@ -1,44 +1,39 b''
1 1 """
2 2 Tests used to profile the HTTP based implementation.
3 3 """
4 4
5 5 import pytest
6 6 import webtest
7 7
8 8 from vcsserver.http_main import main
9 9
10 10
@pytest.fixture
def vcs_app():
    """A webtest wrapper around the vcsserver echo application."""
    settings = {
        'dev.use_echo_app': 'true',
        'locale': 'en_US.UTF-8',
    }
    wsgi_app = main({}, **settings)
    return webtest.TestApp(wsgi_app)
25 20
26 21
@pytest.fixture(scope='module')
def data():
    """Roughly 10 MB of 'x' payload for the streaming tests."""
    return 'x' * 1024 * 1024 * 10
31 26
32 27
def test_http_app_streaming_with_data(data, repeat, vcs_app):
    # POST the large payload repeatedly; every request must succeed
    for _ in xrange(repeat / 10):
        response = vcs_app.post('/stream/git/', params=data)
        assert response.status_code == 200
38 33
39 34
def test_http_app_streaming_no_data(repeat, vcs_app):
    # POST with an empty body repeatedly; every request must succeed
    for _ in xrange(repeat / 10):
        response = vcs_app.post('/stream/git/')
        assert response.status_code == 200
@@ -1,82 +1,89 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import logging
18 import hashlib
18 19
19 20 log = logging.getLogger(__name__)
20 21
21 22
def safe_int(val, default=None):
    """
    Coerce ``val`` to int; return ``default`` when the conversion fails.

    :param val: value to convert
    :param default: fallback returned on ValueError/TypeError
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        return default
37 38
38 39
def safe_str(unicode_, to_encoding=['utf8']):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :param to_encoding: encoding (or list of encodings) to try, in order
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, basestring):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    # try each requested encoding in order
    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    # best effort: let chardet guess the encoding, falling through to the
    # replace-encoding below when chardet is missing or detection fails
    try:
        import chardet
        encoding = chardet.detect(unicode_)['encoding']
        if encoding is None:
            # BUG FIX: UnicodeEncodeError requires 5 constructor arguments;
            # raising it bare produced a TypeError that escaped the except
            # clause below instead of triggering the replace fallback.
            raise UnicodeEncodeError(
                'chardet', unicode_, 0, len(unicode_), 'encoding detection failed')

        return unicode_.encode(encoding)
    except (ImportError, UnicodeEncodeError):
        return unicode_.encode(to_encoding[0], 'replace')
76 77
77 78
class AttributeDict(dict):
    """
    Dict subclass whose keys are also reachable as attributes; reading a
    missing attribute yields None instead of raising AttributeError.
    """
    def __getattr__(self, attr):
        return self.get(attr, None)

    def __setattr__(self, attr, value):
        self[attr] = value

    def __delattr__(self, attr):
        del self[attr]
84
85
def sha1(val):
    """Return the hexadecimal SHA1 digest of ``val``."""
    hasher = hashlib.sha1(val)
    return hasher.hexdigest()
88
89
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now