##// END OF EJS Templates
caches: replaced beaker with dogpile cache.
marcink -
r483:80e9ab60 default
parent child Browse files
Show More
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
import logging
from dogpile.cache import register_backend

# Expose our LRU-bounded in-memory backend to dogpile under a dedicated
# backend name; this is the name referenced by `rc_cache.*.backend` in the
# .ini configuration files.
register_backend(
    "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
    "LRUMemoryBackend")

log = logging.getLogger(__name__)

# NOTE(review): these imports intentionally come after register_backend() —
# presumably to guarantee the backend name exists before region setup; confirm.
from . import region_meta
from .util import key_generator, get_default_cache_settings, make_region
30
def configure_dogpile_cache(settings):
    """
    Discover every ``rc_cache.<region>.*`` entry in the given settings and
    register a configured dogpile cache region for each region name found.
    An optional ``cache_dir`` setting overrides the default cache directory.
    """
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # inspect available namespaces; the region name is everything before
    # the first dot of each stripped settings key
    avail_regions = set(key.split('.', 1)[0] for key in rc_cache_data)
    log.debug('dogpile: found following cache regions: %s', avail_regions)

    # register them into namespace
    for region_name in avail_regions:
        new_region = make_region(
            name=region_name, function_key_generator=key_generator)
        new_region.configure_from_config(
            settings, 'rc_cache.{}.'.format(region_name))

        log.debug('dogpile: registering a new region %s[%s]',
                  region_name, new_region.__dict__)
        region_meta.dogpile_cache_regions[region_name] = new_region
57
58
def includeme(config):
    # Pyramid inclusion hook: build dogpile cache regions from app settings.
    configure_dogpile_cache(config.registry.settings)
@@ -0,0 +1,51 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
import logging

from dogpile.cache.backends import memory as memory_backend
from lru import LRU as LRUDict


# fallback LRU capacity used when no `max_size` argument is configured
_default_max_size = 1024

log = logging.getLogger(__name__)
27
28
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """
    Dogpile memory backend whose cache dict is an LRU of bounded size.

    Arguments (popped from the backend `arguments` dict):
      max_size             -- LRU capacity, defaults to ``_default_max_size``
      log_max_size_reached -- if truthy, log each eviction at DEBUG level
    """
    # values are stored as-is; no pickling round-trip needed for in-process use
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        callback = None
        if arguments.pop('log_max_size_reached', None):
            def evicted(key, value):
                log.debug(
                    'LRU: evicting key `%s` due to max size %s reach', key, max_size)
            callback = evicted

        arguments['cache_dict'] = LRUDict(max_size, callback=callback)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        # `has_key()` is deprecated (and gone in Python 3); the `in`
        # operator is the portable membership test for mapping types
        if key in self._cache:
            del self._cache[key]

    def delete_multi(self, keys):
        # delegate to delete() instead of duplicating its check-then-del logic
        for key in keys:
            self.delete(key)
@@ -0,0 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
import os
import tempfile

# defaults applied when configuring dogpile backends; `cache_dir` may be
# overridden at startup via the application's `cache_dir` setting
dogpile_config_defaults = {
    'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
}

# GLOBAL TO STORE ALL REGISTERED REGIONS
dogpile_cache_regions = {}
@@ -0,0 +1,136 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
import os
import logging
import functools

from vcsserver.utils import safe_str, sha1
from dogpile.cache import CacheRegion
from dogpile.cache.util import compat

log = logging.getLogger(__name__)
27
28
class RhodeCodeCacheRegion(CacheRegion):
    # Extends dogpile's CacheRegion with a decorator that can bypass the
    # cache machinery entirely when a pre-computed `condition` is falsy.

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator that will not touch any dogpile internals
        if `condition` isn't met. This works a bit differently than
        `should_cache_fn`, and it's faster in cases where we never want to
        compute cached values.
        """
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def decorator(fn):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, fn)
            else:
                key_generator = function_key_generator(namespace, fn, to_str=to_str)

            @functools.wraps(fn)
            def decorate(*arg, **kw):
                key = key_generator(*arg, **kw)

                @functools.wraps(fn)
                def creator():
                    return fn(*arg, **kw)

                # fast path: when caching is disabled, compute directly and
                # never touch dogpile's get_or_create machinery
                if not condition:
                    return creator()

                timeout = expiration_time() if expiration_time_is_callable \
                    else expiration_time

                return self.get_or_create(key, creator, timeout, should_cache_fn)

            def invalidate(*arg, **kw):
                # drop the cached value for these exact arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # force-store `value` under the key for these arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # read the cached value (if any) without computing it
                key = key_generator(*arg, **kw)
                return self.get(key)

            def refresh(*arg, **kw):
                # recompute unconditionally, re-store, and return the fresh value
                key = key_generator(*arg, **kw)
                value = fn(*arg, **kw)
                self.set(key, value)
                return value

            # expose cache-management helpers on the decorated function
            decorate.set = set_
            decorate.invalidate = invalidate
            decorate.refresh = refresh
            decorate.get = get
            decorate.original = fn
            decorate.key_generator = key_generator

            return decorate

        return decorator
99
100
def make_region(*arg, **kw):
    """Factory returning a :class:`RhodeCodeCacheRegion` dogpile region."""
    return RhodeCodeCacheRegion(*arg, **kw)
103
104
def get_default_cache_settings(settings, prefixes=None):
    """
    Collect settings whose keys start with any of `prefixes`, returned as a
    dict keyed by the remainder of the key (prefix stripped, whitespace
    trimmed). String values are stripped too.
    """
    prefixes = prefixes or []
    cache_settings = {}
    for full_key, val in settings.items():
        for prefix in prefixes:
            if not full_key.startswith(prefix):
                continue
            short_key = full_key.split(prefix)[1].strip()
            if isinstance(val, basestring):
                val = val.strip()
            cache_settings[short_key] = val
    return cache_settings
117
118
def compute_key_from_params(*args):
    """
    Helper to compute a cache key from the given params, to be used in the
    cache manager: sha1 of all args joined by underscores.
    """
    joined = "_".join(safe_str(arg) for arg in args)
    return sha1(joined)
124
125
def key_generator(namespace, fn):
    """
    Build a dogpile key-generator callable for `fn`, producing keys of the
    form ``<namespace>:<func_name>_<sha1-of-args>`` (namespace defaults to
    ``'default'``).
    """
    func_name = fn.__name__

    def generate_key(*args):
        prefix = namespace if namespace else 'default'
        return "{}:{}_{}".format(
            prefix, func_name, compute_key_from_params(*args))

    return generate_key
@@ -1,1 +1,79 b''
1 development_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 0.0.0.0
10 port = 9900
11
12 use = egg:waitress#main
13
14
15 [app:main]
16 use = egg:rhodecode-vcsserver
17
18 pyramid.default_locale_name = en
19 pyramid.includes =
20
21 ## default locale used by VCS systems
22 locale = en_US.UTF-8
23
24
25 ## path to binaries for vcsserver, it should be set by the installer
26 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
27 core.binary_dir = ""
28
29 ## cache region for storing repo_objects cache
30 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
31 ## cache auto-expires after N seconds
32 rc_cache.repo_object.expiration_time = 300
33 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
34 rc_cache.repo_object.max_size = 100
35
36
37 ################################
38 ### LOGGING CONFIGURATION ####
39 ################################
40 [loggers]
41 keys = root, vcsserver
42
43 [handlers]
44 keys = console
45
46 [formatters]
47 keys = generic
48
49 #############
50 ## LOGGERS ##
51 #############
52 [logger_root]
53 level = NOTSET
54 handlers = console
55
56 [logger_vcsserver]
57 level = DEBUG
58 handlers =
59 qualname = vcsserver
60 propagate = 1
61
62
63 ##############
64 ## HANDLERS ##
65 ##############
66
67 [handler_console]
68 class = StreamHandler
69 args = (sys.stderr,)
70 level = DEBUG
71 formatter = generic
72
73 ################
74 ## FORMATTERS ##
75 ################
76
77 [formatter_generic]
78 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
79 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1 +1,100 b''
1 production_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 127.0.0.1
10 port = 9900
11
12
13 ##########################
14 ## GUNICORN WSGI SERVER ##
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 workers = 2
21 ## process name
22 proc_name = rhodecode_vcsserver
23 ## type of worker class, currently `sync` is the only option allowed.
24 worker_class = sync
25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 #worker_connections = 10
27 ## max number of requests that worker will handle before being gracefully
28 ## restarted, could prevent memory leaks
29 max_requests = 1000
30 max_requests_jitter = 30
31 ## amount of time a worker can spend with handling a request before it
32 ## gets killed and restarted. Set to 6hrs
33 timeout = 21600
34
35
36 [app:main]
37 use = egg:rhodecode-vcsserver
38
39 pyramid.default_locale_name = en
40 pyramid.includes =
41
42 ## default locale used by VCS systems
43 locale = en_US.UTF-8
44
45
46 ## path to binaries for vcsserver, it should be set by the installer
47 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
48 core.binary_dir = ""
49
50 ## cache region for storing repo_objects cache
51 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
52 ## cache auto-expires after N seconds
53 rc_cache.repo_object.expiration_time = 300
54 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
55 rc_cache.repo_object.max_size = 100
56
57
58 ################################
59 ### LOGGING CONFIGURATION ####
60 ################################
61 [loggers]
62 keys = root, vcsserver
63
64 [handlers]
65 keys = console
66
67 [formatters]
68 keys = generic
69
70 #############
71 ## LOGGERS ##
72 #############
73 [logger_root]
74 level = NOTSET
75 handlers = console
76
77 [logger_vcsserver]
78 level = DEBUG
79 handlers =
80 qualname = vcsserver
81 propagate = 1
82
83
84 ##############
85 ## HANDLERS ##
86 ##############
87
88 [handler_console]
89 class = StreamHandler
90 args = (sys.stderr,)
91 level = DEBUG
92 formatter = generic
93
94 ################
95 ## FORMATTERS ##
96 ################
97
98 [formatter_generic]
99 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
100 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,60 +1,53 b''
1 # Overrides for the generated python-packages.nix
1 # Overrides for the generated python-packages.nix
2 #
2 #
3 # This function is intended to be used as an extension to the generated file
3 # This function is intended to be used as an extension to the generated file
4 # python-packages.nix. The main objective is to add needed dependencies of C
4 # python-packages.nix. The main objective is to add needed dependencies of C
5 # libraries and tweak the build instructions where needed.
5 # libraries and tweak the build instructions where needed.
6
6
7 { pkgs
7 { pkgs
8 , basePythonPackages
8 , basePythonPackages
9 }:
9 }:
10
10
11 let
11 let
12 sed = "sed -i";
12 sed = "sed -i";
13
13
14 in
14 in
15
15
16 self: super: {
16 self: super: {
17
17
18 "beaker" = super."beaker".override (attrs: {
19 patches = [
20 ./patch_beaker/patch-beaker-lock-func-debug.diff
21 ./patch_beaker/patch-beaker-metadata-reuse.diff
22 ];
23 });
24
25 "gevent" = super."gevent".override (attrs: {
18 "gevent" = super."gevent".override (attrs: {
26 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
19 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
27 # NOTE: (marcink) odd requirements from gevent aren't set properly,
20 # NOTE: (marcink) odd requirements from gevent aren't set properly,
28 # thus we need to inject psutil manually
21 # thus we need to inject psutil manually
29 self."psutil"
22 self."psutil"
30 ];
23 ];
31 });
24 });
32
25
33 "hgsubversion" = super."hgsubversion".override (attrs: {
26 "hgsubversion" = super."hgsubversion".override (attrs: {
34 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
27 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
35 pkgs.sqlite
28 pkgs.sqlite
36 #basePythonPackages.sqlite3
29 #basePythonPackages.sqlite3
37 self.mercurial
30 self.mercurial
38 ];
31 ];
39 });
32 });
40
33
41 "subvertpy" = super."subvertpy".override (attrs: {
34 "subvertpy" = super."subvertpy".override (attrs: {
42 SVN_PREFIX = "${pkgs.subversion.dev}";
35 SVN_PREFIX = "${pkgs.subversion.dev}";
43 propagatedBuildInputs = [
36 propagatedBuildInputs = [
44 pkgs.apr.dev
37 pkgs.apr.dev
45 pkgs.aprutil
38 pkgs.aprutil
46 pkgs.subversion
39 pkgs.subversion
47 ];
40 ];
48 });
41 });
49
42
50 "mercurial" = super."mercurial".override (attrs: {
43 "mercurial" = super."mercurial".override (attrs: {
51 propagatedBuildInputs = [
44 propagatedBuildInputs = [
52 # self.python.modules.curses
45 # self.python.modules.curses
53 ];
46 ];
54 });
47 });
55
48
56 # Avoid that base packages screw up the build process
49 # Avoid that base packages screw up the build process
57 inherit (basePythonPackages)
50 inherit (basePythonPackages)
58 setuptools;
51 setuptools;
59
52
60 }
53 }
@@ -1,931 +1,936 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "atomicwrites" = super.buildPythonPackage {
7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.1.5";
8 name = "atomicwrites-1.1.5";
9 doCheck = false;
9 doCheck = false;
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.mit ];
15 license = [ pkgs.lib.licenses.mit ];
16 };
16 };
17 };
17 };
18 "attrs" = super.buildPythonPackage {
18 "attrs" = super.buildPythonPackage {
19 name = "attrs-18.1.0";
19 name = "attrs-18.1.0";
20 doCheck = false;
20 doCheck = false;
21 src = fetchurl {
21 src = fetchurl {
22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
24 };
24 };
25 meta = {
25 meta = {
26 license = [ pkgs.lib.licenses.mit ];
26 license = [ pkgs.lib.licenses.mit ];
27 };
27 };
28 };
28 };
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 name = "backports.shutil-get-terminal-size-1.0.0";
30 name = "backports.shutil-get-terminal-size-1.0.0";
31 doCheck = false;
31 doCheck = false;
32 src = fetchurl {
32 src = fetchurl {
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 };
35 };
36 meta = {
36 meta = {
37 license = [ pkgs.lib.licenses.mit ];
37 license = [ pkgs.lib.licenses.mit ];
38 };
38 };
39 };
39 };
40 "beaker" = super.buildPythonPackage {
41 name = "beaker-1.9.1";
42 doCheck = false;
43 propagatedBuildInputs = [
44 self."funcsigs"
45 ];
46 src = fetchurl {
47 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
48 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
49 };
50 meta = {
51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 };
53 };
54 "beautifulsoup4" = super.buildPythonPackage {
40 "beautifulsoup4" = super.buildPythonPackage {
55 name = "beautifulsoup4-4.6.0";
41 name = "beautifulsoup4-4.6.0";
56 doCheck = false;
42 doCheck = false;
57 src = fetchurl {
43 src = fetchurl {
58 url = "https://files.pythonhosted.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
44 url = "https://files.pythonhosted.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
59 sha256 = "12cf0ygpz9srpfh9gx2f9ba0swa1rzypv3sm4r0hmjyw6b4nm2w0";
45 sha256 = "12cf0ygpz9srpfh9gx2f9ba0swa1rzypv3sm4r0hmjyw6b4nm2w0";
60 };
46 };
61 meta = {
47 meta = {
62 license = [ pkgs.lib.licenses.mit ];
48 license = [ pkgs.lib.licenses.mit ];
63 };
49 };
64 };
50 };
65 "configobj" = super.buildPythonPackage {
51 "configobj" = super.buildPythonPackage {
66 name = "configobj-5.0.6";
52 name = "configobj-5.0.6";
67 doCheck = false;
53 doCheck = false;
68 propagatedBuildInputs = [
54 propagatedBuildInputs = [
69 self."six"
55 self."six"
70 ];
56 ];
71 src = fetchurl {
57 src = fetchurl {
72 url = "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
58 url = "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
73 sha256 = "00h9rcmws03xvdlfni11yb60bz3kxfvsj6dg6nrpzj71f03nbxd2";
59 sha256 = "00h9rcmws03xvdlfni11yb60bz3kxfvsj6dg6nrpzj71f03nbxd2";
74 };
60 };
75 meta = {
61 meta = {
76 license = [ pkgs.lib.licenses.bsdOriginal ];
62 license = [ pkgs.lib.licenses.bsdOriginal ];
77 };
63 };
78 };
64 };
79 "cov-core" = super.buildPythonPackage {
65 "cov-core" = super.buildPythonPackage {
80 name = "cov-core-1.15.0";
66 name = "cov-core-1.15.0";
81 doCheck = false;
67 doCheck = false;
82 propagatedBuildInputs = [
68 propagatedBuildInputs = [
83 self."coverage"
69 self."coverage"
84 ];
70 ];
85 src = fetchurl {
71 src = fetchurl {
86 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
87 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
88 };
74 };
89 meta = {
75 meta = {
90 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
91 };
77 };
92 };
78 };
93 "coverage" = super.buildPythonPackage {
79 "coverage" = super.buildPythonPackage {
94 name = "coverage-3.7.1";
80 name = "coverage-3.7.1";
95 doCheck = false;
81 doCheck = false;
96 src = fetchurl {
82 src = fetchurl {
97 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
83 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
98 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
84 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
99 };
85 };
100 meta = {
86 meta = {
101 license = [ pkgs.lib.licenses.bsdOriginal ];
87 license = [ pkgs.lib.licenses.bsdOriginal ];
102 };
88 };
103 };
89 };
104 "decorator" = super.buildPythonPackage {
90 "decorator" = super.buildPythonPackage {
105 name = "decorator-4.1.2";
91 name = "decorator-4.1.2";
106 doCheck = false;
92 doCheck = false;
107 src = fetchurl {
93 src = fetchurl {
108 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
109 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
110 };
96 };
111 meta = {
97 meta = {
112 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
113 };
99 };
114 };
100 };
101 "dogpile.cache" = super.buildPythonPackage {
102 name = "dogpile.cache-0.6.6";
103 doCheck = false;
104 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
106 sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
107 };
108 meta = {
109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 };
111 };
112 "dogpile.core" = super.buildPythonPackage {
113 name = "dogpile.core-0.4.1";
114 doCheck = false;
115 src = fetchurl {
116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 };
119 meta = {
120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 };
122 };
115 "dulwich" = super.buildPythonPackage {
123 "dulwich" = super.buildPythonPackage {
116 name = "dulwich-0.13.0";
124 name = "dulwich-0.13.0";
117 doCheck = false;
125 doCheck = false;
118 src = fetchurl {
126 src = fetchurl {
119 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
120 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
121 };
129 };
122 meta = {
130 meta = {
123 license = [ pkgs.lib.licenses.gpl2Plus ];
131 license = [ pkgs.lib.licenses.gpl2Plus ];
124 };
132 };
125 };
133 };
126 "enum34" = super.buildPythonPackage {
134 "enum34" = super.buildPythonPackage {
127 name = "enum34-1.1.6";
135 name = "enum34-1.1.6";
128 doCheck = false;
136 doCheck = false;
129 src = fetchurl {
137 src = fetchurl {
130 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
131 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
132 };
140 };
133 meta = {
141 meta = {
134 license = [ pkgs.lib.licenses.bsdOriginal ];
142 license = [ pkgs.lib.licenses.bsdOriginal ];
135 };
143 };
136 };
144 };
137 "funcsigs" = super.buildPythonPackage {
145 "funcsigs" = super.buildPythonPackage {
138 name = "funcsigs-1.0.2";
146 name = "funcsigs-1.0.2";
139 doCheck = false;
147 doCheck = false;
140 src = fetchurl {
148 src = fetchurl {
141 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
142 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
143 };
151 };
144 meta = {
152 meta = {
145 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
146 };
154 };
147 };
155 };
148 "gevent" = super.buildPythonPackage {
156 "gevent" = super.buildPythonPackage {
149 name = "gevent-1.3.4";
157 name = "gevent-1.3.4";
150 doCheck = false;
158 doCheck = false;
151 propagatedBuildInputs = [
159 propagatedBuildInputs = [
152 self."greenlet"
160 self."greenlet"
153 ];
161 ];
154 src = fetchurl {
162 src = fetchurl {
155 url = "https://files.pythonhosted.org/packages/f8/85/f92a8f43c9f15ffad49d743d929863a042ce3e8de5746c63bb4d6ce51a02/gevent-1.3.4.tar.gz";
163 url = "https://files.pythonhosted.org/packages/f8/85/f92a8f43c9f15ffad49d743d929863a042ce3e8de5746c63bb4d6ce51a02/gevent-1.3.4.tar.gz";
156 sha256 = "0x2gm3iba4cprclnbkcq2i14m6br2hfqns8yv3sjil46b1qdri2k";
164 sha256 = "0x2gm3iba4cprclnbkcq2i14m6br2hfqns8yv3sjil46b1qdri2k";
157 };
165 };
158 meta = {
166 meta = {
159 license = [ pkgs.lib.licenses.mit ];
167 license = [ pkgs.lib.licenses.mit ];
160 };
168 };
161 };
169 };
162 "gprof2dot" = super.buildPythonPackage {
170 "gprof2dot" = super.buildPythonPackage {
163 name = "gprof2dot-2017.9.19";
171 name = "gprof2dot-2017.9.19";
164 doCheck = false;
172 doCheck = false;
165 src = fetchurl {
173 src = fetchurl {
166 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
167 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
168 };
176 };
169 meta = {
177 meta = {
170 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
171 };
179 };
172 };
180 };
173 "greenlet" = super.buildPythonPackage {
181 "greenlet" = super.buildPythonPackage {
174 name = "greenlet-0.4.13";
182 name = "greenlet-0.4.13";
175 doCheck = false;
183 doCheck = false;
176 src = fetchurl {
184 src = fetchurl {
177 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
185 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
178 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
186 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
179 };
187 };
180 meta = {
188 meta = {
181 license = [ pkgs.lib.licenses.mit ];
189 license = [ pkgs.lib.licenses.mit ];
182 };
190 };
183 };
191 };
184 "gunicorn" = super.buildPythonPackage {
192 "gunicorn" = super.buildPythonPackage {
185 name = "gunicorn-19.9.0";
193 name = "gunicorn-19.9.0";
186 doCheck = false;
194 doCheck = false;
187 src = fetchurl {
195 src = fetchurl {
188 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
189 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
190 };
198 };
191 meta = {
199 meta = {
192 license = [ pkgs.lib.licenses.mit ];
200 license = [ pkgs.lib.licenses.mit ];
193 };
201 };
194 };
202 };
195 "hg-evolve" = super.buildPythonPackage {
203 "hg-evolve" = super.buildPythonPackage {
196 name = "hg-evolve-8.0.1";
204 name = "hg-evolve-8.0.1";
197 doCheck = false;
205 doCheck = false;
198 src = fetchurl {
206 src = fetchurl {
199 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
200 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
201 };
209 };
202 meta = {
210 meta = {
203 license = [ { fullName = "GPLv2+"; } ];
211 license = [ { fullName = "GPLv2+"; } ];
204 };
212 };
205 };
213 };
206 "hgsubversion" = super.buildPythonPackage {
214 "hgsubversion" = super.buildPythonPackage {
207 name = "hgsubversion-1.9.2";
215 name = "hgsubversion-1.9.2";
208 doCheck = false;
216 doCheck = false;
209 propagatedBuildInputs = [
217 propagatedBuildInputs = [
210 self."mercurial"
218 self."mercurial"
211 self."subvertpy"
219 self."subvertpy"
212 ];
220 ];
213 src = fetchurl {
221 src = fetchurl {
214 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
215 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
216 };
224 };
217 meta = {
225 meta = {
218 license = [ pkgs.lib.licenses.gpl1 ];
226 license = [ pkgs.lib.licenses.gpl1 ];
219 };
227 };
220 };
228 };
221 "hupper" = super.buildPythonPackage {
229 "hupper" = super.buildPythonPackage {
222 name = "hupper-1.3";
230 name = "hupper-1.3";
223 doCheck = false;
231 doCheck = false;
224 src = fetchurl {
232 src = fetchurl {
225 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
233 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
226 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
234 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
227 };
235 };
228 meta = {
236 meta = {
229 license = [ pkgs.lib.licenses.mit ];
237 license = [ pkgs.lib.licenses.mit ];
230 };
238 };
231 };
239 };
232 "infrae.cache" = super.buildPythonPackage {
233 name = "infrae.cache-1.0.1";
234 doCheck = false;
235 propagatedBuildInputs = [
236 self."beaker"
237 self."repoze.lru"
238 ];
239 src = fetchurl {
240 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
241 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
242 };
243 meta = {
244 license = [ pkgs.lib.licenses.zpl21 ];
245 };
246 };
247 "ipdb" = super.buildPythonPackage {
240 "ipdb" = super.buildPythonPackage {
248 name = "ipdb-0.11";
241 name = "ipdb-0.11";
249 doCheck = false;
242 doCheck = false;
250 propagatedBuildInputs = [
243 propagatedBuildInputs = [
251 self."setuptools"
244 self."setuptools"
252 self."ipython"
245 self."ipython"
253 ];
246 ];
254 src = fetchurl {
247 src = fetchurl {
255 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
256 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
257 };
250 };
258 meta = {
251 meta = {
259 license = [ pkgs.lib.licenses.bsdOriginal ];
252 license = [ pkgs.lib.licenses.bsdOriginal ];
260 };
253 };
261 };
254 };
262 "ipython" = super.buildPythonPackage {
255 "ipython" = super.buildPythonPackage {
263 name = "ipython-5.1.0";
256 name = "ipython-5.1.0";
264 doCheck = false;
257 doCheck = false;
265 propagatedBuildInputs = [
258 propagatedBuildInputs = [
266 self."setuptools"
259 self."setuptools"
267 self."decorator"
260 self."decorator"
268 self."pickleshare"
261 self."pickleshare"
269 self."simplegeneric"
262 self."simplegeneric"
270 self."traitlets"
263 self."traitlets"
271 self."prompt-toolkit"
264 self."prompt-toolkit"
272 self."pygments"
265 self."pygments"
273 self."pexpect"
266 self."pexpect"
274 self."backports.shutil-get-terminal-size"
267 self."backports.shutil-get-terminal-size"
275 self."pathlib2"
268 self."pathlib2"
276 self."pexpect"
269 self."pexpect"
277 ];
270 ];
278 src = fetchurl {
271 src = fetchurl {
279 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
280 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
281 };
274 };
282 meta = {
275 meta = {
283 license = [ pkgs.lib.licenses.bsdOriginal ];
276 license = [ pkgs.lib.licenses.bsdOriginal ];
284 };
277 };
285 };
278 };
286 "ipython-genutils" = super.buildPythonPackage {
279 "ipython-genutils" = super.buildPythonPackage {
287 name = "ipython-genutils-0.2.0";
280 name = "ipython-genutils-0.2.0";
288 doCheck = false;
281 doCheck = false;
289 src = fetchurl {
282 src = fetchurl {
290 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
291 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
292 };
285 };
293 meta = {
286 meta = {
294 license = [ pkgs.lib.licenses.bsdOriginal ];
287 license = [ pkgs.lib.licenses.bsdOriginal ];
295 };
288 };
296 };
289 };
290 "lru-dict" = super.buildPythonPackage {
291 name = "lru-dict-1.1.6";
292 doCheck = false;
293 src = fetchurl {
294 url = "https://files.pythonhosted.org/packages/00/a5/32ed6e10246cd341ca8cc205acea5d208e4053f48a4dced2b1b31d45ba3f/lru-dict-1.1.6.tar.gz";
295 sha256 = "1k2lhd4dpl6xa6iialbwx4l6bkdzxmzhygms39pvf19x1rk5fm1n";
296 };
297 meta = {
298 license = [ pkgs.lib.licenses.mit ];
299 };
300 };
297 "mako" = super.buildPythonPackage {
301 "mako" = super.buildPythonPackage {
298 name = "mako-1.0.7";
302 name = "mako-1.0.7";
299 doCheck = false;
303 doCheck = false;
300 propagatedBuildInputs = [
304 propagatedBuildInputs = [
301 self."markupsafe"
305 self."markupsafe"
302 ];
306 ];
303 src = fetchurl {
307 src = fetchurl {
304 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
308 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
305 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
309 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
306 };
310 };
307 meta = {
311 meta = {
308 license = [ pkgs.lib.licenses.mit ];
312 license = [ pkgs.lib.licenses.mit ];
309 };
313 };
310 };
314 };
311 "markupsafe" = super.buildPythonPackage {
315 "markupsafe" = super.buildPythonPackage {
312 name = "markupsafe-1.0";
316 name = "markupsafe-1.0";
313 doCheck = false;
317 doCheck = false;
314 src = fetchurl {
318 src = fetchurl {
315 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
319 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
316 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
320 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
317 };
321 };
318 meta = {
322 meta = {
319 license = [ pkgs.lib.licenses.bsdOriginal ];
323 license = [ pkgs.lib.licenses.bsdOriginal ];
320 };
324 };
321 };
325 };
322 "mercurial" = super.buildPythonPackage {
326 "mercurial" = super.buildPythonPackage {
323 name = "mercurial-4.6.1";
327 name = "mercurial-4.6.1";
324 doCheck = false;
328 doCheck = false;
325 src = fetchurl {
329 src = fetchurl {
326 url = "https://files.pythonhosted.org/packages/12/e7/46894628ed3d6b0ae1e324523b09fdb8a90f0720bebe43cab88e0ea91b39/mercurial-4.6.1.tar.gz";
330 url = "https://files.pythonhosted.org/packages/12/e7/46894628ed3d6b0ae1e324523b09fdb8a90f0720bebe43cab88e0ea91b39/mercurial-4.6.1.tar.gz";
327 sha256 = "138h46k4rhr8gd0a5nwm8896f4x97dla20wqizllhvmar35qxyl9";
331 sha256 = "138h46k4rhr8gd0a5nwm8896f4x97dla20wqizllhvmar35qxyl9";
328 };
332 };
329 meta = {
333 meta = {
330 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
334 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
331 };
335 };
332 };
336 };
333 "mock" = super.buildPythonPackage {
337 "mock" = super.buildPythonPackage {
334 name = "mock-1.0.1";
338 name = "mock-1.0.1";
335 doCheck = false;
339 doCheck = false;
336 src = fetchurl {
340 src = fetchurl {
337 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
341 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
338 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
342 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
339 };
343 };
340 meta = {
344 meta = {
341 license = [ pkgs.lib.licenses.bsdOriginal ];
345 license = [ pkgs.lib.licenses.bsdOriginal ];
342 };
346 };
343 };
347 };
344 "more-itertools" = super.buildPythonPackage {
348 "more-itertools" = super.buildPythonPackage {
345 name = "more-itertools-4.2.0";
349 name = "more-itertools-4.2.0";
346 doCheck = false;
350 doCheck = false;
347 propagatedBuildInputs = [
351 propagatedBuildInputs = [
348 self."six"
352 self."six"
349 ];
353 ];
350 src = fetchurl {
354 src = fetchurl {
351 url = "https://files.pythonhosted.org/packages/c0/2f/6773347277d76c5ade4414a6c3f785ef27e7f5c4b0870ec7e888e66a8d83/more-itertools-4.2.0.tar.gz";
355 url = "https://files.pythonhosted.org/packages/c0/2f/6773347277d76c5ade4414a6c3f785ef27e7f5c4b0870ec7e888e66a8d83/more-itertools-4.2.0.tar.gz";
352 sha256 = "1s6qhl7a7jy8gqw8p545rxfp7rwz1hmjr9p6prk93zbv6f9rhsrb";
356 sha256 = "1s6qhl7a7jy8gqw8p545rxfp7rwz1hmjr9p6prk93zbv6f9rhsrb";
353 };
357 };
354 meta = {
358 meta = {
355 license = [ pkgs.lib.licenses.mit ];
359 license = [ pkgs.lib.licenses.mit ];
356 };
360 };
357 };
361 };
358 "msgpack-python" = super.buildPythonPackage {
362 "msgpack-python" = super.buildPythonPackage {
359 name = "msgpack-python-0.5.6";
363 name = "msgpack-python-0.5.6";
360 doCheck = false;
364 doCheck = false;
361 src = fetchurl {
365 src = fetchurl {
362 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
366 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
363 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
367 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
364 };
368 };
365 meta = {
369 meta = {
366 license = [ pkgs.lib.licenses.asl20 ];
370 license = [ pkgs.lib.licenses.asl20 ];
367 };
371 };
368 };
372 };
369 "pastedeploy" = super.buildPythonPackage {
373 "pastedeploy" = super.buildPythonPackage {
370 name = "pastedeploy-1.5.2";
374 name = "pastedeploy-1.5.2";
371 doCheck = false;
375 doCheck = false;
372 src = fetchurl {
376 src = fetchurl {
373 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
377 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
374 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
378 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
375 };
379 };
376 meta = {
380 meta = {
377 license = [ pkgs.lib.licenses.mit ];
381 license = [ pkgs.lib.licenses.mit ];
378 };
382 };
379 };
383 };
380 "pathlib2" = super.buildPythonPackage {
384 "pathlib2" = super.buildPythonPackage {
381 name = "pathlib2-2.3.0";
385 name = "pathlib2-2.3.0";
382 doCheck = false;
386 doCheck = false;
383 propagatedBuildInputs = [
387 propagatedBuildInputs = [
384 self."six"
388 self."six"
385 self."scandir"
389 self."scandir"
386 ];
390 ];
387 src = fetchurl {
391 src = fetchurl {
388 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
392 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
389 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
393 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
390 };
394 };
391 meta = {
395 meta = {
392 license = [ pkgs.lib.licenses.mit ];
396 license = [ pkgs.lib.licenses.mit ];
393 };
397 };
394 };
398 };
395 "pexpect" = super.buildPythonPackage {
399 "pexpect" = super.buildPythonPackage {
396 name = "pexpect-4.6.0";
400 name = "pexpect-4.6.0";
397 doCheck = false;
401 doCheck = false;
398 propagatedBuildInputs = [
402 propagatedBuildInputs = [
399 self."ptyprocess"
403 self."ptyprocess"
400 ];
404 ];
401 src = fetchurl {
405 src = fetchurl {
402 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
406 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
403 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
407 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
404 };
408 };
405 meta = {
409 meta = {
406 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
410 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
407 };
411 };
408 };
412 };
409 "pickleshare" = super.buildPythonPackage {
413 "pickleshare" = super.buildPythonPackage {
410 name = "pickleshare-0.7.4";
414 name = "pickleshare-0.7.4";
411 doCheck = false;
415 doCheck = false;
412 propagatedBuildInputs = [
416 propagatedBuildInputs = [
413 self."pathlib2"
417 self."pathlib2"
414 ];
418 ];
415 src = fetchurl {
419 src = fetchurl {
416 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
420 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
417 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
421 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
418 };
422 };
419 meta = {
423 meta = {
420 license = [ pkgs.lib.licenses.mit ];
424 license = [ pkgs.lib.licenses.mit ];
421 };
425 };
422 };
426 };
423 "plaster" = super.buildPythonPackage {
427 "plaster" = super.buildPythonPackage {
424 name = "plaster-1.0";
428 name = "plaster-1.0";
425 doCheck = false;
429 doCheck = false;
426 propagatedBuildInputs = [
430 propagatedBuildInputs = [
427 self."setuptools"
431 self."setuptools"
428 ];
432 ];
429 src = fetchurl {
433 src = fetchurl {
430 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
434 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
431 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
435 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
432 };
436 };
433 meta = {
437 meta = {
434 license = [ pkgs.lib.licenses.mit ];
438 license = [ pkgs.lib.licenses.mit ];
435 };
439 };
436 };
440 };
437 "plaster-pastedeploy" = super.buildPythonPackage {
441 "plaster-pastedeploy" = super.buildPythonPackage {
438 name = "plaster-pastedeploy-0.5";
442 name = "plaster-pastedeploy-0.5";
439 doCheck = false;
443 doCheck = false;
440 propagatedBuildInputs = [
444 propagatedBuildInputs = [
441 self."pastedeploy"
445 self."pastedeploy"
442 self."plaster"
446 self."plaster"
443 ];
447 ];
444 src = fetchurl {
448 src = fetchurl {
445 url = "https://files.pythonhosted.org/packages/e7/05/cc12d9d3efaa10046b6ec5de91b16486c95de4847dc57599bf58021a3d5c/plaster_pastedeploy-0.5.tar.gz";
449 url = "https://files.pythonhosted.org/packages/e7/05/cc12d9d3efaa10046b6ec5de91b16486c95de4847dc57599bf58021a3d5c/plaster_pastedeploy-0.5.tar.gz";
446 sha256 = "1aavz3vbh7m9m6hfidwh6gqlrs1mrxl7k6794rm9jdik59dii8vh";
450 sha256 = "1aavz3vbh7m9m6hfidwh6gqlrs1mrxl7k6794rm9jdik59dii8vh";
447 };
451 };
448 meta = {
452 meta = {
449 license = [ pkgs.lib.licenses.mit ];
453 license = [ pkgs.lib.licenses.mit ];
450 };
454 };
451 };
455 };
452 "pluggy" = super.buildPythonPackage {
456 "pluggy" = super.buildPythonPackage {
453 name = "pluggy-0.6.0";
457 name = "pluggy-0.6.0";
454 doCheck = false;
458 doCheck = false;
455 src = fetchurl {
459 src = fetchurl {
456 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
460 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
457 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
461 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
458 };
462 };
459 meta = {
463 meta = {
460 license = [ pkgs.lib.licenses.mit ];
464 license = [ pkgs.lib.licenses.mit ];
461 };
465 };
462 };
466 };
463 "prompt-toolkit" = super.buildPythonPackage {
467 "prompt-toolkit" = super.buildPythonPackage {
464 name = "prompt-toolkit-1.0.15";
468 name = "prompt-toolkit-1.0.15";
465 doCheck = false;
469 doCheck = false;
466 propagatedBuildInputs = [
470 propagatedBuildInputs = [
467 self."six"
471 self."six"
468 self."wcwidth"
472 self."wcwidth"
469 ];
473 ];
470 src = fetchurl {
474 src = fetchurl {
471 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
475 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
472 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
476 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
473 };
477 };
474 meta = {
478 meta = {
475 license = [ pkgs.lib.licenses.bsdOriginal ];
479 license = [ pkgs.lib.licenses.bsdOriginal ];
476 };
480 };
477 };
481 };
478 "psutil" = super.buildPythonPackage {
482 "psutil" = super.buildPythonPackage {
479 name = "psutil-5.4.6";
483 name = "psutil-5.4.6";
480 doCheck = false;
484 doCheck = false;
481 src = fetchurl {
485 src = fetchurl {
482 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
486 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
483 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
487 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
484 };
488 };
485 meta = {
489 meta = {
486 license = [ pkgs.lib.licenses.bsdOriginal ];
490 license = [ pkgs.lib.licenses.bsdOriginal ];
487 };
491 };
488 };
492 };
489 "ptyprocess" = super.buildPythonPackage {
493 "ptyprocess" = super.buildPythonPackage {
490 name = "ptyprocess-0.6.0";
494 name = "ptyprocess-0.6.0";
491 doCheck = false;
495 doCheck = false;
492 src = fetchurl {
496 src = fetchurl {
493 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
497 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
494 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
498 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
495 };
499 };
496 meta = {
500 meta = {
497 license = [ ];
501 license = [ ];
498 };
502 };
499 };
503 };
500 "py" = super.buildPythonPackage {
504 "py" = super.buildPythonPackage {
501 name = "py-1.5.3";
505 name = "py-1.5.3";
502 doCheck = false;
506 doCheck = false;
503 src = fetchurl {
507 src = fetchurl {
504 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
508 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
505 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
509 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
506 };
510 };
507 meta = {
511 meta = {
508 license = [ pkgs.lib.licenses.mit ];
512 license = [ pkgs.lib.licenses.mit ];
509 };
513 };
510 };
514 };
511 "pygments" = super.buildPythonPackage {
515 "pygments" = super.buildPythonPackage {
512 name = "pygments-2.2.0";
516 name = "pygments-2.2.0";
513 doCheck = false;
517 doCheck = false;
514 src = fetchurl {
518 src = fetchurl {
515 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
519 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
516 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
520 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
517 };
521 };
518 meta = {
522 meta = {
519 license = [ pkgs.lib.licenses.bsdOriginal ];
523 license = [ pkgs.lib.licenses.bsdOriginal ];
520 };
524 };
521 };
525 };
522 "pyramid" = super.buildPythonPackage {
526 "pyramid" = super.buildPythonPackage {
523 name = "pyramid-1.9.2";
527 name = "pyramid-1.9.2";
524 doCheck = false;
528 doCheck = false;
525 propagatedBuildInputs = [
529 propagatedBuildInputs = [
526 self."setuptools"
530 self."setuptools"
527 self."webob"
531 self."webob"
528 self."repoze.lru"
532 self."repoze.lru"
529 self."zope.interface"
533 self."zope.interface"
530 self."zope.deprecation"
534 self."zope.deprecation"
531 self."venusian"
535 self."venusian"
532 self."translationstring"
536 self."translationstring"
533 self."pastedeploy"
537 self."pastedeploy"
534 self."plaster"
538 self."plaster"
535 self."plaster-pastedeploy"
539 self."plaster-pastedeploy"
536 self."hupper"
540 self."hupper"
537 ];
541 ];
538 src = fetchurl {
542 src = fetchurl {
539 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
543 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
540 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
544 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
541 };
545 };
542 meta = {
546 meta = {
543 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
547 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
544 };
548 };
545 };
549 };
546 "pyramid-mako" = super.buildPythonPackage {
550 "pyramid-mako" = super.buildPythonPackage {
547 name = "pyramid-mako-1.0.2";
551 name = "pyramid-mako-1.0.2";
548 doCheck = false;
552 doCheck = false;
549 propagatedBuildInputs = [
553 propagatedBuildInputs = [
550 self."pyramid"
554 self."pyramid"
551 self."mako"
555 self."mako"
552 ];
556 ];
553 src = fetchurl {
557 src = fetchurl {
554 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
558 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
555 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
559 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
556 };
560 };
557 meta = {
561 meta = {
558 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
562 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
559 };
563 };
560 };
564 };
561 "pytest" = super.buildPythonPackage {
565 "pytest" = super.buildPythonPackage {
562 name = "pytest-3.6.0";
566 name = "pytest-3.6.0";
563 doCheck = false;
567 doCheck = false;
564 propagatedBuildInputs = [
568 propagatedBuildInputs = [
565 self."py"
569 self."py"
566 self."six"
570 self."six"
567 self."setuptools"
571 self."setuptools"
568 self."attrs"
572 self."attrs"
569 self."more-itertools"
573 self."more-itertools"
570 self."atomicwrites"
574 self."atomicwrites"
571 self."pluggy"
575 self."pluggy"
572 self."funcsigs"
576 self."funcsigs"
573 ];
577 ];
574 src = fetchurl {
578 src = fetchurl {
575 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
579 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
576 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
580 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
577 };
581 };
578 meta = {
582 meta = {
579 license = [ pkgs.lib.licenses.mit ];
583 license = [ pkgs.lib.licenses.mit ];
580 };
584 };
581 };
585 };
582 "pytest-cov" = super.buildPythonPackage {
586 "pytest-cov" = super.buildPythonPackage {
583 name = "pytest-cov-2.5.1";
587 name = "pytest-cov-2.5.1";
584 doCheck = false;
588 doCheck = false;
585 propagatedBuildInputs = [
589 propagatedBuildInputs = [
586 self."pytest"
590 self."pytest"
587 self."coverage"
591 self."coverage"
588 ];
592 ];
589 src = fetchurl {
593 src = fetchurl {
590 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
594 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
591 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
595 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
592 };
596 };
593 meta = {
597 meta = {
594 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
598 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
595 };
599 };
596 };
600 };
597 "pytest-profiling" = super.buildPythonPackage {
601 "pytest-profiling" = super.buildPythonPackage {
598 name = "pytest-profiling-1.3.0";
602 name = "pytest-profiling-1.3.0";
599 doCheck = false;
603 doCheck = false;
600 propagatedBuildInputs = [
604 propagatedBuildInputs = [
601 self."six"
605 self."six"
602 self."pytest"
606 self."pytest"
603 self."gprof2dot"
607 self."gprof2dot"
604 ];
608 ];
605 src = fetchurl {
609 src = fetchurl {
606 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
610 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
607 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
611 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
608 };
612 };
609 meta = {
613 meta = {
610 license = [ pkgs.lib.licenses.mit ];
614 license = [ pkgs.lib.licenses.mit ];
611 };
615 };
612 };
616 };
613 "pytest-runner" = super.buildPythonPackage {
617 "pytest-runner" = super.buildPythonPackage {
614 name = "pytest-runner-4.2";
618 name = "pytest-runner-4.2";
615 doCheck = false;
619 doCheck = false;
616 src = fetchurl {
620 src = fetchurl {
617 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
621 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
618 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
622 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
619 };
623 };
620 meta = {
624 meta = {
621 license = [ pkgs.lib.licenses.mit ];
625 license = [ pkgs.lib.licenses.mit ];
622 };
626 };
623 };
627 };
624 "pytest-sugar" = super.buildPythonPackage {
628 "pytest-sugar" = super.buildPythonPackage {
625 name = "pytest-sugar-0.9.1";
629 name = "pytest-sugar-0.9.1";
626 doCheck = false;
630 doCheck = false;
627 propagatedBuildInputs = [
631 propagatedBuildInputs = [
628 self."pytest"
632 self."pytest"
629 self."termcolor"
633 self."termcolor"
630 ];
634 ];
631 src = fetchurl {
635 src = fetchurl {
632 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
636 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
633 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
637 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
634 };
638 };
635 meta = {
639 meta = {
636 license = [ pkgs.lib.licenses.bsdOriginal ];
640 license = [ pkgs.lib.licenses.bsdOriginal ];
637 };
641 };
638 };
642 };
639 "pytest-timeout" = super.buildPythonPackage {
643 "pytest-timeout" = super.buildPythonPackage {
640 name = "pytest-timeout-1.2.1";
644 name = "pytest-timeout-1.2.1";
641 doCheck = false;
645 doCheck = false;
642 propagatedBuildInputs = [
646 propagatedBuildInputs = [
643 self."pytest"
647 self."pytest"
644 ];
648 ];
645 src = fetchurl {
649 src = fetchurl {
646 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
650 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
647 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
651 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
648 };
652 };
649 meta = {
653 meta = {
650 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
654 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
651 };
655 };
652 };
656 };
653 "repoze.lru" = super.buildPythonPackage {
657 "repoze.lru" = super.buildPythonPackage {
654 name = "repoze.lru-0.7";
658 name = "repoze.lru-0.7";
655 doCheck = false;
659 doCheck = false;
656 src = fetchurl {
660 src = fetchurl {
657 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
661 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
658 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
662 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
659 };
663 };
660 meta = {
664 meta = {
661 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
662 };
666 };
663 };
667 };
664 "rhodecode-vcsserver" = super.buildPythonPackage {
668 "rhodecode-vcsserver" = super.buildPythonPackage {
665 name = "rhodecode-vcsserver-4.13.0";
669 name = "rhodecode-vcsserver-4.13.0";
666 buildInputs = [
670 buildInputs = [
667 self."pytest"
671 self."pytest"
668 self."py"
672 self."py"
669 self."pytest-cov"
673 self."pytest-cov"
670 self."pytest-sugar"
674 self."pytest-sugar"
671 self."pytest-runner"
675 self."pytest-runner"
672 self."pytest-profiling"
676 self."pytest-profiling"
673 self."gprof2dot"
677 self."gprof2dot"
674 self."pytest-timeout"
678 self."pytest-timeout"
675 self."mock"
679 self."mock"
676 self."webtest"
680 self."webtest"
677 self."cov-core"
681 self."cov-core"
678 self."coverage"
682 self."coverage"
679 self."configobj"
683 self."configobj"
680 ];
684 ];
681 doCheck = true;
685 doCheck = true;
682 propagatedBuildInputs = [
686 propagatedBuildInputs = [
683 self."beaker"
684 self."configobj"
687 self."configobj"
688 self."dogpile.cache"
689 self."dogpile.core"
685 self."decorator"
690 self."decorator"
686 self."dulwich"
691 self."dulwich"
687 self."hgsubversion"
692 self."hgsubversion"
688 self."hg-evolve"
693 self."hg-evolve"
689 self."infrae.cache"
694 self."lru-dict"
690 self."mako"
695 self."mako"
691 self."markupsafe"
696 self."markupsafe"
692 self."mercurial"
697 self."mercurial"
693 self."msgpack-python"
698 self."msgpack-python"
694 self."pastedeploy"
699 self."pastedeploy"
695 self."psutil"
700 self."psutil"
696 self."pyramid"
701 self."pyramid"
697 self."pyramid-mako"
702 self."pyramid-mako"
698 self."pygments"
703 self."pygments"
699 self."pathlib2"
704 self."pathlib2"
700 self."repoze.lru"
705 self."repoze.lru"
701 self."simplejson"
706 self."simplejson"
702 self."subprocess32"
707 self."subprocess32"
703 self."subvertpy"
708 self."subvertpy"
704 self."six"
709 self."six"
705 self."translationstring"
710 self."translationstring"
706 self."webob"
711 self."webob"
707 self."zope.deprecation"
712 self."zope.deprecation"
708 self."zope.interface"
713 self."zope.interface"
709 self."gevent"
714 self."gevent"
710 self."greenlet"
715 self."greenlet"
711 self."gunicorn"
716 self."gunicorn"
712 self."waitress"
717 self."waitress"
713 self."ipdb"
718 self."ipdb"
714 self."ipython"
719 self."ipython"
715 self."pytest"
720 self."pytest"
716 self."py"
721 self."py"
717 self."pytest-cov"
722 self."pytest-cov"
718 self."pytest-sugar"
723 self."pytest-sugar"
719 self."pytest-runner"
724 self."pytest-runner"
720 self."pytest-profiling"
725 self."pytest-profiling"
721 self."gprof2dot"
726 self."gprof2dot"
722 self."pytest-timeout"
727 self."pytest-timeout"
723 self."mock"
728 self."mock"
724 self."webtest"
729 self."webtest"
725 self."cov-core"
730 self."cov-core"
726 self."coverage"
731 self."coverage"
727 ];
732 ];
728 src = ./.;
733 src = ./.;
729 meta = {
734 meta = {
730 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
735 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
731 };
736 };
732 };
737 };
733 "scandir" = super.buildPythonPackage {
738 "scandir" = super.buildPythonPackage {
734 name = "scandir-1.7";
739 name = "scandir-1.7";
735 doCheck = false;
740 doCheck = false;
736 src = fetchurl {
741 src = fetchurl {
737 url = "https://files.pythonhosted.org/packages/13/bb/e541b74230bbf7a20a3949a2ee6631be299378a784f5445aa5d0047c192b/scandir-1.7.tar.gz";
742 url = "https://files.pythonhosted.org/packages/13/bb/e541b74230bbf7a20a3949a2ee6631be299378a784f5445aa5d0047c192b/scandir-1.7.tar.gz";
738 sha256 = "0gbnhjzg42rj87ljv9kb648rfxph69ly3c8r9841dxy4d7l5pmdj";
743 sha256 = "0gbnhjzg42rj87ljv9kb648rfxph69ly3c8r9841dxy4d7l5pmdj";
739 };
744 };
740 meta = {
745 meta = {
741 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
746 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
742 };
747 };
743 };
748 };
744 "simplegeneric" = super.buildPythonPackage {
749 "simplegeneric" = super.buildPythonPackage {
745 name = "simplegeneric-0.8.1";
750 name = "simplegeneric-0.8.1";
746 doCheck = false;
751 doCheck = false;
747 src = fetchurl {
752 src = fetchurl {
748 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
753 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
749 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
754 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
750 };
755 };
751 meta = {
756 meta = {
752 license = [ pkgs.lib.licenses.zpl21 ];
757 license = [ pkgs.lib.licenses.zpl21 ];
753 };
758 };
754 };
759 };
755 "simplejson" = super.buildPythonPackage {
760 "simplejson" = super.buildPythonPackage {
756 name = "simplejson-3.11.1";
761 name = "simplejson-3.11.1";
757 doCheck = false;
762 doCheck = false;
758 src = fetchurl {
763 src = fetchurl {
759 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
764 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
760 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
765 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
761 };
766 };
762 meta = {
767 meta = {
763 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
768 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
764 };
769 };
765 };
770 };
766 "six" = super.buildPythonPackage {
771 "six" = super.buildPythonPackage {
767 name = "six-1.11.0";
772 name = "six-1.11.0";
768 doCheck = false;
773 doCheck = false;
769 src = fetchurl {
774 src = fetchurl {
770 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
775 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
771 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
776 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
772 };
777 };
773 meta = {
778 meta = {
774 license = [ pkgs.lib.licenses.mit ];
779 license = [ pkgs.lib.licenses.mit ];
775 };
780 };
776 };
781 };
777 "subprocess32" = super.buildPythonPackage {
782 "subprocess32" = super.buildPythonPackage {
778 name = "subprocess32-3.5.1";
783 name = "subprocess32-3.5.1";
779 doCheck = false;
784 doCheck = false;
780 src = fetchurl {
785 src = fetchurl {
781 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
786 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
782 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
787 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
783 };
788 };
784 meta = {
789 meta = {
785 license = [ pkgs.lib.licenses.psfl ];
790 license = [ pkgs.lib.licenses.psfl ];
786 };
791 };
787 };
792 };
788 "subvertpy" = super.buildPythonPackage {
793 "subvertpy" = super.buildPythonPackage {
789 name = "subvertpy-0.10.1";
794 name = "subvertpy-0.10.1";
790 doCheck = false;
795 doCheck = false;
791 src = fetchurl {
796 src = fetchurl {
792 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
797 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
793 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
798 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
794 };
799 };
795 meta = {
800 meta = {
796 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
801 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
797 };
802 };
798 };
803 };
799 "termcolor" = super.buildPythonPackage {
804 "termcolor" = super.buildPythonPackage {
800 name = "termcolor-1.1.0";
805 name = "termcolor-1.1.0";
801 doCheck = false;
806 doCheck = false;
802 src = fetchurl {
807 src = fetchurl {
803 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
808 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
804 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
809 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
805 };
810 };
806 meta = {
811 meta = {
807 license = [ pkgs.lib.licenses.mit ];
812 license = [ pkgs.lib.licenses.mit ];
808 };
813 };
809 };
814 };
810 "traitlets" = super.buildPythonPackage {
815 "traitlets" = super.buildPythonPackage {
811 name = "traitlets-4.3.2";
816 name = "traitlets-4.3.2";
812 doCheck = false;
817 doCheck = false;
813 propagatedBuildInputs = [
818 propagatedBuildInputs = [
814 self."ipython-genutils"
819 self."ipython-genutils"
815 self."six"
820 self."six"
816 self."decorator"
821 self."decorator"
817 self."enum34"
822 self."enum34"
818 ];
823 ];
819 src = fetchurl {
824 src = fetchurl {
820 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
825 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
821 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
826 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
822 };
827 };
823 meta = {
828 meta = {
824 license = [ pkgs.lib.licenses.bsdOriginal ];
829 license = [ pkgs.lib.licenses.bsdOriginal ];
825 };
830 };
826 };
831 };
827 "translationstring" = super.buildPythonPackage {
832 "translationstring" = super.buildPythonPackage {
828 name = "translationstring-1.3";
833 name = "translationstring-1.3";
829 doCheck = false;
834 doCheck = false;
830 src = fetchurl {
835 src = fetchurl {
831 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
836 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
832 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
837 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
833 };
838 };
834 meta = {
839 meta = {
835 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
840 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
836 };
841 };
837 };
842 };
838 "venusian" = super.buildPythonPackage {
843 "venusian" = super.buildPythonPackage {
839 name = "venusian-1.1.0";
844 name = "venusian-1.1.0";
840 doCheck = false;
845 doCheck = false;
841 src = fetchurl {
846 src = fetchurl {
842 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
847 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
843 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
848 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
844 };
849 };
845 meta = {
850 meta = {
846 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
851 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
847 };
852 };
848 };
853 };
849 "waitress" = super.buildPythonPackage {
854 "waitress" = super.buildPythonPackage {
850 name = "waitress-1.1.0";
855 name = "waitress-1.1.0";
851 doCheck = false;
856 doCheck = false;
852 src = fetchurl {
857 src = fetchurl {
853 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
858 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
854 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
859 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
855 };
860 };
856 meta = {
861 meta = {
857 license = [ pkgs.lib.licenses.zpl21 ];
862 license = [ pkgs.lib.licenses.zpl21 ];
858 };
863 };
859 };
864 };
860 "wcwidth" = super.buildPythonPackage {
865 "wcwidth" = super.buildPythonPackage {
861 name = "wcwidth-0.1.7";
866 name = "wcwidth-0.1.7";
862 doCheck = false;
867 doCheck = false;
863 src = fetchurl {
868 src = fetchurl {
864 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
869 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
865 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
870 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
866 };
871 };
867 meta = {
872 meta = {
868 license = [ pkgs.lib.licenses.mit ];
873 license = [ pkgs.lib.licenses.mit ];
869 };
874 };
870 };
875 };
871 "webob" = super.buildPythonPackage {
876 "webob" = super.buildPythonPackage {
872 name = "webob-1.7.4";
877 name = "webob-1.7.4";
873 doCheck = false;
878 doCheck = false;
874 src = fetchurl {
879 src = fetchurl {
875 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
880 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
876 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
881 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
877 };
882 };
878 meta = {
883 meta = {
879 license = [ pkgs.lib.licenses.mit ];
884 license = [ pkgs.lib.licenses.mit ];
880 };
885 };
881 };
886 };
882 "webtest" = super.buildPythonPackage {
887 "webtest" = super.buildPythonPackage {
883 name = "webtest-2.0.29";
888 name = "webtest-2.0.29";
884 doCheck = false;
889 doCheck = false;
885 propagatedBuildInputs = [
890 propagatedBuildInputs = [
886 self."six"
891 self."six"
887 self."webob"
892 self."webob"
888 self."waitress"
893 self."waitress"
889 self."beautifulsoup4"
894 self."beautifulsoup4"
890 ];
895 ];
891 src = fetchurl {
896 src = fetchurl {
892 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
897 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
893 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
898 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
894 };
899 };
895 meta = {
900 meta = {
896 license = [ pkgs.lib.licenses.mit ];
901 license = [ pkgs.lib.licenses.mit ];
897 };
902 };
898 };
903 };
899 "zope.deprecation" = super.buildPythonPackage {
904 "zope.deprecation" = super.buildPythonPackage {
900 name = "zope.deprecation-4.3.0";
905 name = "zope.deprecation-4.3.0";
901 doCheck = false;
906 doCheck = false;
902 propagatedBuildInputs = [
907 propagatedBuildInputs = [
903 self."setuptools"
908 self."setuptools"
904 ];
909 ];
905 src = fetchurl {
910 src = fetchurl {
906 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
911 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
907 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
912 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
908 };
913 };
909 meta = {
914 meta = {
910 license = [ pkgs.lib.licenses.zpl21 ];
915 license = [ pkgs.lib.licenses.zpl21 ];
911 };
916 };
912 };
917 };
913 "zope.interface" = super.buildPythonPackage {
918 "zope.interface" = super.buildPythonPackage {
914 name = "zope.interface-4.5.0";
919 name = "zope.interface-4.5.0";
915 doCheck = false;
920 doCheck = false;
916 propagatedBuildInputs = [
921 propagatedBuildInputs = [
917 self."setuptools"
922 self."setuptools"
918 ];
923 ];
919 src = fetchurl {
924 src = fetchurl {
920 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
925 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
921 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
926 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
922 };
927 };
923 meta = {
928 meta = {
924 license = [ pkgs.lib.licenses.zpl21 ];
929 license = [ pkgs.lib.licenses.zpl21 ];
925 };
930 };
926 };
931 };
927
932
928 ### Test requirements
933 ### Test requirements
929
934
930
935
931 }
936 }
@@ -1,45 +1,46 b''
1 ## dependencies
1 ## dependencies
2
2
3 beaker==1.9.1
4 configobj==5.0.6
3 configobj==5.0.6
4 dogpile.cache==0.6.6
5 dogpile.core==0.4.1
5 decorator==4.1.2
6 decorator==4.1.2
6 dulwich==0.13.0
7 dulwich==0.13.0
7 hgsubversion==1.9.2
8 hgsubversion==1.9.2
8 hg-evolve==8.0.1
9 hg-evolve==8.0.1
9 infrae.cache==1.0.1
10 lru-dict==1.1.6
10 mako==1.0.7
11 mako==1.0.7
11 markupsafe==1.0.0
12 markupsafe==1.0.0
12 mercurial==4.6.1
13 mercurial==4.6.1
13 msgpack-python==0.5.6
14 msgpack-python==0.5.6
14
15
15 pastedeploy==1.5.2
16 pastedeploy==1.5.2
16 psutil==5.4.6
17 psutil==5.4.6
17 pyramid==1.9.2
18 pyramid==1.9.2
18 pyramid-mako==1.0.2
19 pyramid-mako==1.0.2
19
20
20 pygments==2.2.0
21 pygments==2.2.0
21 pathlib2==2.3.0
22 pathlib2==2.3.0
22 repoze.lru==0.7
23 repoze.lru==0.7
23 simplejson==3.11.1
24 simplejson==3.11.1
24 subprocess32==3.5.1
25 subprocess32==3.5.1
25
26
26 subvertpy==0.10.1
27 subvertpy==0.10.1
27
28
28 six==1.11.0
29 six==1.11.0
29 translationstring==1.3
30 translationstring==1.3
30 webob==1.7.4
31 webob==1.7.4
31 zope.deprecation==4.3.0
32 zope.deprecation==4.3.0
32 zope.interface==4.5.0
33 zope.interface==4.5.0
33
34
34 ## http servers
35 ## http servers
35 gevent==1.3.4
36 gevent==1.3.4
36 greenlet==0.4.13
37 greenlet==0.4.13
37 gunicorn==19.9.0
38 gunicorn==19.9.0
38 waitress==1.1.0
39 waitress==1.1.0
39
40
40 ## debug
41 ## debug
41 ipdb==0.11.0
42 ipdb==0.11.0
42 ipython==5.1.0
43 ipython==5.1.0
43
44
44 ## test related requirements
45 ## test related requirements
45 -r requirements_test.txt
46 -r requirements_test.txt
@@ -1,98 +1,93 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import sys
18 import sys
19 import traceback
19 import traceback
20 import logging
20 import logging
21 import urlparse
21 import urlparse
22
22
23 from vcsserver.lib.rc_cache import region_meta
23 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
24
25
25
26
26 class RepoFactory(object):
27 class RepoFactory(object):
27 """
28 """
28 Utility to create instances of repository
29 Utility to create instances of repository
29
30
30 It provides internal caching of the `repo` object based on
31 It provides internal caching of the `repo` object based on
31 the :term:`call context`.
32 the :term:`call context`.
32 """
33 """
34 repo_type = None
33
35
34 def __init__(self, repo_cache):
36 def __init__(self):
35 self._cache = repo_cache
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
36
38
37 def _create_config(self, path, config):
39 def _create_config(self, path, config):
38 config = {}
40 config = {}
39 return config
41 return config
40
42
41 def _create_repo(self, wire, create):
43 def _create_repo(self, wire, create):
42 raise NotImplementedError()
44 raise NotImplementedError()
43
45
44 def repo(self, wire, create=False):
46 def repo(self, wire, create=False):
45 """
47 """
46 Get a repository instance for the given path.
48 Get a repository instance for the given path.
47
49
48 Uses internally the low level beaker API since the decorators introduce
50 Uses internally the low level beaker API since the decorators introduce
49 significant overhead.
51 significant overhead.
50 """
52 """
51 def create_new_repo():
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
52 return self._create_repo(wire, create)
62 return self._create_repo(wire, create)
53
63
54 return self._repo(wire, create_new_repo)
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
55
65 return repo
56 def _repo(self, wire, createfunc):
57 context = wire.get('context', None)
58 cache = wire.get('cache', True)
59
60 if context and cache:
61 cache_key = (context, wire['path'])
62 log.debug(
63 'FETCH %s@%s repo object from cache. Context: %s',
64 self.__class__.__name__, wire['path'], context)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
66 else:
67 log.debug(
68 'INIT %s@%s repo object based on wire %s. Context: %s',
69 self.__class__.__name__, wire['path'], wire, context)
70 return createfunc()
71
66
72
67
73 def obfuscate_qs(query_string):
68 def obfuscate_qs(query_string):
74 if query_string is None:
69 if query_string is None:
75 return None
70 return None
76
71
77 parsed = []
72 parsed = []
78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
73 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
79 if k in ['auth_token', 'api_key']:
74 if k in ['auth_token', 'api_key']:
80 v = "*****"
75 v = "*****"
81 parsed.append((k, v))
76 parsed.append((k, v))
82
77
83 return '&'.join('{}{}'.format(
78 return '&'.join('{}{}'.format(
84 k, '={}'.format(v) if v else '') for k, v in parsed)
79 k, '={}'.format(v) if v else '') for k, v in parsed)
85
80
86
81
87 def raise_from_original(new_type):
82 def raise_from_original(new_type):
88 """
83 """
89 Raise a new exception type with original args and traceback.
84 Raise a new exception type with original args and traceback.
90 """
85 """
91 exc_type, exc_value, exc_traceback = sys.exc_info()
86 exc_type, exc_value, exc_traceback = sys.exc_info()
92
87
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
88 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
89
95 try:
90 try:
96 raise new_type(*exc_value.args), None, exc_traceback
91 raise new_type(*exc_value.args), None, exc_traceback
97 finally:
92 finally:
98 del exc_traceback
93 del exc_traceback
@@ -1,670 +1,671 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 from dulwich import index, objects
28 from dulwich import index, objects
29 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.errors import (
30 from dulwich.errors import (
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 MissingCommitError, ObjectMissing, HangupException,
32 MissingCommitError, ObjectMissing, HangupException,
33 UnexpectedCommandError)
33 UnexpectedCommandError)
34 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.server import update_server_info
35 from dulwich.server import update_server_info
36
36
37 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver.utils import safe_str
38 from vcsserver.utils import safe_str
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.hgcompat import (
40 from vcsserver.hgcompat import (
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 from vcsserver.git_lfs.lib import LFSOidStore
42 from vcsserver.git_lfs.lib import LFSOidStore
43
43
44 DIR_STAT = stat.S_IFDIR
44 DIR_STAT = stat.S_IFDIR
45 FILE_MODE = stat.S_IFMT
45 FILE_MODE = stat.S_IFMT
46 GIT_LINK = objects.S_IFGITLINK
46 GIT_LINK = objects.S_IFGITLINK
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 def reraise_safe_exceptions(func):
51 def reraise_safe_exceptions(func):
52 """Converts Dulwich exceptions to something neutral."""
52 """Converts Dulwich exceptions to something neutral."""
53 @wraps(func)
53 @wraps(func)
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 ObjectMissing) as e:
58 ObjectMissing) as e:
59 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e.message)
60 except (HangupException, UnexpectedCommandError) as e:
60 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e.message)
62 except Exception as e:
62 except Exception as e:
63 # NOTE(marcink): becuase of how dulwich handles some exceptions
63 # NOTE(marcink): becuase of how dulwich handles some exceptions
64 # (KeyError on empty repos), we cannot track this and catch all
64 # (KeyError on empty repos), we cannot track this and catch all
65 # exceptions, it's an exceptions from other handlers
65 # exceptions, it's an exceptions from other handlers
66 #if not hasattr(e, '_vcs_kind'):
66 #if not hasattr(e, '_vcs_kind'):
67 #log.exception("Unhandled exception in git remote call")
67 #log.exception("Unhandled exception in git remote call")
68 #raise_from_original(exceptions.UnhandledException)
68 #raise_from_original(exceptions.UnhandledException)
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class Repo(DulwichRepo):
73 class Repo(DulwichRepo):
74 """
74 """
75 A wrapper for dulwich Repo class.
75 A wrapper for dulwich Repo class.
76
76
77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
78 "Too many open files" error. We need to close all opened file descriptors
78 "Too many open files" error. We need to close all opened file descriptors
79 once the repo object is destroyed.
79 once the repo object is destroyed.
80
80
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 to 0.12.0 +
82 to 0.12.0 +
83 """
83 """
84 def __del__(self):
84 def __del__(self):
85 if hasattr(self, 'object_store'):
85 if hasattr(self, 'object_store'):
86 self.close()
86 self.close()
87
87
88
88
89 class GitFactory(RepoFactory):
89 class GitFactory(RepoFactory):
90 repo_type = 'git'
90
91
91 def _create_repo(self, wire, create):
92 def _create_repo(self, wire, create):
92 repo_path = str_to_dulwich(wire['path'])
93 repo_path = str_to_dulwich(wire['path'])
93 return Repo(repo_path)
94 return Repo(repo_path)
94
95
95
96
class GitRemote(object):
    """
    Remote interface to git repositories, backed by dulwich.

    Every public method takes a ``wire`` dict (carrying at least the
    repository ``path`` and, optionally, a ``config`` list of
    ``(section, key, value)`` triples) which is resolved to a dulwich
    ``Repo`` through ``self._factory``.  Public methods are wrapped with
    ``@reraise_safe_exceptions`` so backend errors cross the vcsserver
    boundary as known exception types.

    NOTE(review): Python 2 era code (``urllib2``, ``dict.iteritems``,
    byte-string refs) — keep that in mind when touching string handling.
    """

    def __init__(self, factory):
        # Factory that creates (and may cache) dulwich Repo objects per wire.
        self._factory = factory

        # Attribute-name -> resolver method, used by bulk_request() to fetch
        # several commit attributes in a single remote call.
        self._bulk_methods = {
            "author": self.commit_attribute,
            "date": self.get_object_attrs,
            "message": self.commit_attribute,
            "parents": self.commit_attribute,
            "_commit": self.revision,
        }

    def _wire_to_config(self, wire):
        """Flatten wire ``config`` triples into a ``{section_key: value}`` dict."""
        if 'config' in wire:
            return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
        return {}

    def _assign_ref(self, wire, ref, commit_id):
        """Point ``ref`` at ``commit_id`` in the repository."""
        repo = self._factory.repo(wire)
        repo[ref] = commit_id

    @reraise_safe_exceptions
    def add_object(self, wire, content):
        """Store ``content`` as a new blob in the object store; return its sha."""
        repo = self._factory.repo(wire)
        blob = objects.Blob()
        blob.set_raw_string(content)
        repo.object_store.add_object(blob)
        return blob.id

    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        """Return True if the wire path points at a valid git repository."""
        path = wire.get('path')
        try:
            self._factory.repo(wire)
        except NotGitRepository as e:
            # NOTE(review): `e` is unused here; the formatted traceback is
            # logged instead.
            tb = traceback.format_exc()
            log.debug("Invalid Git path `%s`, tb: %s", path, tb)
            return False

        return True

    @reraise_safe_exceptions
    def bare(self, wire):
        """Return True if the repository is bare (no working tree)."""
        repo = self._factory.repo(wire)
        return repo.bare

    @reraise_safe_exceptions
    def blob_as_pretty_string(self, wire, sha):
        """Return the blob ``sha`` rendered by dulwich's as_pretty_string()."""
        repo = self._factory.repo(wire)
        return repo[sha].as_pretty_string()

    @reraise_safe_exceptions
    def blob_raw_length(self, wire, sha):
        """Return the uncompressed size in bytes of blob ``sha``."""
        repo = self._factory.repo(wire)
        blob = repo[sha]
        return blob.raw_length()

    def _parse_lfs_pointer(self, raw_content):
        """
        Parse a git-lfs pointer file.

        Returns a dict with ``spec_ver``, ``oid_hash`` and ``oid_size`` keys
        when ``raw_content`` matches the LFS pointer format, otherwise an
        empty dict.
        """

        spec_string = 'version https://git-lfs.github.com/spec'
        if raw_content and raw_content.startswith(spec_string):
            pattern = re.compile(r"""
            (?:\n)?
            ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
            ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
            ^size[ ](?P<oid_size>[0-9]+)\n
            (?:\n)?
            """, re.VERBOSE | re.MULTILINE)
            match = pattern.match(raw_content)
            if match:
                return match.groupdict()

        return {}

    @reraise_safe_exceptions
    def is_large_file(self, wire, sha):
        """Return the parsed LFS pointer dict for blob ``sha`` ({} if not LFS)."""
        repo = self._factory.repo(wire)
        blob = repo[sha]
        return self._parse_lfs_pointer(blob.as_raw_string())

    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, oid):
        """Return True if the LFS object ``oid`` exists in the configured store."""
        repo = self._factory.repo(wire)
        conf = self._wire_to_config(wire)

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:
            repo_name = repo.path
            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.has_oid()

        # No store configured -> the object cannot be present.
        return False

    @reraise_safe_exceptions
    def store_path(self, wire, oid):
        """Return the filesystem path of LFS object ``oid``.

        Raises ValueError when no LFS store location is configured.
        """
        repo = self._factory.repo(wire)
        conf = self._wire_to_config(wire)

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:
            repo_name = repo.path
            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.oid_path
        raise ValueError('Unable to fetch oid with path {}'.format(oid))

    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        """Resolve several commit attributes for ``rev`` in one call.

        ``pre_load`` names must be keys of ``self._bulk_methods``; unknown
        names raise VcsException.
        """
        result = {}
        for attr in pre_load:
            try:
                method = self._bulk_methods[attr]
                args = [wire, rev]
                # Some resolvers need extra positional arguments.
                if attr == "date":
                    args.extend(["commit_time", "commit_timezone"])
                elif attr in ["author", "message", "parents"]:
                    args.append(attr)
                result[attr] = method(*args)
            except KeyError:
                raise exceptions.VcsException(
                    "Unknown bulk attribute: %s" % attr)
        return result

    def _build_opener(self, url):
        """Build a urllib2 opener, adding basic/digest auth handlers when the
        URL carries credentials."""
        handlers = []
        url_obj = url_parser(url)
        _, authinfo = url_obj.authinfo()

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        return urllib2.build_opener(*handlers)

    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Check that ``url`` is reachable and serves the git smart protocol.

        Returns True on success, raises exceptions.URLError otherwise.
        Credentials and query strings are masked before logging.
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True

    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """Clone by fetching from ``url`` and applying filtered refs locally.

        Only refs starting with one of ``valid_refs`` and not ending with
        ``deferred`` are applied.  When ``update_after_clone`` is set the
        working tree is checked out at the remote HEAD.
        """
        remote_refs = self.fetch(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # str.startswith needs a tuple for multiple prefixes.
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)

    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """Create a commit on ``branch`` from updated/removed node dicts.

        ``updated`` entries carry ``path``, ``node_path``, ``content`` and
        ``mode``; ``removed`` is a list of paths.  Returns the new commit sha.
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(
                    name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id

    @reraise_safe_exceptions
    def fetch(self, wire, url, apply_refs=True, refs=None):
        """Fetch from ``url`` (local path or http(s)).

        With ``apply_refs`` the fetched refs are written into the local repo
        (and HEAD set to the last requested ref); otherwise the remote ref
        dict is returned.  ``refs`` optionally restricts what is fetched.
        """
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            PEELED_REF_MARKER = '^{}'
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.info("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

            # TODO: mikhail: should we return remote_refs here to be
            # consistent?
        else:
            return remote_refs

    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        """Mirror-push the repository to ``url`` after validating the URL.

        NOTE(review): ``refs`` is accepted but not used, and the ``repo``
        local is unused — the push is done via the git CLI.
        """
        if self.check_url(url, wire):
            repo = self._factory.repo(wire)
            self.run_git_command(
                wire, ['push', url, '--mirror'], fail_on_stderr=False,
                _copts=['-c', 'core.askpass=""'],
                extra_env={'GIT_TERMINAL_PROMPT': '0'})

    @reraise_safe_exceptions
    def get_remote_refs(self, wire, url):
        """Return the refs dict of the repository at ``url`` (opened directly,
        not through the factory)."""
        repo = Repo(url)
        return repo.get_refs()

    @reraise_safe_exceptions
    def get_description(self, wire):
        """Return the repository description text."""
        repo = self._factory.repo(wire)
        return repo.get_description()

    @reraise_safe_exceptions
    def get_file_history(self, wire, file_path, commit_id, limit):
        """Return up to ``limit`` commit ids touching ``file_path``, starting
        from ``commit_id``."""
        repo = self._factory.repo(wire)
        include = [commit_id]
        paths = [file_path]

        walker = repo.get_walker(include, paths=paths, max_entries=limit)
        return [x.commit.id for x in walker]

    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        """Return commit ids reachable from ``rev2`` but not from ``rev1``,
        after cross-fetching between this repo and the one at ``path2``."""
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs

    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        """Return id/type/commit_id info for object ``sha``.

        For tag objects ``commit_id`` is the tagged object's sha.
        """
        repo = self._factory.repo(wire)
        obj = repo.get_object(sha)
        commit_id = obj.id

        if isinstance(obj, Tag):
            commit_id = obj.object[1]

        return {
            'id': obj.id,
            'type': obj.type_name,
            'commit_id': commit_id
        }

    @reraise_safe_exceptions
    def get_object_attrs(self, wire, sha, *attrs):
        """Return the listed attributes of object ``sha`` in order."""
        repo = self._factory.repo(wire)
        obj = repo.get_object(sha)
        return list(getattr(obj, a) for a in attrs)

    @reraise_safe_exceptions
    def get_refs(self, wire):
        """Return a ``{ref: peeled_sha}`` mapping for all refs."""
        repo = self._factory.repo(wire)
        result = {}
        # NOTE(review): `sha` from as_dict() is unused; the peeled sha is
        # reported instead so annotated tags resolve to commits.
        for ref, sha in repo.refs.as_dict().items():
            peeled_sha = repo.get_peeled(ref)
            result[ref] = peeled_sha
        return result

    @reraise_safe_exceptions
    def get_refs_path(self, wire):
        """Return the filesystem path of the refs container."""
        repo = self._factory.repo(wire)
        return repo.refs.path

    @reraise_safe_exceptions
    def head(self, wire):
        """Return the sha HEAD points at."""
        repo = self._factory.repo(wire)
        return repo.head()

    @reraise_safe_exceptions
    def init(self, wire):
        """Initialise a non-bare repository at the wire path."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)

    @reraise_safe_exceptions
    def init_bare(self, wire):
        """Initialise a bare repository at the wire path."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """Return ``{'id': ..., 'tree': ...}`` for ``rev``.

        ``tree`` is omitted for objects without that attribute (non-commits).
        """
        repo = self._factory.repo(wire)
        obj = repo[rev]
        obj_data = {
            'id': obj.id,
        }
        try:
            obj_data['tree'] = obj.tree
        except AttributeError:
            pass
        return obj_data

    @reraise_safe_exceptions
    def commit_attribute(self, wire, rev, attr):
        """Return a single attribute of the object at ``rev``."""
        repo = self._factory.repo(wire)
        obj = repo[rev]
        return getattr(obj, attr)

    @reraise_safe_exceptions
    def set_refs(self, wire, key, value):
        """Set ref ``key`` to ``value``."""
        repo = self._factory.repo(wire)
        repo.refs[key] = value

    @reraise_safe_exceptions
    def remove_ref(self, wire, key):
        """Delete ref ``key``."""
        repo = self._factory.repo(wire)
        del repo.refs[key]

    @reraise_safe_exceptions
    def tree_changes(self, wire, source_id, target_id):
        """Return dulwich tree_changes between two commits (source may be
        None for the empty tree)."""
        repo = self._factory.repo(wire)
        source = repo[source_id].tree if source_id else None
        target = repo[target_id].tree
        result = repo.object_store.tree_changes(source, target)
        return list(result)

    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        """Return ``(path, mode, sha, type)`` tuples for entries of a tree;
        submodule links are reported as type 'link'."""
        repo = self._factory.repo(wire)
        tree = repo[tree_id]

        result = []
        for item in tree.iteritems():
            item_sha = item.sha
            item_mode = item.mode

            if FILE_MODE(item_mode) == GIT_LINK:
                item_type = "link"
            else:
                item_type = repo[item_sha].type_name

            result.append((item.path, item_mode, item_sha, item_type))
        return result

    @reraise_safe_exceptions
    def update_server_info(self, wire):
        """Regenerate info files needed by the dumb HTTP protocol."""
        repo = self._factory.repo(wire)
        update_server_info(repo)

    @reraise_safe_exceptions
    def discover_git_version(self):
        """Return the installed git binary's version string."""
        stdout, _ = self.run_git_command(
            {}, ['--version'], _bare=True, _safe=True)
        prefix = 'git version'
        if stdout.startswith(prefix):
            stdout = stdout[len(prefix):]
        return stdout.strip()

    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """Run a git CLI command, returning ``(stdout, stderr)``.

        Special keyword options: ``_bare`` skips default -c options,
        ``_safe`` returns the error instead of raising, ``_copts`` adds
        extra -c options, ``extra_env`` extends the environment.
        """
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd

        try:
            _opts = {'env': gitenv, 'shell': False}
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n" % (cmd, err))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException(tb_err)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """Install RhodeCode git hooks into the repository."""
        from vcsserver.hook_utils import install_git_hooks
        repo = self._factory.repo(wire)
        return install_git_hooks(repo.path, repo.bare, force_create=force)
664
665
665
666
def str_to_dulwich(value):
    """
    Decode a wire byte string to unicode for dulwich.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    wire_encoding = settings.WIRE_ENCODING
    return value.decode(wire_encoding)
@@ -1,791 +1,792 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 baseui.setconfig('ui', 'paginate', 'never')
56 baseui.setconfig('ui', 'paginate', 'never')
57 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 # signal in a non-main thread, thus generating a ValueError.
58 # signal in a non-main thread, thus generating a ValueError.
59 baseui.setconfig('worker', 'numcpus', 1)
59 baseui.setconfig('worker', 'numcpus', 1)
60
60
61 # If there is no config for the largefiles extension, we explicitly disable
61 # If there is no config for the largefiles extension, we explicitly disable
62 # it here. This overrides settings from repositories hgrc file. Recent
62 # it here. This overrides settings from repositories hgrc file. Recent
63 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 # repo.
64 # repo.
65 if not baseui.hasconfig('extensions', 'largefiles'):
65 if not baseui.hasconfig('extensions', 'largefiles'):
66 log.debug('Explicitly disable largefiles extension for repo.')
66 log.debug('Explicitly disable largefiles extension for repo.')
67 baseui.setconfig('extensions', 'largefiles', '!')
67 baseui.setconfig('extensions', 'largefiles', '!')
68
68
69 return baseui
69 return baseui
70
70
71
71
72 def reraise_safe_exceptions(func):
72 def reraise_safe_exceptions(func):
73 """Decorator for converting mercurial exceptions to something neutral."""
73 """Decorator for converting mercurial exceptions to something neutral."""
74 def wrapper(*args, **kwargs):
74 def wrapper(*args, **kwargs):
75 try:
75 try:
76 return func(*args, **kwargs)
76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired):
77 except (Abort, InterventionRequired):
78 raise_from_original(exceptions.AbortException)
78 raise_from_original(exceptions.AbortException)
79 except RepoLookupError:
79 except RepoLookupError:
80 raise_from_original(exceptions.LookupException)
80 raise_from_original(exceptions.LookupException)
81 except RequirementError:
81 except RequirementError:
82 raise_from_original(exceptions.RequirementException)
82 raise_from_original(exceptions.RequirementException)
83 except RepoError:
83 except RepoError:
84 raise_from_original(exceptions.VcsException)
84 raise_from_original(exceptions.VcsException)
85 except LookupError:
85 except LookupError:
86 raise_from_original(exceptions.LookupException)
86 raise_from_original(exceptions.LookupException)
87 except Exception as e:
87 except Exception as e:
88 if not hasattr(e, '_vcs_kind'):
88 if not hasattr(e, '_vcs_kind'):
89 log.exception("Unhandled exception in hg remote call")
89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException)
91 raise
91 raise
92 return wrapper
92 return wrapper
93
93
94
94
95 class MercurialFactory(RepoFactory):
95 class MercurialFactory(RepoFactory):
96 repo_type = 'hg'
96
97
97 def _create_config(self, config, hooks=True):
98 def _create_config(self, config, hooks=True):
98 if not hooks:
99 if not hooks:
99 hooks_to_clean = frozenset((
100 hooks_to_clean = frozenset((
100 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 new_config = []
103 new_config = []
103 for section, option, value in config:
104 for section, option, value in config:
104 if section == 'hooks' and option in hooks_to_clean:
105 if section == 'hooks' and option in hooks_to_clean:
105 continue
106 continue
106 new_config.append((section, option, value))
107 new_config.append((section, option, value))
107 config = new_config
108 config = new_config
108
109
109 baseui = make_ui_from_config(config)
110 baseui = make_ui_from_config(config)
110 return baseui
111 return baseui
111
112
112 def _create_repo(self, wire, create):
113 def _create_repo(self, wire, create):
113 baseui = self._create_config(wire["config"])
114 baseui = self._create_config(wire["config"])
114 return localrepository(baseui, wire["path"], create)
115 return localrepository(baseui, wire["path"], create)
115
116
116
117
117 class HgRemote(object):
118 class HgRemote(object):
118
119
119 def __init__(self, factory):
120 def __init__(self, factory):
120 self._factory = factory
121 self._factory = factory
121
122
122 self._bulk_methods = {
123 self._bulk_methods = {
123 "affected_files": self.ctx_files,
124 "affected_files": self.ctx_files,
124 "author": self.ctx_user,
125 "author": self.ctx_user,
125 "branch": self.ctx_branch,
126 "branch": self.ctx_branch,
126 "children": self.ctx_children,
127 "children": self.ctx_children,
127 "date": self.ctx_date,
128 "date": self.ctx_date,
128 "message": self.ctx_description,
129 "message": self.ctx_description,
129 "parents": self.ctx_parents,
130 "parents": self.ctx_parents,
130 "status": self.ctx_status,
131 "status": self.ctx_status,
131 "obsolete": self.ctx_obsolete,
132 "obsolete": self.ctx_obsolete,
132 "phase": self.ctx_phase,
133 "phase": self.ctx_phase,
133 "hidden": self.ctx_hidden,
134 "hidden": self.ctx_hidden,
134 "_file_paths": self.ctx_list,
135 "_file_paths": self.ctx_list,
135 }
136 }
136
137
137 @reraise_safe_exceptions
138 @reraise_safe_exceptions
138 def discover_hg_version(self):
139 def discover_hg_version(self):
139 from mercurial import util
140 from mercurial import util
140 return util.version()
141 return util.version()
141
142
142 @reraise_safe_exceptions
143 @reraise_safe_exceptions
143 def archive_repo(self, archive_path, mtime, file_info, kind):
144 def archive_repo(self, archive_path, mtime, file_info, kind):
144 if kind == "tgz":
145 if kind == "tgz":
145 archiver = archival.tarit(archive_path, mtime, "gz")
146 archiver = archival.tarit(archive_path, mtime, "gz")
146 elif kind == "tbz2":
147 elif kind == "tbz2":
147 archiver = archival.tarit(archive_path, mtime, "bz2")
148 archiver = archival.tarit(archive_path, mtime, "bz2")
148 elif kind == 'zip':
149 elif kind == 'zip':
149 archiver = archival.zipit(archive_path, mtime)
150 archiver = archival.zipit(archive_path, mtime)
150 else:
151 else:
151 raise exceptions.ArchiveException(
152 raise exceptions.ArchiveException(
152 'Remote does not support: "%s".' % kind)
153 'Remote does not support: "%s".' % kind)
153
154
154 for f_path, f_mode, f_is_link, f_content in file_info:
155 for f_path, f_mode, f_is_link, f_content in file_info:
155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 archiver.done()
157 archiver.done()
157
158
158 @reraise_safe_exceptions
159 @reraise_safe_exceptions
159 def bookmarks(self, wire):
160 def bookmarks(self, wire):
160 repo = self._factory.repo(wire)
161 repo = self._factory.repo(wire)
161 return dict(repo._bookmarks)
162 return dict(repo._bookmarks)
162
163
163 @reraise_safe_exceptions
164 @reraise_safe_exceptions
164 def branches(self, wire, normal, closed):
165 def branches(self, wire, normal, closed):
165 repo = self._factory.repo(wire)
166 repo = self._factory.repo(wire)
166 iter_branches = repo.branchmap().iterbranches()
167 iter_branches = repo.branchmap().iterbranches()
167 bt = {}
168 bt = {}
168 for branch_name, _heads, tip, is_closed in iter_branches:
169 for branch_name, _heads, tip, is_closed in iter_branches:
169 if normal and not is_closed:
170 if normal and not is_closed:
170 bt[branch_name] = tip
171 bt[branch_name] = tip
171 if closed and is_closed:
172 if closed and is_closed:
172 bt[branch_name] = tip
173 bt[branch_name] = tip
173
174
174 return bt
175 return bt
175
176
176 @reraise_safe_exceptions
177 @reraise_safe_exceptions
177 def bulk_request(self, wire, rev, pre_load):
178 def bulk_request(self, wire, rev, pre_load):
178 result = {}
179 result = {}
179 for attr in pre_load:
180 for attr in pre_load:
180 try:
181 try:
181 method = self._bulk_methods[attr]
182 method = self._bulk_methods[attr]
182 result[attr] = method(wire, rev)
183 result[attr] = method(wire, rev)
183 except KeyError:
184 except KeyError:
184 raise exceptions.VcsException(
185 raise exceptions.VcsException(
185 'Unknown bulk attribute: "%s"' % attr)
186 'Unknown bulk attribute: "%s"' % attr)
186 return result
187 return result
187
188
188 @reraise_safe_exceptions
189 @reraise_safe_exceptions
189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 clone(baseui, source, dest, noupdate=not update_after_clone)
192 clone(baseui, source, dest, noupdate=not update_after_clone)
192
193
193 @reraise_safe_exceptions
194 @reraise_safe_exceptions
194 def commitctx(
195 def commitctx(
195 self, wire, message, parents, commit_time, commit_timezone,
196 self, wire, message, parents, commit_time, commit_timezone,
196 user, files, extra, removed, updated):
197 user, files, extra, removed, updated):
197
198
198 def _filectxfn(_repo, memctx, path):
199 def _filectxfn(_repo, memctx, path):
199 """
200 """
200 Marks given path as added/changed/removed in a given _repo. This is
201 Marks given path as added/changed/removed in a given _repo. This is
201 for internal mercurial commit function.
202 for internal mercurial commit function.
202 """
203 """
203
204
204 # check if this path is removed
205 # check if this path is removed
205 if path in removed:
206 if path in removed:
206 # returning None is a way to mark node for removal
207 # returning None is a way to mark node for removal
207 return None
208 return None
208
209
209 # check if this path is added
210 # check if this path is added
210 for node in updated:
211 for node in updated:
211 if node['path'] == path:
212 if node['path'] == path:
212 return memfilectx(
213 return memfilectx(
213 _repo,
214 _repo,
214 changectx=memctx,
215 changectx=memctx,
215 path=node['path'],
216 path=node['path'],
216 data=node['content'],
217 data=node['content'],
217 islink=False,
218 islink=False,
218 isexec=bool(node['mode'] & stat.S_IXUSR),
219 isexec=bool(node['mode'] & stat.S_IXUSR),
219 copied=False)
220 copied=False)
220
221
221 raise exceptions.AbortException(
222 raise exceptions.AbortException(
222 "Given path haven't been marked as added, "
223 "Given path haven't been marked as added, "
223 "changed or removed (%s)" % path)
224 "changed or removed (%s)" % path)
224
225
225 repo = self._factory.repo(wire)
226 repo = self._factory.repo(wire)
226
227
227 commit_ctx = memctx(
228 commit_ctx = memctx(
228 repo=repo,
229 repo=repo,
229 parents=parents,
230 parents=parents,
230 text=message,
231 text=message,
231 files=files,
232 files=files,
232 filectxfn=_filectxfn,
233 filectxfn=_filectxfn,
233 user=user,
234 user=user,
234 date=(commit_time, commit_timezone),
235 date=(commit_time, commit_timezone),
235 extra=extra)
236 extra=extra)
236
237
237 n = repo.commitctx(commit_ctx)
238 n = repo.commitctx(commit_ctx)
238 new_id = hex(n)
239 new_id = hex(n)
239
240
240 return new_id
241 return new_id
241
242
242 @reraise_safe_exceptions
243 @reraise_safe_exceptions
243 def ctx_branch(self, wire, revision):
244 def ctx_branch(self, wire, revision):
244 repo = self._factory.repo(wire)
245 repo = self._factory.repo(wire)
245 ctx = repo[revision]
246 ctx = repo[revision]
246 return ctx.branch()
247 return ctx.branch()
247
248
248 @reraise_safe_exceptions
249 @reraise_safe_exceptions
249 def ctx_children(self, wire, revision):
250 def ctx_children(self, wire, revision):
250 repo = self._factory.repo(wire)
251 repo = self._factory.repo(wire)
251 ctx = repo[revision]
252 ctx = repo[revision]
252 return [child.rev() for child in ctx.children()]
253 return [child.rev() for child in ctx.children()]
253
254
254 @reraise_safe_exceptions
255 @reraise_safe_exceptions
255 def ctx_date(self, wire, revision):
256 def ctx_date(self, wire, revision):
256 repo = self._factory.repo(wire)
257 repo = self._factory.repo(wire)
257 ctx = repo[revision]
258 ctx = repo[revision]
258 return ctx.date()
259 return ctx.date()
259
260
260 @reraise_safe_exceptions
261 @reraise_safe_exceptions
261 def ctx_description(self, wire, revision):
262 def ctx_description(self, wire, revision):
262 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
263 ctx = repo[revision]
264 ctx = repo[revision]
264 return ctx.description()
265 return ctx.description()
265
266
266 @reraise_safe_exceptions
267 @reraise_safe_exceptions
267 def ctx_diff(
268 def ctx_diff(
268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 repo = self._factory.repo(wire)
270 repo = self._factory.repo(wire)
270 ctx = repo[revision]
271 ctx = repo[revision]
271 result = ctx.diff(
272 result = ctx.diff(
272 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 return list(result)
274 return list(result)
274
275
275 @reraise_safe_exceptions
276 @reraise_safe_exceptions
276 def ctx_files(self, wire, revision):
277 def ctx_files(self, wire, revision):
277 repo = self._factory.repo(wire)
278 repo = self._factory.repo(wire)
278 ctx = repo[revision]
279 ctx = repo[revision]
279 return ctx.files()
280 return ctx.files()
280
281
281 @reraise_safe_exceptions
282 @reraise_safe_exceptions
282 def ctx_list(self, path, revision):
283 def ctx_list(self, path, revision):
283 repo = self._factory.repo(path)
284 repo = self._factory.repo(path)
284 ctx = repo[revision]
285 ctx = repo[revision]
285 return list(ctx)
286 return list(ctx)
286
287
287 @reraise_safe_exceptions
288 @reraise_safe_exceptions
288 def ctx_parents(self, wire, revision):
289 def ctx_parents(self, wire, revision):
289 repo = self._factory.repo(wire)
290 repo = self._factory.repo(wire)
290 ctx = repo[revision]
291 ctx = repo[revision]
291 return [parent.rev() for parent in ctx.parents()]
292 return [parent.rev() for parent in ctx.parents()]
292
293
293 @reraise_safe_exceptions
294 @reraise_safe_exceptions
294 def ctx_phase(self, wire, revision):
295 def ctx_phase(self, wire, revision):
295 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
296 ctx = repo[revision]
297 ctx = repo[revision]
297 # public=0, draft=1, secret=3
298 # public=0, draft=1, secret=3
298 return ctx.phase()
299 return ctx.phase()
299
300
300 @reraise_safe_exceptions
301 @reraise_safe_exceptions
301 def ctx_obsolete(self, wire, revision):
302 def ctx_obsolete(self, wire, revision):
302 repo = self._factory.repo(wire)
303 repo = self._factory.repo(wire)
303 ctx = repo[revision]
304 ctx = repo[revision]
304 return ctx.obsolete()
305 return ctx.obsolete()
305
306
306 @reraise_safe_exceptions
307 @reraise_safe_exceptions
307 def ctx_hidden(self, wire, revision):
308 def ctx_hidden(self, wire, revision):
308 repo = self._factory.repo(wire)
309 repo = self._factory.repo(wire)
309 ctx = repo[revision]
310 ctx = repo[revision]
310 return ctx.hidden()
311 return ctx.hidden()
311
312
312 @reraise_safe_exceptions
313 @reraise_safe_exceptions
313 def ctx_substate(self, wire, revision):
314 def ctx_substate(self, wire, revision):
314 repo = self._factory.repo(wire)
315 repo = self._factory.repo(wire)
315 ctx = repo[revision]
316 ctx = repo[revision]
316 return ctx.substate
317 return ctx.substate
317
318
318 @reraise_safe_exceptions
319 @reraise_safe_exceptions
319 def ctx_status(self, wire, revision):
320 def ctx_status(self, wire, revision):
320 repo = self._factory.repo(wire)
321 repo = self._factory.repo(wire)
321 ctx = repo[revision]
322 ctx = repo[revision]
322 status = repo[ctx.p1().node()].status(other=ctx.node())
323 status = repo[ctx.p1().node()].status(other=ctx.node())
323 # object of status (odd, custom named tuple in mercurial) is not
324 # object of status (odd, custom named tuple in mercurial) is not
324 # correctly serializable, we make it a list, as the underling
325 # correctly serializable, we make it a list, as the underling
325 # API expects this to be a list
326 # API expects this to be a list
326 return list(status)
327 return list(status)
327
328
328 @reraise_safe_exceptions
329 @reraise_safe_exceptions
329 def ctx_user(self, wire, revision):
330 def ctx_user(self, wire, revision):
330 repo = self._factory.repo(wire)
331 repo = self._factory.repo(wire)
331 ctx = repo[revision]
332 ctx = repo[revision]
332 return ctx.user()
333 return ctx.user()
333
334
334 @reraise_safe_exceptions
335 @reraise_safe_exceptions
335 def check_url(self, url, config):
336 def check_url(self, url, config):
336 _proto = None
337 _proto = None
337 if '+' in url[:url.find('://')]:
338 if '+' in url[:url.find('://')]:
338 _proto = url[0:url.find('+')]
339 _proto = url[0:url.find('+')]
339 url = url[url.find('+') + 1:]
340 url = url[url.find('+') + 1:]
340 handlers = []
341 handlers = []
341 url_obj = url_parser(url)
342 url_obj = url_parser(url)
342 test_uri, authinfo = url_obj.authinfo()
343 test_uri, authinfo = url_obj.authinfo()
343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 url_obj.query = obfuscate_qs(url_obj.query)
345 url_obj.query = obfuscate_qs(url_obj.query)
345
346
346 cleaned_uri = str(url_obj)
347 cleaned_uri = str(url_obj)
347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348
349
349 if authinfo:
350 if authinfo:
350 # create a password manager
351 # create a password manager
351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 passmgr.add_password(*authinfo)
353 passmgr.add_password(*authinfo)
353
354
354 handlers.extend((httpbasicauthhandler(passmgr),
355 handlers.extend((httpbasicauthhandler(passmgr),
355 httpdigestauthhandler(passmgr)))
356 httpdigestauthhandler(passmgr)))
356
357
357 o = urllib2.build_opener(*handlers)
358 o = urllib2.build_opener(*handlers)
358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 ('Accept', 'application/mercurial-0.1')]
360 ('Accept', 'application/mercurial-0.1')]
360
361
361 q = {"cmd": 'between'}
362 q = {"cmd": 'between'}
362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 qs = '?%s' % urllib.urlencode(q)
364 qs = '?%s' % urllib.urlencode(q)
364 cu = "%s%s" % (test_uri, qs)
365 cu = "%s%s" % (test_uri, qs)
365 req = urllib2.Request(cu, None, {})
366 req = urllib2.Request(cu, None, {})
366
367
367 try:
368 try:
368 log.debug("Trying to open URL %s", cleaned_uri)
369 log.debug("Trying to open URL %s", cleaned_uri)
369 resp = o.open(req)
370 resp = o.open(req)
370 if resp.code != 200:
371 if resp.code != 200:
371 raise exceptions.URLError('Return Code is not 200')
372 raise exceptions.URLError('Return Code is not 200')
372 except Exception as e:
373 except Exception as e:
373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 # means it cannot be cloned
375 # means it cannot be cloned
375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
376 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
376
377
377 # now check if it's a proper hg repo, but don't do it for svn
378 # now check if it's a proper hg repo, but don't do it for svn
378 try:
379 try:
379 if _proto == 'svn':
380 if _proto == 'svn':
380 pass
381 pass
381 else:
382 else:
382 # check for pure hg repos
383 # check for pure hg repos
383 log.debug(
384 log.debug(
384 "Verifying if URL is a Mercurial repository: %s",
385 "Verifying if URL is a Mercurial repository: %s",
385 cleaned_uri)
386 cleaned_uri)
386 ui = make_ui_from_config(config)
387 ui = make_ui_from_config(config)
387 peer_checker = makepeer(ui, url)
388 peer_checker = makepeer(ui, url)
388 peer_checker.lookup('tip')
389 peer_checker.lookup('tip')
389 except Exception as e:
390 except Exception as e:
390 log.warning("URL is not a valid Mercurial repository: %s",
391 log.warning("URL is not a valid Mercurial repository: %s",
391 cleaned_uri)
392 cleaned_uri)
392 raise exceptions.URLError(
393 raise exceptions.URLError(
393 "url [%s] does not look like an hg repo org_exc: %s"
394 "url [%s] does not look like an hg repo org_exc: %s"
394 % (cleaned_uri, e))
395 % (cleaned_uri, e))
395
396
396 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
397 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
397 return True
398 return True
398
399
399 @reraise_safe_exceptions
400 @reraise_safe_exceptions
400 def diff(
401 def diff(
401 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
402 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
402 context):
403 context):
403 repo = self._factory.repo(wire)
404 repo = self._factory.repo(wire)
404
405
405 if file_filter:
406 if file_filter:
406 match_filter = match(file_filter[0], '', [file_filter[1]])
407 match_filter = match(file_filter[0], '', [file_filter[1]])
407 else:
408 else:
408 match_filter = file_filter
409 match_filter = file_filter
409 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
410 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
410
411
411 try:
412 try:
412 return "".join(patch.diff(
413 return "".join(patch.diff(
413 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
414 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
414 except RepoLookupError:
415 except RepoLookupError:
415 raise exceptions.LookupException()
416 raise exceptions.LookupException()
416
417
417 @reraise_safe_exceptions
418 @reraise_safe_exceptions
418 def file_history(self, wire, revision, path, limit):
419 def file_history(self, wire, revision, path, limit):
419 repo = self._factory.repo(wire)
420 repo = self._factory.repo(wire)
420
421
421 ctx = repo[revision]
422 ctx = repo[revision]
422 fctx = ctx.filectx(path)
423 fctx = ctx.filectx(path)
423
424
424 def history_iter():
425 def history_iter():
425 limit_rev = fctx.rev()
426 limit_rev = fctx.rev()
426 for obj in reversed(list(fctx.filelog())):
427 for obj in reversed(list(fctx.filelog())):
427 obj = fctx.filectx(obj)
428 obj = fctx.filectx(obj)
428 if limit_rev >= obj.rev():
429 if limit_rev >= obj.rev():
429 yield obj
430 yield obj
430
431
431 history = []
432 history = []
432 for cnt, obj in enumerate(history_iter()):
433 for cnt, obj in enumerate(history_iter()):
433 if limit and cnt >= limit:
434 if limit and cnt >= limit:
434 break
435 break
435 history.append(hex(obj.node()))
436 history.append(hex(obj.node()))
436
437
437 return [x for x in history]
438 return [x for x in history]
438
439
439 @reraise_safe_exceptions
440 @reraise_safe_exceptions
440 def file_history_untill(self, wire, revision, path, limit):
441 def file_history_untill(self, wire, revision, path, limit):
441 repo = self._factory.repo(wire)
442 repo = self._factory.repo(wire)
442 ctx = repo[revision]
443 ctx = repo[revision]
443 fctx = ctx.filectx(path)
444 fctx = ctx.filectx(path)
444
445
445 file_log = list(fctx.filelog())
446 file_log = list(fctx.filelog())
446 if limit:
447 if limit:
447 # Limit to the last n items
448 # Limit to the last n items
448 file_log = file_log[-limit:]
449 file_log = file_log[-limit:]
449
450
450 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
451 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
451
452
452 @reraise_safe_exceptions
453 @reraise_safe_exceptions
453 def fctx_annotate(self, wire, revision, path):
454 def fctx_annotate(self, wire, revision, path):
454 repo = self._factory.repo(wire)
455 repo = self._factory.repo(wire)
455 ctx = repo[revision]
456 ctx = repo[revision]
456 fctx = ctx.filectx(path)
457 fctx = ctx.filectx(path)
457
458
458 result = []
459 result = []
459 for i, annotate_obj in enumerate(fctx.annotate(), 1):
460 for i, annotate_obj in enumerate(fctx.annotate(), 1):
460 ln_no = i
461 ln_no = i
461 sha = hex(annotate_obj.fctx.node())
462 sha = hex(annotate_obj.fctx.node())
462 content = annotate_obj.text
463 content = annotate_obj.text
463 result.append((ln_no, sha, content))
464 result.append((ln_no, sha, content))
464 return result
465 return result
465
466
466 @reraise_safe_exceptions
467 @reraise_safe_exceptions
467 def fctx_data(self, wire, revision, path):
468 def fctx_data(self, wire, revision, path):
468 repo = self._factory.repo(wire)
469 repo = self._factory.repo(wire)
469 ctx = repo[revision]
470 ctx = repo[revision]
470 fctx = ctx.filectx(path)
471 fctx = ctx.filectx(path)
471 return fctx.data()
472 return fctx.data()
472
473
473 @reraise_safe_exceptions
474 @reraise_safe_exceptions
474 def fctx_flags(self, wire, revision, path):
475 def fctx_flags(self, wire, revision, path):
475 repo = self._factory.repo(wire)
476 repo = self._factory.repo(wire)
476 ctx = repo[revision]
477 ctx = repo[revision]
477 fctx = ctx.filectx(path)
478 fctx = ctx.filectx(path)
478 return fctx.flags()
479 return fctx.flags()
479
480
480 @reraise_safe_exceptions
481 @reraise_safe_exceptions
481 def fctx_size(self, wire, revision, path):
482 def fctx_size(self, wire, revision, path):
482 repo = self._factory.repo(wire)
483 repo = self._factory.repo(wire)
483 ctx = repo[revision]
484 ctx = repo[revision]
484 fctx = ctx.filectx(path)
485 fctx = ctx.filectx(path)
485 return fctx.size()
486 return fctx.size()
486
487
487 @reraise_safe_exceptions
488 @reraise_safe_exceptions
488 def get_all_commit_ids(self, wire, name):
489 def get_all_commit_ids(self, wire, name):
489 repo = self._factory.repo(wire)
490 repo = self._factory.repo(wire)
490 revs = repo.filtered(name).changelog.index
491 revs = repo.filtered(name).changelog.index
491 return map(lambda x: hex(x[7]), revs)[:-1]
492 return map(lambda x: hex(x[7]), revs)[:-1]
492
493
493 @reraise_safe_exceptions
494 @reraise_safe_exceptions
494 def get_config_value(self, wire, section, name, untrusted=False):
495 def get_config_value(self, wire, section, name, untrusted=False):
495 repo = self._factory.repo(wire)
496 repo = self._factory.repo(wire)
496 return repo.ui.config(section, name, untrusted=untrusted)
497 return repo.ui.config(section, name, untrusted=untrusted)
497
498
498 @reraise_safe_exceptions
499 @reraise_safe_exceptions
499 def get_config_bool(self, wire, section, name, untrusted=False):
500 def get_config_bool(self, wire, section, name, untrusted=False):
500 repo = self._factory.repo(wire)
501 repo = self._factory.repo(wire)
501 return repo.ui.configbool(section, name, untrusted=untrusted)
502 return repo.ui.configbool(section, name, untrusted=untrusted)
502
503
503 @reraise_safe_exceptions
504 @reraise_safe_exceptions
504 def get_config_list(self, wire, section, name, untrusted=False):
505 def get_config_list(self, wire, section, name, untrusted=False):
505 repo = self._factory.repo(wire)
506 repo = self._factory.repo(wire)
506 return repo.ui.configlist(section, name, untrusted=untrusted)
507 return repo.ui.configlist(section, name, untrusted=untrusted)
507
508
508 @reraise_safe_exceptions
509 @reraise_safe_exceptions
509 def is_large_file(self, wire, path):
510 def is_large_file(self, wire, path):
510 return largefiles.lfutil.isstandin(path)
511 return largefiles.lfutil.isstandin(path)
511
512
512 @reraise_safe_exceptions
513 @reraise_safe_exceptions
513 def in_largefiles_store(self, wire, sha):
514 def in_largefiles_store(self, wire, sha):
514 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
515 return largefiles.lfutil.instore(repo, sha)
516 return largefiles.lfutil.instore(repo, sha)
516
517
517 @reraise_safe_exceptions
518 @reraise_safe_exceptions
518 def in_user_cache(self, wire, sha):
519 def in_user_cache(self, wire, sha):
519 repo = self._factory.repo(wire)
520 repo = self._factory.repo(wire)
520 return largefiles.lfutil.inusercache(repo.ui, sha)
521 return largefiles.lfutil.inusercache(repo.ui, sha)
521
522
522 @reraise_safe_exceptions
523 @reraise_safe_exceptions
523 def store_path(self, wire, sha):
524 def store_path(self, wire, sha):
524 repo = self._factory.repo(wire)
525 repo = self._factory.repo(wire)
525 return largefiles.lfutil.storepath(repo, sha)
526 return largefiles.lfutil.storepath(repo, sha)
526
527
527 @reraise_safe_exceptions
528 @reraise_safe_exceptions
528 def link(self, wire, sha, path):
529 def link(self, wire, sha, path):
529 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
530 largefiles.lfutil.link(
531 largefiles.lfutil.link(
531 largefiles.lfutil.usercachepath(repo.ui, sha), path)
532 largefiles.lfutil.usercachepath(repo.ui, sha), path)
532
533
533 @reraise_safe_exceptions
534 @reraise_safe_exceptions
534 def localrepository(self, wire, create=False):
535 def localrepository(self, wire, create=False):
535 self._factory.repo(wire, create=create)
536 self._factory.repo(wire, create=create)
536
537
537 @reraise_safe_exceptions
538 @reraise_safe_exceptions
538 def lookup(self, wire, revision, both):
539 def lookup(self, wire, revision, both):
539
540
540 repo = self._factory.repo(wire)
541 repo = self._factory.repo(wire)
541
542
542 if isinstance(revision, int):
543 if isinstance(revision, int):
543 # NOTE(marcink):
544 # NOTE(marcink):
544 # since Mercurial doesn't support indexes properly
545 # since Mercurial doesn't support indexes properly
545 # we need to shift accordingly by one to get proper index, e.g
546 # we need to shift accordingly by one to get proper index, e.g
546 # repo[-1] => repo[-2]
547 # repo[-1] => repo[-2]
547 # repo[0] => repo[-1]
548 # repo[0] => repo[-1]
548 # repo[1] => repo[2] we also never call repo[0] because
549 # repo[1] => repo[2] we also never call repo[0] because
549 # it's actually second commit
550 # it's actually second commit
550 if revision <= 0:
551 if revision <= 0:
551 revision = revision + -1
552 revision = revision + -1
552 else:
553 else:
553 revision = revision + 1
554 revision = revision + 1
554
555
555 try:
556 try:
556 ctx = repo[revision]
557 ctx = repo[revision]
557 except RepoLookupError:
558 except RepoLookupError:
558 raise exceptions.LookupException(revision)
559 raise exceptions.LookupException(revision)
559 except LookupError as e:
560 except LookupError as e:
560 raise exceptions.LookupException(e.name)
561 raise exceptions.LookupException(e.name)
561
562
562 if not both:
563 if not both:
563 return ctx.hex()
564 return ctx.hex()
564
565
565 ctx = repo[ctx.hex()]
566 ctx = repo[ctx.hex()]
566 return ctx.hex(), ctx.rev()
567 return ctx.hex(), ctx.rev()
567
568
568 @reraise_safe_exceptions
569 @reraise_safe_exceptions
569 def pull(self, wire, url, commit_ids=None):
570 def pull(self, wire, url, commit_ids=None):
570 repo = self._factory.repo(wire)
571 repo = self._factory.repo(wire)
571 # Disable any prompts for this repo
572 # Disable any prompts for this repo
572 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
573 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
573
574
574 remote = peer(repo, {}, url)
575 remote = peer(repo, {}, url)
575 # Disable any prompts for this remote
576 # Disable any prompts for this remote
576 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
577 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
577
578
578 if commit_ids:
579 if commit_ids:
579 commit_ids = [bin(commit_id) for commit_id in commit_ids]
580 commit_ids = [bin(commit_id) for commit_id in commit_ids]
580
581
581 return exchange.pull(
582 return exchange.pull(
582 repo, remote, heads=commit_ids, force=None).cgresult
583 repo, remote, heads=commit_ids, force=None).cgresult
583
584
584 @reraise_safe_exceptions
585 @reraise_safe_exceptions
585 def sync_push(self, wire, url):
586 def sync_push(self, wire, url):
586 if self.check_url(url, wire['config']):
587 if self.check_url(url, wire['config']):
587 repo = self._factory.repo(wire)
588 repo = self._factory.repo(wire)
588
589
589 # Disable any prompts for this repo
590 # Disable any prompts for this repo
590 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
591 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
591
592
592 bookmarks = dict(repo._bookmarks).keys()
593 bookmarks = dict(repo._bookmarks).keys()
593 remote = peer(repo, {}, url)
594 remote = peer(repo, {}, url)
594 # Disable any prompts for this remote
595 # Disable any prompts for this remote
595 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
596 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
596
597
597 return exchange.push(
598 return exchange.push(
598 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
599 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
599
600
600 @reraise_safe_exceptions
601 @reraise_safe_exceptions
601 def revision(self, wire, rev):
602 def revision(self, wire, rev):
602 repo = self._factory.repo(wire)
603 repo = self._factory.repo(wire)
603 ctx = repo[rev]
604 ctx = repo[rev]
604 return ctx.rev()
605 return ctx.rev()
605
606
606 @reraise_safe_exceptions
607 @reraise_safe_exceptions
607 def rev_range(self, wire, filter):
608 def rev_range(self, wire, filter):
608 repo = self._factory.repo(wire)
609 repo = self._factory.repo(wire)
609 revisions = [rev for rev in revrange(repo, filter)]
610 revisions = [rev for rev in revrange(repo, filter)]
610 return revisions
611 return revisions
611
612
612 @reraise_safe_exceptions
613 @reraise_safe_exceptions
613 def rev_range_hash(self, wire, node):
614 def rev_range_hash(self, wire, node):
614 repo = self._factory.repo(wire)
615 repo = self._factory.repo(wire)
615
616
616 def get_revs(repo, rev_opt):
617 def get_revs(repo, rev_opt):
617 if rev_opt:
618 if rev_opt:
618 revs = revrange(repo, rev_opt)
619 revs = revrange(repo, rev_opt)
619 if len(revs) == 0:
620 if len(revs) == 0:
620 return (nullrev, nullrev)
621 return (nullrev, nullrev)
621 return max(revs), min(revs)
622 return max(revs), min(revs)
622 else:
623 else:
623 return len(repo) - 1, 0
624 return len(repo) - 1, 0
624
625
625 stop, start = get_revs(repo, [node + ':'])
626 stop, start = get_revs(repo, [node + ':'])
626 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
627 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
627 return revs
628 return revs
628
629
629 @reraise_safe_exceptions
630 @reraise_safe_exceptions
630 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
631 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
631 other_path = kwargs.pop('other_path', None)
632 other_path = kwargs.pop('other_path', None)
632
633
633 # case when we want to compare two independent repositories
634 # case when we want to compare two independent repositories
634 if other_path and other_path != wire["path"]:
635 if other_path and other_path != wire["path"]:
635 baseui = self._factory._create_config(wire["config"])
636 baseui = self._factory._create_config(wire["config"])
636 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
637 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
637 else:
638 else:
638 repo = self._factory.repo(wire)
639 repo = self._factory.repo(wire)
639 return list(repo.revs(rev_spec, *args))
640 return list(repo.revs(rev_spec, *args))
640
641
641 @reraise_safe_exceptions
642 @reraise_safe_exceptions
642 def strip(self, wire, revision, update, backup):
643 def strip(self, wire, revision, update, backup):
643 repo = self._factory.repo(wire)
644 repo = self._factory.repo(wire)
644 ctx = repo[revision]
645 ctx = repo[revision]
645 hgext_strip(
646 hgext_strip(
646 repo.baseui, repo, ctx.node(), update=update, backup=backup)
647 repo.baseui, repo, ctx.node(), update=update, backup=backup)
647
648
648 @reraise_safe_exceptions
649 @reraise_safe_exceptions
649 def verify(self, wire,):
650 def verify(self, wire,):
650 repo = self._factory.repo(wire)
651 repo = self._factory.repo(wire)
651 baseui = self._factory._create_config(wire['config'])
652 baseui = self._factory._create_config(wire['config'])
652 baseui.setconfig('ui', 'quiet', 'false')
653 baseui.setconfig('ui', 'quiet', 'false')
653 output = io.BytesIO()
654 output = io.BytesIO()
654
655
655 def write(data, **unused_kwargs):
656 def write(data, **unused_kwargs):
656 output.write(data)
657 output.write(data)
657 baseui.write = write
658 baseui.write = write
658
659
659 repo.ui = baseui
660 repo.ui = baseui
660 verify.verify(repo)
661 verify.verify(repo)
661 return output.getvalue()
662 return output.getvalue()
662
663
663 @reraise_safe_exceptions
664 @reraise_safe_exceptions
664 def tag(self, wire, name, revision, message, local, user,
665 def tag(self, wire, name, revision, message, local, user,
665 tag_time, tag_timezone):
666 tag_time, tag_timezone):
666 repo = self._factory.repo(wire)
667 repo = self._factory.repo(wire)
667 ctx = repo[revision]
668 ctx = repo[revision]
668 node = ctx.node()
669 node = ctx.node()
669
670
670 date = (tag_time, tag_timezone)
671 date = (tag_time, tag_timezone)
671 try:
672 try:
672 hg_tag.tag(repo, name, node, message, local, user, date)
673 hg_tag.tag(repo, name, node, message, local, user, date)
673 except Abort as e:
674 except Abort as e:
674 log.exception("Tag operation aborted")
675 log.exception("Tag operation aborted")
675 # Exception can contain unicode which we convert
676 # Exception can contain unicode which we convert
676 raise exceptions.AbortException(repr(e))
677 raise exceptions.AbortException(repr(e))
677
678
678 @reraise_safe_exceptions
679 @reraise_safe_exceptions
679 def tags(self, wire):
680 def tags(self, wire):
680 repo = self._factory.repo(wire)
681 repo = self._factory.repo(wire)
681 return repo.tags()
682 return repo.tags()
682
683
683 @reraise_safe_exceptions
684 @reraise_safe_exceptions
684 def update(self, wire, node=None, clean=False):
685 def update(self, wire, node=None, clean=False):
685 repo = self._factory.repo(wire)
686 repo = self._factory.repo(wire)
686 baseui = self._factory._create_config(wire['config'])
687 baseui = self._factory._create_config(wire['config'])
687 commands.update(baseui, repo, node=node, clean=clean)
688 commands.update(baseui, repo, node=node, clean=clean)
688
689
689 @reraise_safe_exceptions
690 @reraise_safe_exceptions
690 def identify(self, wire):
691 def identify(self, wire):
691 repo = self._factory.repo(wire)
692 repo = self._factory.repo(wire)
692 baseui = self._factory._create_config(wire['config'])
693 baseui = self._factory._create_config(wire['config'])
693 output = io.BytesIO()
694 output = io.BytesIO()
694 baseui.write = output.write
695 baseui.write = output.write
695 # This is required to get a full node id
696 # This is required to get a full node id
696 baseui.debugflag = True
697 baseui.debugflag = True
697 commands.identify(baseui, repo, id=True)
698 commands.identify(baseui, repo, id=True)
698
699
699 return output.getvalue()
700 return output.getvalue()
700
701
701 @reraise_safe_exceptions
702 @reraise_safe_exceptions
702 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
703 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
703 hooks=True):
704 hooks=True):
704 repo = self._factory.repo(wire)
705 repo = self._factory.repo(wire)
705 baseui = self._factory._create_config(wire['config'], hooks=hooks)
706 baseui = self._factory._create_config(wire['config'], hooks=hooks)
706
707
707 # Mercurial internally has a lot of logic that checks ONLY if
708 # Mercurial internally has a lot of logic that checks ONLY if
708 # option is defined, we just pass those if they are defined then
709 # option is defined, we just pass those if they are defined then
709 opts = {}
710 opts = {}
710 if bookmark:
711 if bookmark:
711 opts['bookmark'] = bookmark
712 opts['bookmark'] = bookmark
712 if branch:
713 if branch:
713 opts['branch'] = branch
714 opts['branch'] = branch
714 if revision:
715 if revision:
715 opts['rev'] = revision
716 opts['rev'] = revision
716
717
717 commands.pull(baseui, repo, source, **opts)
718 commands.pull(baseui, repo, source, **opts)
718
719
719 @reraise_safe_exceptions
720 @reraise_safe_exceptions
720 def heads(self, wire, branch=None):
721 def heads(self, wire, branch=None):
721 repo = self._factory.repo(wire)
722 repo = self._factory.repo(wire)
722 baseui = self._factory._create_config(wire['config'])
723 baseui = self._factory._create_config(wire['config'])
723 output = io.BytesIO()
724 output = io.BytesIO()
724
725
725 def write(data, **unused_kwargs):
726 def write(data, **unused_kwargs):
726 output.write(data)
727 output.write(data)
727
728
728 baseui.write = write
729 baseui.write = write
729 if branch:
730 if branch:
730 args = [branch]
731 args = [branch]
731 else:
732 else:
732 args = []
733 args = []
733 commands.heads(baseui, repo, template='{node} ', *args)
734 commands.heads(baseui, repo, template='{node} ', *args)
734
735
735 return output.getvalue()
736 return output.getvalue()
736
737
737 @reraise_safe_exceptions
738 @reraise_safe_exceptions
738 def ancestor(self, wire, revision1, revision2):
739 def ancestor(self, wire, revision1, revision2):
739 repo = self._factory.repo(wire)
740 repo = self._factory.repo(wire)
740 changelog = repo.changelog
741 changelog = repo.changelog
741 lookup = repo.lookup
742 lookup = repo.lookup
742 a = changelog.ancestor(lookup(revision1), lookup(revision2))
743 a = changelog.ancestor(lookup(revision1), lookup(revision2))
743 return hex(a)
744 return hex(a)
744
745
745 @reraise_safe_exceptions
746 @reraise_safe_exceptions
746 def push(self, wire, revisions, dest_path, hooks=True,
747 def push(self, wire, revisions, dest_path, hooks=True,
747 push_branches=False):
748 push_branches=False):
748 repo = self._factory.repo(wire)
749 repo = self._factory.repo(wire)
749 baseui = self._factory._create_config(wire['config'], hooks=hooks)
750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
750 commands.push(baseui, repo, dest=dest_path, rev=revisions,
751 commands.push(baseui, repo, dest=dest_path, rev=revisions,
751 new_branch=push_branches)
752 new_branch=push_branches)
752
753
753 @reraise_safe_exceptions
754 @reraise_safe_exceptions
754 def merge(self, wire, revision):
755 def merge(self, wire, revision):
755 repo = self._factory.repo(wire)
756 repo = self._factory.repo(wire)
756 baseui = self._factory._create_config(wire['config'])
757 baseui = self._factory._create_config(wire['config'])
757 repo.ui.setconfig('ui', 'merge', 'internal:dump')
758 repo.ui.setconfig('ui', 'merge', 'internal:dump')
758
759
759 # In case of sub repositories are used mercurial prompts the user in
760 # In case of sub repositories are used mercurial prompts the user in
760 # case of merge conflicts or different sub repository sources. By
761 # case of merge conflicts or different sub repository sources. By
761 # setting the interactive flag to `False` mercurial doesn't prompt the
762 # setting the interactive flag to `False` mercurial doesn't prompt the
762 # used but instead uses a default value.
763 # used but instead uses a default value.
763 repo.ui.setconfig('ui', 'interactive', False)
764 repo.ui.setconfig('ui', 'interactive', False)
764
765
765 commands.merge(baseui, repo, rev=revision)
766 commands.merge(baseui, repo, rev=revision)
766
767
767 @reraise_safe_exceptions
768 @reraise_safe_exceptions
768 def commit(self, wire, message, username, close_branch=False):
769 def commit(self, wire, message, username, close_branch=False):
769 repo = self._factory.repo(wire)
770 repo = self._factory.repo(wire)
770 baseui = self._factory._create_config(wire['config'])
771 baseui = self._factory._create_config(wire['config'])
771 repo.ui.setconfig('ui', 'username', username)
772 repo.ui.setconfig('ui', 'username', username)
772 commands.commit(baseui, repo, message=message, close_branch=close_branch)
773 commands.commit(baseui, repo, message=message, close_branch=close_branch)
773
774
774 @reraise_safe_exceptions
775 @reraise_safe_exceptions
775 def rebase(self, wire, source=None, dest=None, abort=False):
776 def rebase(self, wire, source=None, dest=None, abort=False):
776 repo = self._factory.repo(wire)
777 repo = self._factory.repo(wire)
777 baseui = self._factory._create_config(wire['config'])
778 baseui = self._factory._create_config(wire['config'])
778 repo.ui.setconfig('ui', 'merge', 'internal:dump')
779 repo.ui.setconfig('ui', 'merge', 'internal:dump')
779 rebase.rebase(
780 rebase.rebase(
780 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
781 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
781
782
782 @reraise_safe_exceptions
783 @reraise_safe_exceptions
783 def bookmark(self, wire, bookmark, revision=None):
784 def bookmark(self, wire, bookmark, revision=None):
784 repo = self._factory.repo(wire)
785 repo = self._factory.repo(wire)
785 baseui = self._factory._create_config(wire['config'])
786 baseui = self._factory._create_config(wire['config'])
786 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
787 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
787
788
788 @reraise_safe_exceptions
789 @reraise_safe_exceptions
789 def install_hooks(self, wire, force=False):
790 def install_hooks(self, wire, force=False):
790 # we don't need any special hooks for Mercurial
791 # we don't need any special hooks for Mercurial
791 pass
792 pass
@@ -1,490 +1,523 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import base64
19 import base64
20 import locale
20 import locale
21 import logging
21 import logging
22 import uuid
22 import uuid
23 import wsgiref.util
23 import wsgiref.util
24 import traceback
24 import traceback
25 from itertools import chain
25 from itertools import chain
26
26
27 import simplejson as json
27 import simplejson as json
28 import msgpack
28 import msgpack
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
31 from pyramid.config import Configurator
29 from pyramid.config import Configurator
30 from pyramid.settings import asbool, aslist
32 from pyramid.wsgi import wsgiapp
31 from pyramid.wsgi import wsgiapp
33 from pyramid.compat import configparser
32 from pyramid.compat import configparser
34
33
35 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
34 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
36 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
35 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
37 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
36 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
38 from vcsserver.echo_stub.echo_app import EchoApp
37 from vcsserver.echo_stub.echo_app import EchoApp
39 from vcsserver.exceptions import HTTPRepoLocked
38 from vcsserver.exceptions import HTTPRepoLocked
40 from vcsserver.server import VcsServer
39 from vcsserver.server import VcsServer
41
40
42 try:
41 try:
43 from vcsserver.git import GitFactory, GitRemote
42 from vcsserver.git import GitFactory, GitRemote
44 except ImportError:
43 except ImportError:
45 GitFactory = None
44 GitFactory = None
46 GitRemote = None
45 GitRemote = None
47
46
48 try:
47 try:
49 from vcsserver.hg import MercurialFactory, HgRemote
48 from vcsserver.hg import MercurialFactory, HgRemote
50 except ImportError:
49 except ImportError:
51 MercurialFactory = None
50 MercurialFactory = None
52 HgRemote = None
51 HgRemote = None
53
52
54 try:
53 try:
55 from vcsserver.svn import SubversionFactory, SvnRemote
54 from vcsserver.svn import SubversionFactory, SvnRemote
56 except ImportError:
55 except ImportError:
57 SubversionFactory = None
56 SubversionFactory = None
58 SvnRemote = None
57 SvnRemote = None
59
58
60 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
61
60
62
61
63 def _is_request_chunked(environ):
62 def _is_request_chunked(environ):
64 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
63 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
65 return stream
64 return stream
66
65
67
66
67 def _int_setting(settings, name, default):
68 settings[name] = int(settings.get(name, default))
69
70
71 def _bool_setting(settings, name, default):
72 input_val = settings.get(name, default)
73 if isinstance(input_val, unicode):
74 input_val = input_val.encode('utf8')
75 settings[name] = asbool(input_val)
76
77
78 def _list_setting(settings, name, default):
79 raw_value = settings.get(name, default)
80
81 # Otherwise we assume it uses pyramids space/newline separation.
82 settings[name] = aslist(raw_value)
83
84
85 def _string_setting(settings, name, default, lower=True):
86 value = settings.get(name, default)
87 if lower:
88 value = value.lower()
89 settings[name] = value
90
91
68 class VCS(object):
92 class VCS(object):
69 def __init__(self, locale=None, cache_config=None):
93 def __init__(self, locale=None, cache_config=None):
70 self.locale = locale
94 self.locale = locale
71 self.cache_config = cache_config
95 self.cache_config = cache_config
72 self._configure_locale()
96 self._configure_locale()
73 self._initialize_cache()
74
97
75 if GitFactory and GitRemote:
98 if GitFactory and GitRemote:
76 git_repo_cache = self.cache.get_cache_region(
99 git_factory = GitFactory()
77 'git', region='repo_object')
78 git_factory = GitFactory(git_repo_cache)
79 self._git_remote = GitRemote(git_factory)
100 self._git_remote = GitRemote(git_factory)
80 else:
101 else:
81 log.info("Git client import failed")
102 log.info("Git client import failed")
82
103
83 if MercurialFactory and HgRemote:
104 if MercurialFactory and HgRemote:
84 hg_repo_cache = self.cache.get_cache_region(
105 hg_factory = MercurialFactory()
85 'hg', region='repo_object')
86 hg_factory = MercurialFactory(hg_repo_cache)
87 self._hg_remote = HgRemote(hg_factory)
106 self._hg_remote = HgRemote(hg_factory)
88 else:
107 else:
89 log.info("Mercurial client import failed")
108 log.info("Mercurial client import failed")
90
109
91 if SubversionFactory and SvnRemote:
110 if SubversionFactory and SvnRemote:
92 svn_repo_cache = self.cache.get_cache_region(
111 svn_factory = SubversionFactory()
93 'svn', region='repo_object')
112
94 svn_factory = SubversionFactory(svn_repo_cache)
95 # hg factory is used for svn url validation
113 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
114 hg_factory = MercurialFactory()
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
99 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
115 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
100 else:
116 else:
101 log.info("Subversion client import failed")
117 log.info("Subversion client import failed")
102
118
103 self._vcsserver = VcsServer()
119 self._vcsserver = VcsServer()
104
120
105 def _initialize_cache(self):
106 cache_config = parse_cache_config_options(self.cache_config)
107 log.info('Initializing beaker cache: %s' % cache_config)
108 self.cache = CacheManager(**cache_config)
109
110 def _configure_locale(self):
121 def _configure_locale(self):
111 if self.locale:
122 if self.locale:
112 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
123 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
113 else:
124 else:
114 log.info(
125 log.info(
115 'Configuring locale subsystem based on environment variables')
126 'Configuring locale subsystem based on environment variables')
116 try:
127 try:
117 # If self.locale is the empty string, then the locale
128 # If self.locale is the empty string, then the locale
118 # module will use the environment variables. See the
129 # module will use the environment variables. See the
119 # documentation of the package `locale`.
130 # documentation of the package `locale`.
120 locale.setlocale(locale.LC_ALL, self.locale)
131 locale.setlocale(locale.LC_ALL, self.locale)
121
132
122 language_code, encoding = locale.getlocale()
133 language_code, encoding = locale.getlocale()
123 log.info(
134 log.info(
124 'Locale set to language code "%s" with encoding "%s".',
135 'Locale set to language code "%s" with encoding "%s".',
125 language_code, encoding)
136 language_code, encoding)
126 except locale.Error:
137 except locale.Error:
127 log.exception(
138 log.exception(
128 'Cannot set locale, not configuring the locale system')
139 'Cannot set locale, not configuring the locale system')
129
140
130
141
131 class WsgiProxy(object):
142 class WsgiProxy(object):
132 def __init__(self, wsgi):
143 def __init__(self, wsgi):
133 self.wsgi = wsgi
144 self.wsgi = wsgi
134
145
135 def __call__(self, environ, start_response):
146 def __call__(self, environ, start_response):
136 input_data = environ['wsgi.input'].read()
147 input_data = environ['wsgi.input'].read()
137 input_data = msgpack.unpackb(input_data)
148 input_data = msgpack.unpackb(input_data)
138
149
139 error = None
150 error = None
140 try:
151 try:
141 data, status, headers = self.wsgi.handle(
152 data, status, headers = self.wsgi.handle(
142 input_data['environment'], input_data['input_data'],
153 input_data['environment'], input_data['input_data'],
143 *input_data['args'], **input_data['kwargs'])
154 *input_data['args'], **input_data['kwargs'])
144 except Exception as e:
155 except Exception as e:
145 data, status, headers = [], None, None
156 data, status, headers = [], None, None
146 error = {
157 error = {
147 'message': str(e),
158 'message': str(e),
148 '_vcs_kind': getattr(e, '_vcs_kind', None)
159 '_vcs_kind': getattr(e, '_vcs_kind', None)
149 }
160 }
150
161
151 start_response(200, {})
162 start_response(200, {})
152 return self._iterator(error, status, headers, data)
163 return self._iterator(error, status, headers, data)
153
164
154 def _iterator(self, error, status, headers, data):
165 def _iterator(self, error, status, headers, data):
155 initial_data = [
166 initial_data = [
156 error,
167 error,
157 status,
168 status,
158 headers,
169 headers,
159 ]
170 ]
160
171
161 for d in chain(initial_data, data):
172 for d in chain(initial_data, data):
162 yield msgpack.packb(d)
173 yield msgpack.packb(d)
163
174
164
175
165 class HTTPApplication(object):
176 class HTTPApplication(object):
166 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
177 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
167
178
168 remote_wsgi = remote_wsgi
179 remote_wsgi = remote_wsgi
169 _use_echo_app = False
180 _use_echo_app = False
170
181
171 def __init__(self, settings=None, global_config=None):
182 def __init__(self, settings=None, global_config=None):
183 self._sanitize_settings_and_apply_defaults(settings)
184
172 self.config = Configurator(settings=settings)
185 self.config = Configurator(settings=settings)
173 self.global_config = global_config
186 self.global_config = global_config
187 self.config.include('vcsserver.lib.rc_cache')
174
188
175 locale = settings.get('locale', '') or 'en_US.UTF-8'
189 locale = settings.get('locale', '') or 'en_US.UTF-8'
176 vcs = VCS(locale=locale, cache_config=settings)
190 vcs = VCS(locale=locale, cache_config=settings)
177 self._remotes = {
191 self._remotes = {
178 'hg': vcs._hg_remote,
192 'hg': vcs._hg_remote,
179 'git': vcs._git_remote,
193 'git': vcs._git_remote,
180 'svn': vcs._svn_remote,
194 'svn': vcs._svn_remote,
181 'server': vcs._vcsserver,
195 'server': vcs._vcsserver,
182 }
196 }
183 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
197 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
184 self._use_echo_app = True
198 self._use_echo_app = True
185 log.warning("Using EchoApp for VCS operations.")
199 log.warning("Using EchoApp for VCS operations.")
186 self.remote_wsgi = remote_wsgi_stub
200 self.remote_wsgi = remote_wsgi_stub
187 self._configure_settings(settings)
201 self._configure_settings(settings)
188 self._configure()
202 self._configure()
189
203
190 def _configure_settings(self, app_settings):
204 def _configure_settings(self, app_settings):
191 """
205 """
192 Configure the settings module.
206 Configure the settings module.
193 """
207 """
194 git_path = app_settings.get('git_path', None)
208 git_path = app_settings.get('git_path', None)
195 if git_path:
209 if git_path:
196 settings.GIT_EXECUTABLE = git_path
210 settings.GIT_EXECUTABLE = git_path
197 binary_dir = app_settings.get('core.binary_dir', None)
211 binary_dir = app_settings.get('core.binary_dir', None)
198 if binary_dir:
212 if binary_dir:
199 settings.BINARY_DIR = binary_dir
213 settings.BINARY_DIR = binary_dir
200
214
215 def _sanitize_settings_and_apply_defaults(self, settings):
216 # repo_object cache
217 _string_setting(
218 settings,
219 'rc_cache.repo_object.backend',
220 'dogpile.cache.rc.memory_lru')
221 _int_setting(
222 settings,
223 'rc_cache.repo_object.expiration_time',
224 300)
225 _int_setting(
226 settings,
227 'rc_cache.repo_object.max_size',
228 1024)
229
201 def _configure(self):
230 def _configure(self):
202 self.config.add_renderer(
231 self.config.add_renderer(
203 name='msgpack',
232 name='msgpack',
204 factory=self._msgpack_renderer_factory)
233 factory=self._msgpack_renderer_factory)
205
234
206 self.config.add_route('service', '/_service')
235 self.config.add_route('service', '/_service')
207 self.config.add_route('status', '/status')
236 self.config.add_route('status', '/status')
208 self.config.add_route('hg_proxy', '/proxy/hg')
237 self.config.add_route('hg_proxy', '/proxy/hg')
209 self.config.add_route('git_proxy', '/proxy/git')
238 self.config.add_route('git_proxy', '/proxy/git')
210 self.config.add_route('vcs', '/{backend}')
239 self.config.add_route('vcs', '/{backend}')
211 self.config.add_route('stream_git', '/stream/git/*repo_name')
240 self.config.add_route('stream_git', '/stream/git/*repo_name')
212 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
241 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
213
242
214 self.config.add_view(
243 self.config.add_view(
215 self.status_view, route_name='status', renderer='json')
244 self.status_view, route_name='status', renderer='json')
216 self.config.add_view(
245 self.config.add_view(
217 self.service_view, route_name='service', renderer='msgpack')
246 self.service_view, route_name='service', renderer='msgpack')
218
247
219 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
248 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
220 self.config.add_view(self.git_proxy(), route_name='git_proxy')
249 self.config.add_view(self.git_proxy(), route_name='git_proxy')
221 self.config.add_view(
250 self.config.add_view(
222 self.vcs_view, route_name='vcs', renderer='msgpack',
251 self.vcs_view, route_name='vcs', renderer='msgpack',
223 custom_predicates=[self.is_vcs_view])
252 custom_predicates=[self.is_vcs_view])
224
253
225 self.config.add_view(self.hg_stream(), route_name='stream_hg')
254 self.config.add_view(self.hg_stream(), route_name='stream_hg')
226 self.config.add_view(self.git_stream(), route_name='stream_git')
255 self.config.add_view(self.git_stream(), route_name='stream_git')
227
256
228 def notfound(request):
257 def notfound(request):
229 return {'status': '404 NOT FOUND'}
258 return {'status': '404 NOT FOUND'}
230 self.config.add_notfound_view(notfound, renderer='json')
259 self.config.add_notfound_view(notfound, renderer='json')
231
260
232 self.config.add_view(self.handle_vcs_exception, context=Exception)
261 self.config.add_view(self.handle_vcs_exception, context=Exception)
233
262
234 self.config.add_tween(
263 self.config.add_tween(
235 'vcsserver.tweens.RequestWrapperTween',
264 'vcsserver.tweens.RequestWrapperTween',
236 )
265 )
237
266
238 def wsgi_app(self):
267 def wsgi_app(self):
239 return self.config.make_wsgi_app()
268 return self.config.make_wsgi_app()
240
269
241 def vcs_view(self, request):
270 def vcs_view(self, request):
242 remote = self._remotes[request.matchdict['backend']]
271 remote = self._remotes[request.matchdict['backend']]
243 payload = msgpack.unpackb(request.body, use_list=True)
272 payload = msgpack.unpackb(request.body, use_list=True)
244 method = payload.get('method')
273 method = payload.get('method')
245 params = payload.get('params')
274 params = payload.get('params')
246 wire = params.get('wire')
275 wire = params.get('wire')
247 args = params.get('args')
276 args = params.get('args')
248 kwargs = params.get('kwargs')
277 kwargs = params.get('kwargs')
278 context_uid = None
279
249 if wire:
280 if wire:
250 try:
281 try:
251 wire['context'] = uuid.UUID(wire['context'])
282 wire['context'] = context_uid = uuid.UUID(wire['context'])
252 except KeyError:
283 except KeyError:
253 pass
284 pass
254 args.insert(0, wire)
285 args.insert(0, wire)
255
286
256 log.debug('method called:%s with kwargs:%s', method, kwargs)
287 log.debug('method called:%s with kwargs:%s context_uid: %s',
288 method, kwargs, context_uid)
257 try:
289 try:
258 resp = getattr(remote, method)(*args, **kwargs)
290 resp = getattr(remote, method)(*args, **kwargs)
259 except Exception as e:
291 except Exception as e:
260 tb_info = traceback.format_exc()
292 tb_info = traceback.format_exc()
261
293
262 type_ = e.__class__.__name__
294 type_ = e.__class__.__name__
263 if type_ not in self.ALLOWED_EXCEPTIONS:
295 if type_ not in self.ALLOWED_EXCEPTIONS:
264 type_ = None
296 type_ = None
265
297
266 resp = {
298 resp = {
267 'id': payload.get('id'),
299 'id': payload.get('id'),
268 'error': {
300 'error': {
269 'message': e.message,
301 'message': e.message,
270 'traceback': tb_info,
302 'traceback': tb_info,
271 'type': type_
303 'type': type_
272 }
304 }
273 }
305 }
274 try:
306 try:
275 resp['error']['_vcs_kind'] = e._vcs_kind
307 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
276 except AttributeError:
308 except AttributeError:
277 pass
309 pass
278 else:
310 else:
279 resp = {
311 resp = {
280 'id': payload.get('id'),
312 'id': payload.get('id'),
281 'result': resp
313 'result': resp
282 }
314 }
283
315
284 return resp
316 return resp
285
317
286 def status_view(self, request):
318 def status_view(self, request):
287 import vcsserver
319 import vcsserver
288 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
320 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
289 'pid': os.getpid()}
321 'pid': os.getpid()}
290
322
291 def service_view(self, request):
323 def service_view(self, request):
292 import vcsserver
324 import vcsserver
293
325
294 payload = msgpack.unpackb(request.body, use_list=True)
326 payload = msgpack.unpackb(request.body, use_list=True)
295
327
296 try:
328 try:
297 path = self.global_config['__file__']
329 path = self.global_config['__file__']
298 config = configparser.ConfigParser()
330 config = configparser.ConfigParser()
299 config.read(path)
331 config.read(path)
300 parsed_ini = config
332 parsed_ini = config
301 if parsed_ini.has_section('server:main'):
333 if parsed_ini.has_section('server:main'):
302 parsed_ini = dict(parsed_ini.items('server:main'))
334 parsed_ini = dict(parsed_ini.items('server:main'))
303 except Exception:
335 except Exception:
304 log.exception('Failed to read .ini file for display')
336 log.exception('Failed to read .ini file for display')
305 parsed_ini = {}
337 parsed_ini = {}
306
338
307 resp = {
339 resp = {
308 'id': payload.get('id'),
340 'id': payload.get('id'),
309 'result': dict(
341 'result': dict(
310 version=vcsserver.__version__,
342 version=vcsserver.__version__,
311 config=parsed_ini,
343 config=parsed_ini,
312 payload=payload,
344 payload=payload,
313 )
345 )
314 }
346 }
315 return resp
347 return resp
316
348
317 def _msgpack_renderer_factory(self, info):
349 def _msgpack_renderer_factory(self, info):
318 def _render(value, system):
350 def _render(value, system):
319 value = msgpack.packb(value)
351 value = msgpack.packb(value)
320 request = system.get('request')
352 request = system.get('request')
321 if request is not None:
353 if request is not None:
322 response = request.response
354 response = request.response
323 ct = response.content_type
355 ct = response.content_type
324 if ct == response.default_content_type:
356 if ct == response.default_content_type:
325 response.content_type = 'application/x-msgpack'
357 response.content_type = 'application/x-msgpack'
326 return value
358 return value
327 return _render
359 return _render
328
360
329 def set_env_from_config(self, environ, config):
361 def set_env_from_config(self, environ, config):
330 dict_conf = {}
362 dict_conf = {}
331 try:
363 try:
332 for elem in config:
364 for elem in config:
333 if elem[0] == 'rhodecode':
365 if elem[0] == 'rhodecode':
334 dict_conf = json.loads(elem[2])
366 dict_conf = json.loads(elem[2])
335 break
367 break
336 except Exception:
368 except Exception:
337 log.exception('Failed to fetch SCM CONFIG')
369 log.exception('Failed to fetch SCM CONFIG')
338 return
370 return
339
371
340 username = dict_conf.get('username')
372 username = dict_conf.get('username')
341 if username:
373 if username:
342 environ['REMOTE_USER'] = username
374 environ['REMOTE_USER'] = username
343 # mercurial specific, some extension api rely on this
375 # mercurial specific, some extension api rely on this
344 environ['HGUSER'] = username
376 environ['HGUSER'] = username
345
377
346 ip = dict_conf.get('ip')
378 ip = dict_conf.get('ip')
347 if ip:
379 if ip:
348 environ['REMOTE_HOST'] = ip
380 environ['REMOTE_HOST'] = ip
349
381
350 if _is_request_chunked(environ):
382 if _is_request_chunked(environ):
351 # set the compatibility flag for webob
383 # set the compatibility flag for webob
352 environ['wsgi.input_terminated'] = True
384 environ['wsgi.input_terminated'] = True
353
385
354 def hg_proxy(self):
386 def hg_proxy(self):
355 @wsgiapp
387 @wsgiapp
356 def _hg_proxy(environ, start_response):
388 def _hg_proxy(environ, start_response):
357 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
389 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
358 return app(environ, start_response)
390 return app(environ, start_response)
359 return _hg_proxy
391 return _hg_proxy
360
392
361 def git_proxy(self):
393 def git_proxy(self):
362 @wsgiapp
394 @wsgiapp
363 def _git_proxy(environ, start_response):
395 def _git_proxy(environ, start_response):
364 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
396 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
365 return app(environ, start_response)
397 return app(environ, start_response)
366 return _git_proxy
398 return _git_proxy
367
399
368 def hg_stream(self):
400 def hg_stream(self):
369 if self._use_echo_app:
401 if self._use_echo_app:
370 @wsgiapp
402 @wsgiapp
371 def _hg_stream(environ, start_response):
403 def _hg_stream(environ, start_response):
372 app = EchoApp('fake_path', 'fake_name', None)
404 app = EchoApp('fake_path', 'fake_name', None)
373 return app(environ, start_response)
405 return app(environ, start_response)
374 return _hg_stream
406 return _hg_stream
375 else:
407 else:
376 @wsgiapp
408 @wsgiapp
377 def _hg_stream(environ, start_response):
409 def _hg_stream(environ, start_response):
378 log.debug('http-app: handling hg stream')
410 log.debug('http-app: handling hg stream')
379 repo_path = environ['HTTP_X_RC_REPO_PATH']
411 repo_path = environ['HTTP_X_RC_REPO_PATH']
380 repo_name = environ['HTTP_X_RC_REPO_NAME']
412 repo_name = environ['HTTP_X_RC_REPO_NAME']
381 packed_config = base64.b64decode(
413 packed_config = base64.b64decode(
382 environ['HTTP_X_RC_REPO_CONFIG'])
414 environ['HTTP_X_RC_REPO_CONFIG'])
383 config = msgpack.unpackb(packed_config)
415 config = msgpack.unpackb(packed_config)
384 app = scm_app.create_hg_wsgi_app(
416 app = scm_app.create_hg_wsgi_app(
385 repo_path, repo_name, config)
417 repo_path, repo_name, config)
386
418
387 # Consistent path information for hgweb
419 # Consistent path information for hgweb
388 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
420 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
389 environ['REPO_NAME'] = repo_name
421 environ['REPO_NAME'] = repo_name
390 self.set_env_from_config(environ, config)
422 self.set_env_from_config(environ, config)
391
423
392 log.debug('http-app: starting app handler '
424 log.debug('http-app: starting app handler '
393 'with %s and process request', app)
425 'with %s and process request', app)
394 return app(environ, ResponseFilter(start_response))
426 return app(environ, ResponseFilter(start_response))
395 return _hg_stream
427 return _hg_stream
396
428
397 def git_stream(self):
429 def git_stream(self):
398 if self._use_echo_app:
430 if self._use_echo_app:
399 @wsgiapp
431 @wsgiapp
400 def _git_stream(environ, start_response):
432 def _git_stream(environ, start_response):
401 app = EchoApp('fake_path', 'fake_name', None)
433 app = EchoApp('fake_path', 'fake_name', None)
402 return app(environ, start_response)
434 return app(environ, start_response)
403 return _git_stream
435 return _git_stream
404 else:
436 else:
405 @wsgiapp
437 @wsgiapp
406 def _git_stream(environ, start_response):
438 def _git_stream(environ, start_response):
407 log.debug('http-app: handling git stream')
439 log.debug('http-app: handling git stream')
408 repo_path = environ['HTTP_X_RC_REPO_PATH']
440 repo_path = environ['HTTP_X_RC_REPO_PATH']
409 repo_name = environ['HTTP_X_RC_REPO_NAME']
441 repo_name = environ['HTTP_X_RC_REPO_NAME']
410 packed_config = base64.b64decode(
442 packed_config = base64.b64decode(
411 environ['HTTP_X_RC_REPO_CONFIG'])
443 environ['HTTP_X_RC_REPO_CONFIG'])
412 config = msgpack.unpackb(packed_config)
444 config = msgpack.unpackb(packed_config)
413
445
414 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
446 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
415 self.set_env_from_config(environ, config)
447 self.set_env_from_config(environ, config)
416
448
417 content_type = environ.get('CONTENT_TYPE', '')
449 content_type = environ.get('CONTENT_TYPE', '')
418
450
419 path = environ['PATH_INFO']
451 path = environ['PATH_INFO']
420 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
452 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
421 log.debug(
453 log.debug(
422 'LFS: Detecting if request `%s` is LFS server path based '
454 'LFS: Detecting if request `%s` is LFS server path based '
423 'on content type:`%s`, is_lfs:%s',
455 'on content type:`%s`, is_lfs:%s',
424 path, content_type, is_lfs_request)
456 path, content_type, is_lfs_request)
425
457
426 if not is_lfs_request:
458 if not is_lfs_request:
427 # fallback detection by path
459 # fallback detection by path
428 if GIT_LFS_PROTO_PAT.match(path):
460 if GIT_LFS_PROTO_PAT.match(path):
429 is_lfs_request = True
461 is_lfs_request = True
430 log.debug(
462 log.debug(
431 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
463 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
432 path, is_lfs_request)
464 path, is_lfs_request)
433
465
434 if is_lfs_request:
466 if is_lfs_request:
435 app = scm_app.create_git_lfs_wsgi_app(
467 app = scm_app.create_git_lfs_wsgi_app(
436 repo_path, repo_name, config)
468 repo_path, repo_name, config)
437 else:
469 else:
438 app = scm_app.create_git_wsgi_app(
470 app = scm_app.create_git_wsgi_app(
439 repo_path, repo_name, config)
471 repo_path, repo_name, config)
440
472
441 log.debug('http-app: starting app handler '
473 log.debug('http-app: starting app handler '
442 'with %s and process request', app)
474 'with %s and process request', app)
443
475
444 return app(environ, start_response)
476 return app(environ, start_response)
445
477
446 return _git_stream
478 return _git_stream
447
479
448 def is_vcs_view(self, context, request):
480 def is_vcs_view(self, context, request):
449 """
481 """
450 View predicate that returns true if given backend is supported by
482 View predicate that returns true if given backend is supported by
451 defined remotes.
483 defined remotes.
452 """
484 """
453 backend = request.matchdict.get('backend')
485 backend = request.matchdict.get('backend')
454 return backend in self._remotes
486 return backend in self._remotes
455
487
456 def handle_vcs_exception(self, exception, request):
488 def handle_vcs_exception(self, exception, request):
457 _vcs_kind = getattr(exception, '_vcs_kind', '')
489 _vcs_kind = getattr(exception, '_vcs_kind', '')
458 if _vcs_kind == 'repo_locked':
490 if _vcs_kind == 'repo_locked':
459 # Get custom repo-locked status code if present.
491 # Get custom repo-locked status code if present.
460 status_code = request.headers.get('X-RC-Locked-Status-Code')
492 status_code = request.headers.get('X-RC-Locked-Status-Code')
461 return HTTPRepoLocked(
493 return HTTPRepoLocked(
462 title=exception.message, status_code=status_code)
494 title=exception.message, status_code=status_code)
463 traceback_info = 'unavailable'
495 traceback_info = 'unavailable'
464 if request.exc_info:
496 if request.exc_info:
465 traceback_info = traceback.format_exc(request.exc_info[2])
497 traceback_info = traceback.format_exc(request.exc_info[2])
466
498
467 log.error(
499 log.error(
468 'error occurred handling this request for path: %s, \n tb: %s',
500 'error occurred handling this request for path: %s, \n tb: %s',
469 request.path, traceback_info)
501 request.path, traceback_info)
470 raise exception
502 raise exception
471
503
472
504
473 class ResponseFilter(object):
505 class ResponseFilter(object):
474
506
475 def __init__(self, start_response):
507 def __init__(self, start_response):
476 self._start_response = start_response
508 self._start_response = start_response
477
509
478 def __call__(self, status, response_headers, exc_info=None):
510 def __call__(self, status, response_headers, exc_info=None):
479 headers = tuple(
511 headers = tuple(
480 (h, v) for h, v in response_headers
512 (h, v) for h, v in response_headers
481 if not wsgiref.util.is_hop_by_hop(h))
513 if not wsgiref.util.is_hop_by_hop(h))
482 return self._start_response(status, headers, exc_info)
514 return self._start_response(status, headers, exc_info)
483
515
484
516
485 def main(global_config, **settings):
517 def main(global_config, **settings):
486 if MercurialFactory:
518 if MercurialFactory:
487 hgpatches.patch_largefiles_capabilities()
519 hgpatches.patch_largefiles_capabilities()
488 hgpatches.patch_subrepo_type_mapping()
520 hgpatches.patch_subrepo_type_mapping()
521
489 app = HTTPApplication(settings=settings, global_config=global_config)
522 app = HTTPApplication(settings=settings, global_config=global_config)
490 return app.wsgi_app()
523 return app.wsgi_app()
@@ -1,689 +1,705 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 from urllib2 import URLError
21 from urllib2 import URLError
22 import logging
22 import logging
23 import posixpath as vcspath
23 import posixpath as vcspath
24 import StringIO
24 import StringIO
25 import urllib
25 import urllib
26 import traceback
26 import traceback
27
27
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.delta
30 import svn.delta
31 import svn.diff
31 import svn.diff
32 import svn.fs
32 import svn.fs
33 import svn.repos
33 import svn.repos
34
34
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 from vcsserver.base import RepoFactory, raise_from_original
36 from vcsserver.base import RepoFactory, raise_from_original
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40
40
41 # Set of svn compatible version flags.
41 # Set of svn compatible version flags.
42 # Compare with subversion/svnadmin/svnadmin.c
42 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = set([
43 svn_compatible_versions = {
44 'pre-1.4-compatible',
44 'pre-1.4-compatible',
45 'pre-1.5-compatible',
45 'pre-1.5-compatible',
46 'pre-1.6-compatible',
46 'pre-1.6-compatible',
47 'pre-1.8-compatible',
47 'pre-1.8-compatible',
48 'pre-1.9-compatible',
48 'pre-1.9-compatible'
49 ])
49 }
50
50
51 svn_compatible_versions_map = {
51 svn_compatible_versions_map = {
52 'pre-1.4-compatible': '1.3',
52 'pre-1.4-compatible': '1.3',
53 'pre-1.5-compatible': '1.4',
53 'pre-1.5-compatible': '1.4',
54 'pre-1.6-compatible': '1.5',
54 'pre-1.6-compatible': '1.5',
55 'pre-1.8-compatible': '1.7',
55 'pre-1.8-compatible': '1.7',
56 'pre-1.9-compatible': '1.8',
56 'pre-1.9-compatible': '1.8',
57 }
57 }
58
58
59
59
60 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
61 """Decorator for converting svn exceptions to something neutral."""
61 """Decorator for converting svn exceptions to something neutral."""
62 def wrapper(*args, **kwargs):
62 def wrapper(*args, **kwargs):
63 try:
63 try:
64 return func(*args, **kwargs)
64 return func(*args, **kwargs)
65 except Exception as e:
65 except Exception as e:
66 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in hg remote call")
67 log.exception("Unhandled exception in hg remote call")
68 raise_from_original(exceptions.UnhandledException)
68 raise_from_original(exceptions.UnhandledException)
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class SubversionFactory(RepoFactory):
73 class SubversionFactory(RepoFactory):
74 repo_type = 'svn'
74
75
75 def _create_repo(self, wire, create, compatible_version):
76 def _create_repo(self, wire, create, compatible_version):
76 path = svn.core.svn_path_canonicalize(wire['path'])
77 path = svn.core.svn_path_canonicalize(wire['path'])
77 if create:
78 if create:
78 fs_config = {'compatible-version': '1.9'}
79 fs_config = {'compatible-version': '1.9'}
79 if compatible_version:
80 if compatible_version:
80 if compatible_version not in svn_compatible_versions:
81 if compatible_version not in svn_compatible_versions:
81 raise Exception('Unknown SVN compatible version "{}"'
82 raise Exception('Unknown SVN compatible version "{}"'
82 .format(compatible_version))
83 .format(compatible_version))
83 fs_config['compatible-version'] = \
84 fs_config['compatible-version'] = \
84 svn_compatible_versions_map[compatible_version]
85 svn_compatible_versions_map[compatible_version]
85
86
86 log.debug('Create SVN repo with config "%s"', fs_config)
87 log.debug('Create SVN repo with config "%s"', fs_config)
87 repo = svn.repos.create(path, "", "", None, fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
88 else:
89 else:
89 repo = svn.repos.open(path)
90 repo = svn.repos.open(path)
90
91
91 log.debug('Got SVN object: %s', repo)
92 log.debug('Got SVN object: %s', repo)
92 return repo
93 return repo
93
94
94 def repo(self, wire, create=False, compatible_version=None):
95 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
96 """
97 Get a repository instance for the given path.
98
99 Uses internally the low level beaker API since the decorators introduce
100 significant overhead.
101 """
102 region = self._cache_region
103 context = wire.get('context', None)
104 repo_path = wire.get('path', '')
105 context_uid = '{}'.format(context)
106 cache = wire.get('cache', True)
107 cache_on = context and cache
108
109 @region.conditional_cache_on_arguments(condition=cache_on)
110 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
96 return self._create_repo(wire, create, compatible_version)
111 return self._create_repo(wire, create, compatible_version)
97
112
98 return self._repo(wire, create_new_repo)
113 return create_new_repo(self.repo_type, repo_path, context_uid,
114 compatible_version)
99
115
100
116
101 NODE_TYPE_MAPPING = {
117 NODE_TYPE_MAPPING = {
102 svn.core.svn_node_file: 'file',
118 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_dir: 'dir',
119 svn.core.svn_node_dir: 'dir',
104 }
120 }
105
121
106
122
107 class SvnRemote(object):
123 class SvnRemote(object):
108
124
109 def __init__(self, factory, hg_factory=None):
125 def __init__(self, factory, hg_factory=None):
110 self._factory = factory
126 self._factory = factory
111 # TODO: Remove once we do not use internal Mercurial objects anymore
127 # TODO: Remove once we do not use internal Mercurial objects anymore
112 # for subversion
128 # for subversion
113 self._hg_factory = hg_factory
129 self._hg_factory = hg_factory
114
130
115 @reraise_safe_exceptions
131 @reraise_safe_exceptions
116 def discover_svn_version(self):
132 def discover_svn_version(self):
117 try:
133 try:
118 import svn.core
134 import svn.core
119 svn_ver = svn.core.SVN_VERSION
135 svn_ver = svn.core.SVN_VERSION
120 except ImportError:
136 except ImportError:
121 svn_ver = None
137 svn_ver = None
122 return svn_ver
138 return svn_ver
123
139
124 def check_url(self, url, config_items):
140 def check_url(self, url, config_items):
125 # this can throw exception if not installed, but we detect this
141 # this can throw exception if not installed, but we detect this
126 from hgsubversion import svnrepo
142 from hgsubversion import svnrepo
127
143
128 baseui = self._hg_factory._create_config(config_items)
144 baseui = self._hg_factory._create_config(config_items)
129 # uuid function get's only valid UUID from proper repo, else
145 # uuid function get's only valid UUID from proper repo, else
130 # throws exception
146 # throws exception
131 try:
147 try:
132 svnrepo.svnremoterepo(baseui, url).svn.uuid
148 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 except Exception:
149 except Exception:
134 tb = traceback.format_exc()
150 tb = traceback.format_exc()
135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
151 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
136 raise URLError(
152 raise URLError(
137 '"%s" is not a valid Subversion source url.' % (url, ))
153 '"%s" is not a valid Subversion source url.' % (url, ))
138 return True
154 return True
139
155
140 def is_path_valid_repository(self, wire, path):
156 def is_path_valid_repository(self, wire, path):
141
157
142 # NOTE(marcink): short circuit the check for SVN repo
158 # NOTE(marcink): short circuit the check for SVN repo
143 # the repos.open might be expensive to check, but we have one cheap
159 # the repos.open might be expensive to check, but we have one cheap
144 # pre condition that we can use, to check for 'format' file
160 # pre condition that we can use, to check for 'format' file
145
161
146 if not os.path.isfile(os.path.join(path, 'format')):
162 if not os.path.isfile(os.path.join(path, 'format')):
147 return False
163 return False
148
164
149 try:
165 try:
150 svn.repos.open(path)
166 svn.repos.open(path)
151 except svn.core.SubversionException:
167 except svn.core.SubversionException:
152 tb = traceback.format_exc()
168 tb = traceback.format_exc()
153 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
169 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
154 return False
170 return False
155 return True
171 return True
156
172
157 @reraise_safe_exceptions
173 @reraise_safe_exceptions
158 def verify(self, wire,):
174 def verify(self, wire,):
159 repo_path = wire['path']
175 repo_path = wire['path']
160 if not self.is_path_valid_repository(wire, repo_path):
176 if not self.is_path_valid_repository(wire, repo_path):
161 raise Exception(
177 raise Exception(
162 "Path %s is not a valid Subversion repository." % repo_path)
178 "Path %s is not a valid Subversion repository." % repo_path)
163
179
164 cmd = ['svnadmin', 'info', repo_path]
180 cmd = ['svnadmin', 'info', repo_path]
165 stdout, stderr = subprocessio.run_command(cmd)
181 stdout, stderr = subprocessio.run_command(cmd)
166 return stdout
182 return stdout
167
183
168 def lookup(self, wire, revision):
184 def lookup(self, wire, revision):
169 if revision not in [-1, None, 'HEAD']:
185 if revision not in [-1, None, 'HEAD']:
170 raise NotImplementedError
186 raise NotImplementedError
171 repo = self._factory.repo(wire)
187 repo = self._factory.repo(wire)
172 fs_ptr = svn.repos.fs(repo)
188 fs_ptr = svn.repos.fs(repo)
173 head = svn.fs.youngest_rev(fs_ptr)
189 head = svn.fs.youngest_rev(fs_ptr)
174 return head
190 return head
175
191
176 def lookup_interval(self, wire, start_ts, end_ts):
192 def lookup_interval(self, wire, start_ts, end_ts):
177 repo = self._factory.repo(wire)
193 repo = self._factory.repo(wire)
178 fsobj = svn.repos.fs(repo)
194 fsobj = svn.repos.fs(repo)
179 start_rev = None
195 start_rev = None
180 end_rev = None
196 end_rev = None
181 if start_ts:
197 if start_ts:
182 start_ts_svn = apr_time_t(start_ts)
198 start_ts_svn = apr_time_t(start_ts)
183 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
199 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
184 else:
200 else:
185 start_rev = 1
201 start_rev = 1
186 if end_ts:
202 if end_ts:
187 end_ts_svn = apr_time_t(end_ts)
203 end_ts_svn = apr_time_t(end_ts)
188 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
204 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
189 else:
205 else:
190 end_rev = svn.fs.youngest_rev(fsobj)
206 end_rev = svn.fs.youngest_rev(fsobj)
191 return start_rev, end_rev
207 return start_rev, end_rev
192
208
193 def revision_properties(self, wire, revision):
209 def revision_properties(self, wire, revision):
194 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
195 fs_ptr = svn.repos.fs(repo)
211 fs_ptr = svn.repos.fs(repo)
196 return svn.fs.revision_proplist(fs_ptr, revision)
212 return svn.fs.revision_proplist(fs_ptr, revision)
197
213
198 def revision_changes(self, wire, revision):
214 def revision_changes(self, wire, revision):
199
215
200 repo = self._factory.repo(wire)
216 repo = self._factory.repo(wire)
201 fsobj = svn.repos.fs(repo)
217 fsobj = svn.repos.fs(repo)
202 rev_root = svn.fs.revision_root(fsobj, revision)
218 rev_root = svn.fs.revision_root(fsobj, revision)
203
219
204 editor = svn.repos.ChangeCollector(fsobj, rev_root)
220 editor = svn.repos.ChangeCollector(fsobj, rev_root)
205 editor_ptr, editor_baton = svn.delta.make_editor(editor)
221 editor_ptr, editor_baton = svn.delta.make_editor(editor)
206 base_dir = ""
222 base_dir = ""
207 send_deltas = False
223 send_deltas = False
208 svn.repos.replay2(
224 svn.repos.replay2(
209 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
225 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
210 editor_ptr, editor_baton, None)
226 editor_ptr, editor_baton, None)
211
227
212 added = []
228 added = []
213 changed = []
229 changed = []
214 removed = []
230 removed = []
215
231
216 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
232 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
217 for path, change in editor.changes.iteritems():
233 for path, change in editor.changes.iteritems():
218 # TODO: Decide what to do with directory nodes. Subversion can add
234 # TODO: Decide what to do with directory nodes. Subversion can add
219 # empty directories.
235 # empty directories.
220
236
221 if change.item_kind == svn.core.svn_node_dir:
237 if change.item_kind == svn.core.svn_node_dir:
222 continue
238 continue
223 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
239 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
224 added.append(path)
240 added.append(path)
225 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
241 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
226 svn.repos.CHANGE_ACTION_REPLACE]:
242 svn.repos.CHANGE_ACTION_REPLACE]:
227 changed.append(path)
243 changed.append(path)
228 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
244 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
229 removed.append(path)
245 removed.append(path)
230 else:
246 else:
231 raise NotImplementedError(
247 raise NotImplementedError(
232 "Action %s not supported on path %s" % (
248 "Action %s not supported on path %s" % (
233 change.action, path))
249 change.action, path))
234
250
235 changes = {
251 changes = {
236 'added': added,
252 'added': added,
237 'changed': changed,
253 'changed': changed,
238 'removed': removed,
254 'removed': removed,
239 }
255 }
240 return changes
256 return changes
241
257
242 def node_history(self, wire, path, revision, limit):
258 def node_history(self, wire, path, revision, limit):
243 cross_copies = False
259 cross_copies = False
244 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
245 fsobj = svn.repos.fs(repo)
261 fsobj = svn.repos.fs(repo)
246 rev_root = svn.fs.revision_root(fsobj, revision)
262 rev_root = svn.fs.revision_root(fsobj, revision)
247
263
248 history_revisions = []
264 history_revisions = []
249 history = svn.fs.node_history(rev_root, path)
265 history = svn.fs.node_history(rev_root, path)
250 history = svn.fs.history_prev(history, cross_copies)
266 history = svn.fs.history_prev(history, cross_copies)
251 while history:
267 while history:
252 __, node_revision = svn.fs.history_location(history)
268 __, node_revision = svn.fs.history_location(history)
253 history_revisions.append(node_revision)
269 history_revisions.append(node_revision)
254 if limit and len(history_revisions) >= limit:
270 if limit and len(history_revisions) >= limit:
255 break
271 break
256 history = svn.fs.history_prev(history, cross_copies)
272 history = svn.fs.history_prev(history, cross_copies)
257 return history_revisions
273 return history_revisions
258
274
259 def node_properties(self, wire, path, revision):
275 def node_properties(self, wire, path, revision):
260 repo = self._factory.repo(wire)
276 repo = self._factory.repo(wire)
261 fsobj = svn.repos.fs(repo)
277 fsobj = svn.repos.fs(repo)
262 rev_root = svn.fs.revision_root(fsobj, revision)
278 rev_root = svn.fs.revision_root(fsobj, revision)
263 return svn.fs.node_proplist(rev_root, path)
279 return svn.fs.node_proplist(rev_root, path)
264
280
265 def file_annotate(self, wire, path, revision):
281 def file_annotate(self, wire, path, revision):
266 abs_path = 'file://' + urllib.pathname2url(
282 abs_path = 'file://' + urllib.pathname2url(
267 vcspath.join(wire['path'], path))
283 vcspath.join(wire['path'], path))
268 file_uri = svn.core.svn_path_canonicalize(abs_path)
284 file_uri = svn.core.svn_path_canonicalize(abs_path)
269
285
270 start_rev = svn_opt_revision_value_t(0)
286 start_rev = svn_opt_revision_value_t(0)
271 peg_rev = svn_opt_revision_value_t(revision)
287 peg_rev = svn_opt_revision_value_t(revision)
272 end_rev = peg_rev
288 end_rev = peg_rev
273
289
274 annotations = []
290 annotations = []
275
291
276 def receiver(line_no, revision, author, date, line, pool):
292 def receiver(line_no, revision, author, date, line, pool):
277 annotations.append((line_no, revision, line))
293 annotations.append((line_no, revision, line))
278
294
279 # TODO: Cannot use blame5, missing typemap function in the swig code
295 # TODO: Cannot use blame5, missing typemap function in the swig code
280 try:
296 try:
281 svn.client.blame2(
297 svn.client.blame2(
282 file_uri, peg_rev, start_rev, end_rev,
298 file_uri, peg_rev, start_rev, end_rev,
283 receiver, svn.client.create_context())
299 receiver, svn.client.create_context())
284 except svn.core.SubversionException as exc:
300 except svn.core.SubversionException as exc:
285 log.exception("Error during blame operation.")
301 log.exception("Error during blame operation.")
286 raise Exception(
302 raise Exception(
287 "Blame not supported or file does not exist at path %s. "
303 "Blame not supported or file does not exist at path %s. "
288 "Error %s." % (path, exc))
304 "Error %s." % (path, exc))
289
305
290 return annotations
306 return annotations
291
307
292 def get_node_type(self, wire, path, rev=None):
308 def get_node_type(self, wire, path, rev=None):
293 repo = self._factory.repo(wire)
309 repo = self._factory.repo(wire)
294 fs_ptr = svn.repos.fs(repo)
310 fs_ptr = svn.repos.fs(repo)
295 if rev is None:
311 if rev is None:
296 rev = svn.fs.youngest_rev(fs_ptr)
312 rev = svn.fs.youngest_rev(fs_ptr)
297 root = svn.fs.revision_root(fs_ptr, rev)
313 root = svn.fs.revision_root(fs_ptr, rev)
298 node = svn.fs.check_path(root, path)
314 node = svn.fs.check_path(root, path)
299 return NODE_TYPE_MAPPING.get(node, None)
315 return NODE_TYPE_MAPPING.get(node, None)
300
316
301 def get_nodes(self, wire, path, revision=None):
317 def get_nodes(self, wire, path, revision=None):
302 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
303 fsobj = svn.repos.fs(repo)
319 fsobj = svn.repos.fs(repo)
304 if revision is None:
320 if revision is None:
305 revision = svn.fs.youngest_rev(fsobj)
321 revision = svn.fs.youngest_rev(fsobj)
306 root = svn.fs.revision_root(fsobj, revision)
322 root = svn.fs.revision_root(fsobj, revision)
307 entries = svn.fs.dir_entries(root, path)
323 entries = svn.fs.dir_entries(root, path)
308 result = []
324 result = []
309 for entry_path, entry_info in entries.iteritems():
325 for entry_path, entry_info in entries.iteritems():
310 result.append(
326 result.append(
311 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
327 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
312 return result
328 return result
313
329
314 def get_file_content(self, wire, path, rev=None):
330 def get_file_content(self, wire, path, rev=None):
315 repo = self._factory.repo(wire)
331 repo = self._factory.repo(wire)
316 fsobj = svn.repos.fs(repo)
332 fsobj = svn.repos.fs(repo)
317 if rev is None:
333 if rev is None:
318 rev = svn.fs.youngest_revision(fsobj)
334 rev = svn.fs.youngest_revision(fsobj)
319 root = svn.fs.revision_root(fsobj, rev)
335 root = svn.fs.revision_root(fsobj, rev)
320 content = svn.core.Stream(svn.fs.file_contents(root, path))
336 content = svn.core.Stream(svn.fs.file_contents(root, path))
321 return content.read()
337 return content.read()
322
338
323 def get_file_size(self, wire, path, revision=None):
339 def get_file_size(self, wire, path, revision=None):
324 repo = self._factory.repo(wire)
340 repo = self._factory.repo(wire)
325 fsobj = svn.repos.fs(repo)
341 fsobj = svn.repos.fs(repo)
326 if revision is None:
342 if revision is None:
327 revision = svn.fs.youngest_revision(fsobj)
343 revision = svn.fs.youngest_revision(fsobj)
328 root = svn.fs.revision_root(fsobj, revision)
344 root = svn.fs.revision_root(fsobj, revision)
329 size = svn.fs.file_length(root, path)
345 size = svn.fs.file_length(root, path)
330 return size
346 return size
331
347
332 def create_repository(self, wire, compatible_version=None):
348 def create_repository(self, wire, compatible_version=None):
333 log.info('Creating Subversion repository in path "%s"', wire['path'])
349 log.info('Creating Subversion repository in path "%s"', wire['path'])
334 self._factory.repo(wire, create=True,
350 self._factory.repo(wire, create=True,
335 compatible_version=compatible_version)
351 compatible_version=compatible_version)
336
352
337 def import_remote_repository(self, wire, src_url):
353 def import_remote_repository(self, wire, src_url):
338 repo_path = wire['path']
354 repo_path = wire['path']
339 if not self.is_path_valid_repository(wire, repo_path):
355 if not self.is_path_valid_repository(wire, repo_path):
340 raise Exception(
356 raise Exception(
341 "Path %s is not a valid Subversion repository." % repo_path)
357 "Path %s is not a valid Subversion repository." % repo_path)
342
358
343 # TODO: johbo: URL checks ?
359 # TODO: johbo: URL checks ?
344 import subprocess
360 import subprocess
345 rdump = subprocess.Popen(
361 rdump = subprocess.Popen(
346 ['svnrdump', 'dump', '--non-interactive', src_url],
362 ['svnrdump', 'dump', '--non-interactive', src_url],
347 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
363 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
348 load = subprocess.Popen(
364 load = subprocess.Popen(
349 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
365 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
350
366
351 # TODO: johbo: This can be a very long operation, might be better
367 # TODO: johbo: This can be a very long operation, might be better
352 # to track some kind of status and provide an api to check if the
368 # to track some kind of status and provide an api to check if the
353 # import is done.
369 # import is done.
354 rdump.wait()
370 rdump.wait()
355 load.wait()
371 load.wait()
356
372
357 if rdump.returncode != 0:
373 if rdump.returncode != 0:
358 errors = rdump.stderr.read()
374 errors = rdump.stderr.read()
359 log.error('svnrdump dump failed: statuscode %s: message: %s',
375 log.error('svnrdump dump failed: statuscode %s: message: %s',
360 rdump.returncode, errors)
376 rdump.returncode, errors)
361 reason = 'UNKNOWN'
377 reason = 'UNKNOWN'
362 if 'svnrdump: E230001:' in errors:
378 if 'svnrdump: E230001:' in errors:
363 reason = 'INVALID_CERTIFICATE'
379 reason = 'INVALID_CERTIFICATE'
364 raise Exception(
380 raise Exception(
365 'Failed to dump the remote repository from %s.' % src_url,
381 'Failed to dump the remote repository from %s.' % src_url,
366 reason)
382 reason)
367 if load.returncode != 0:
383 if load.returncode != 0:
368 raise Exception(
384 raise Exception(
369 'Failed to load the dump of remote repository from %s.' %
385 'Failed to load the dump of remote repository from %s.' %
370 (src_url, ))
386 (src_url, ))
371
387
372 def commit(self, wire, message, author, timestamp, updated, removed):
388 def commit(self, wire, message, author, timestamp, updated, removed):
373 assert isinstance(message, str)
389 assert isinstance(message, str)
374 assert isinstance(author, str)
390 assert isinstance(author, str)
375
391
376 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
377 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
378
394
379 rev = svn.fs.youngest_rev(fsobj)
395 rev = svn.fs.youngest_rev(fsobj)
380 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
396 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
381 txn_root = svn.fs.txn_root(txn)
397 txn_root = svn.fs.txn_root(txn)
382
398
383 for node in updated:
399 for node in updated:
384 TxnNodeProcessor(node, txn_root).update()
400 TxnNodeProcessor(node, txn_root).update()
385 for node in removed:
401 for node in removed:
386 TxnNodeProcessor(node, txn_root).remove()
402 TxnNodeProcessor(node, txn_root).remove()
387
403
388 commit_id = svn.repos.fs_commit_txn(repo, txn)
404 commit_id = svn.repos.fs_commit_txn(repo, txn)
389
405
390 if timestamp:
406 if timestamp:
391 apr_time = apr_time_t(timestamp)
407 apr_time = apr_time_t(timestamp)
392 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
408 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
393 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
409 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
394
410
395 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
411 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
396 return commit_id
412 return commit_id
397
413
398 def diff(self, wire, rev1, rev2, path1=None, path2=None,
414 def diff(self, wire, rev1, rev2, path1=None, path2=None,
399 ignore_whitespace=False, context=3):
415 ignore_whitespace=False, context=3):
400
416
401 wire.update(cache=False)
417 wire.update(cache=False)
402 repo = self._factory.repo(wire)
418 repo = self._factory.repo(wire)
403 diff_creator = SvnDiffer(
419 diff_creator = SvnDiffer(
404 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
420 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
405 try:
421 try:
406 return diff_creator.generate_diff()
422 return diff_creator.generate_diff()
407 except svn.core.SubversionException as e:
423 except svn.core.SubversionException as e:
408 log.exception(
424 log.exception(
409 "Error during diff operation operation. "
425 "Error during diff operation operation. "
410 "Path might not exist %s, %s" % (path1, path2))
426 "Path might not exist %s, %s" % (path1, path2))
411 return ""
427 return ""
412
428
413 @reraise_safe_exceptions
429 @reraise_safe_exceptions
414 def is_large_file(self, wire, path):
430 def is_large_file(self, wire, path):
415 return False
431 return False
416
432
417 @reraise_safe_exceptions
433 @reraise_safe_exceptions
418 def install_hooks(self, wire, force=False):
434 def install_hooks(self, wire, force=False):
419 from vcsserver.hook_utils import install_svn_hooks
435 from vcsserver.hook_utils import install_svn_hooks
420 repo_path = wire['path']
436 repo_path = wire['path']
421 binary_dir = settings.BINARY_DIR
437 binary_dir = settings.BINARY_DIR
422 executable = None
438 executable = None
423 if binary_dir:
439 if binary_dir:
424 executable = os.path.join(binary_dir, 'python')
440 executable = os.path.join(binary_dir, 'python')
425 return install_svn_hooks(
441 return install_svn_hooks(
426 repo_path, executable=executable, force_create=force)
442 repo_path, executable=executable, force_create=force)
427
443
428
444
429 class SvnDiffer(object):
445 class SvnDiffer(object):
430 """
446 """
431 Utility to create diffs based on difflib and the Subversion api
447 Utility to create diffs based on difflib and the Subversion api
432 """
448 """
433
449
434 binary_content = False
450 binary_content = False
435
451
436 def __init__(
452 def __init__(
437 self, repo, src_rev, src_path, tgt_rev, tgt_path,
453 self, repo, src_rev, src_path, tgt_rev, tgt_path,
438 ignore_whitespace, context):
454 ignore_whitespace, context):
439 self.repo = repo
455 self.repo = repo
440 self.ignore_whitespace = ignore_whitespace
456 self.ignore_whitespace = ignore_whitespace
441 self.context = context
457 self.context = context
442
458
443 fsobj = svn.repos.fs(repo)
459 fsobj = svn.repos.fs(repo)
444
460
445 self.tgt_rev = tgt_rev
461 self.tgt_rev = tgt_rev
446 self.tgt_path = tgt_path or ''
462 self.tgt_path = tgt_path or ''
447 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
463 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
448 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
464 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
449
465
450 self.src_rev = src_rev
466 self.src_rev = src_rev
451 self.src_path = src_path or self.tgt_path
467 self.src_path = src_path or self.tgt_path
452 self.src_root = svn.fs.revision_root(fsobj, src_rev)
468 self.src_root = svn.fs.revision_root(fsobj, src_rev)
453 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
469 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
454
470
455 self._validate()
471 self._validate()
456
472
457 def _validate(self):
473 def _validate(self):
458 if (self.tgt_kind != svn.core.svn_node_none and
474 if (self.tgt_kind != svn.core.svn_node_none and
459 self.src_kind != svn.core.svn_node_none and
475 self.src_kind != svn.core.svn_node_none and
460 self.src_kind != self.tgt_kind):
476 self.src_kind != self.tgt_kind):
461 # TODO: johbo: proper error handling
477 # TODO: johbo: proper error handling
462 raise Exception(
478 raise Exception(
463 "Source and target are not compatible for diff generation. "
479 "Source and target are not compatible for diff generation. "
464 "Source type: %s, target type: %s" %
480 "Source type: %s, target type: %s" %
465 (self.src_kind, self.tgt_kind))
481 (self.src_kind, self.tgt_kind))
466
482
467 def generate_diff(self):
483 def generate_diff(self):
468 buf = StringIO.StringIO()
484 buf = StringIO.StringIO()
469 if self.tgt_kind == svn.core.svn_node_dir:
485 if self.tgt_kind == svn.core.svn_node_dir:
470 self._generate_dir_diff(buf)
486 self._generate_dir_diff(buf)
471 else:
487 else:
472 self._generate_file_diff(buf)
488 self._generate_file_diff(buf)
473 return buf.getvalue()
489 return buf.getvalue()
474
490
475 def _generate_dir_diff(self, buf):
491 def _generate_dir_diff(self, buf):
476 editor = DiffChangeEditor()
492 editor = DiffChangeEditor()
477 editor_ptr, editor_baton = svn.delta.make_editor(editor)
493 editor_ptr, editor_baton = svn.delta.make_editor(editor)
478 svn.repos.dir_delta2(
494 svn.repos.dir_delta2(
479 self.src_root,
495 self.src_root,
480 self.src_path,
496 self.src_path,
481 '', # src_entry
497 '', # src_entry
482 self.tgt_root,
498 self.tgt_root,
483 self.tgt_path,
499 self.tgt_path,
484 editor_ptr, editor_baton,
500 editor_ptr, editor_baton,
485 authorization_callback_allow_all,
501 authorization_callback_allow_all,
486 False, # text_deltas
502 False, # text_deltas
487 svn.core.svn_depth_infinity, # depth
503 svn.core.svn_depth_infinity, # depth
488 False, # entry_props
504 False, # entry_props
489 False, # ignore_ancestry
505 False, # ignore_ancestry
490 )
506 )
491
507
492 for path, __, change in sorted(editor.changes):
508 for path, __, change in sorted(editor.changes):
493 self._generate_node_diff(
509 self._generate_node_diff(
494 buf, change, path, self.tgt_path, path, self.src_path)
510 buf, change, path, self.tgt_path, path, self.src_path)
495
511
496 def _generate_file_diff(self, buf):
512 def _generate_file_diff(self, buf):
497 change = None
513 change = None
498 if self.src_kind == svn.core.svn_node_none:
514 if self.src_kind == svn.core.svn_node_none:
499 change = "add"
515 change = "add"
500 elif self.tgt_kind == svn.core.svn_node_none:
516 elif self.tgt_kind == svn.core.svn_node_none:
501 change = "delete"
517 change = "delete"
502 tgt_base, tgt_path = vcspath.split(self.tgt_path)
518 tgt_base, tgt_path = vcspath.split(self.tgt_path)
503 src_base, src_path = vcspath.split(self.src_path)
519 src_base, src_path = vcspath.split(self.src_path)
504 self._generate_node_diff(
520 self._generate_node_diff(
505 buf, change, tgt_path, tgt_base, src_path, src_base)
521 buf, change, tgt_path, tgt_base, src_path, src_base)
506
522
507 def _generate_node_diff(
523 def _generate_node_diff(
508 self, buf, change, tgt_path, tgt_base, src_path, src_base):
524 self, buf, change, tgt_path, tgt_base, src_path, src_base):
509
525
510 if self.src_rev == self.tgt_rev and tgt_base == src_base:
526 if self.src_rev == self.tgt_rev and tgt_base == src_base:
511 # makes consistent behaviour with git/hg to return empty diff if
527 # makes consistent behaviour with git/hg to return empty diff if
512 # we compare same revisions
528 # we compare same revisions
513 return
529 return
514
530
515 tgt_full_path = vcspath.join(tgt_base, tgt_path)
531 tgt_full_path = vcspath.join(tgt_base, tgt_path)
516 src_full_path = vcspath.join(src_base, src_path)
532 src_full_path = vcspath.join(src_base, src_path)
517
533
518 self.binary_content = False
534 self.binary_content = False
519 mime_type = self._get_mime_type(tgt_full_path)
535 mime_type = self._get_mime_type(tgt_full_path)
520
536
521 if mime_type and not mime_type.startswith('text'):
537 if mime_type and not mime_type.startswith('text'):
522 self.binary_content = True
538 self.binary_content = True
523 buf.write("=" * 67 + '\n')
539 buf.write("=" * 67 + '\n')
524 buf.write("Cannot display: file marked as a binary type.\n")
540 buf.write("Cannot display: file marked as a binary type.\n")
525 buf.write("svn:mime-type = %s\n" % mime_type)
541 buf.write("svn:mime-type = %s\n" % mime_type)
526 buf.write("Index: %s\n" % (tgt_path, ))
542 buf.write("Index: %s\n" % (tgt_path, ))
527 buf.write("=" * 67 + '\n')
543 buf.write("=" * 67 + '\n')
528 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
544 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
529 'tgt_path': tgt_path})
545 'tgt_path': tgt_path})
530
546
531 if change == 'add':
547 if change == 'add':
532 # TODO: johbo: SVN is missing a zero here compared to git
548 # TODO: johbo: SVN is missing a zero here compared to git
533 buf.write("new file mode 10644\n")
549 buf.write("new file mode 10644\n")
534
550
535 #TODO(marcink): intro to binary detection of svn patches
551 #TODO(marcink): intro to binary detection of svn patches
536 # if self.binary_content:
552 # if self.binary_content:
537 # buf.write('GIT binary patch\n')
553 # buf.write('GIT binary patch\n')
538
554
539 buf.write("--- /dev/null\t(revision 0)\n")
555 buf.write("--- /dev/null\t(revision 0)\n")
540 src_lines = []
556 src_lines = []
541 else:
557 else:
542 if change == 'delete':
558 if change == 'delete':
543 buf.write("deleted file mode 10644\n")
559 buf.write("deleted file mode 10644\n")
544
560
545 #TODO(marcink): intro to binary detection of svn patches
561 #TODO(marcink): intro to binary detection of svn patches
546 # if self.binary_content:
562 # if self.binary_content:
547 # buf.write('GIT binary patch\n')
563 # buf.write('GIT binary patch\n')
548
564
549 buf.write("--- a/%s\t(revision %s)\n" % (
565 buf.write("--- a/%s\t(revision %s)\n" % (
550 src_path, self.src_rev))
566 src_path, self.src_rev))
551 src_lines = self._svn_readlines(self.src_root, src_full_path)
567 src_lines = self._svn_readlines(self.src_root, src_full_path)
552
568
553 if change == 'delete':
569 if change == 'delete':
554 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
570 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
555 tgt_lines = []
571 tgt_lines = []
556 else:
572 else:
557 buf.write("+++ b/%s\t(revision %s)\n" % (
573 buf.write("+++ b/%s\t(revision %s)\n" % (
558 tgt_path, self.tgt_rev))
574 tgt_path, self.tgt_rev))
559 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
575 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
560
576
561 if not self.binary_content:
577 if not self.binary_content:
562 udiff = svn_diff.unified_diff(
578 udiff = svn_diff.unified_diff(
563 src_lines, tgt_lines, context=self.context,
579 src_lines, tgt_lines, context=self.context,
564 ignore_blank_lines=self.ignore_whitespace,
580 ignore_blank_lines=self.ignore_whitespace,
565 ignore_case=False,
581 ignore_case=False,
566 ignore_space_changes=self.ignore_whitespace)
582 ignore_space_changes=self.ignore_whitespace)
567 buf.writelines(udiff)
583 buf.writelines(udiff)
568
584
569 def _get_mime_type(self, path):
585 def _get_mime_type(self, path):
570 try:
586 try:
571 mime_type = svn.fs.node_prop(
587 mime_type = svn.fs.node_prop(
572 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
588 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
573 except svn.core.SubversionException:
589 except svn.core.SubversionException:
574 mime_type = svn.fs.node_prop(
590 mime_type = svn.fs.node_prop(
575 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
591 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
576 return mime_type
592 return mime_type
577
593
578 def _svn_readlines(self, fs_root, node_path):
594 def _svn_readlines(self, fs_root, node_path):
579 if self.binary_content:
595 if self.binary_content:
580 return []
596 return []
581 node_kind = svn.fs.check_path(fs_root, node_path)
597 node_kind = svn.fs.check_path(fs_root, node_path)
582 if node_kind not in (
598 if node_kind not in (
583 svn.core.svn_node_file, svn.core.svn_node_symlink):
599 svn.core.svn_node_file, svn.core.svn_node_symlink):
584 return []
600 return []
585 content = svn.core.Stream(
601 content = svn.core.Stream(
586 svn.fs.file_contents(fs_root, node_path)).read()
602 svn.fs.file_contents(fs_root, node_path)).read()
587 return content.splitlines(True)
603 return content.splitlines(True)
588
604
589
605
590
606
591 class DiffChangeEditor(svn.delta.Editor):
607 class DiffChangeEditor(svn.delta.Editor):
592 """
608 """
593 Records changes between two given revisions
609 Records changes between two given revisions
594 """
610 """
595
611
596 def __init__(self):
612 def __init__(self):
597 self.changes = []
613 self.changes = []
598
614
599 def delete_entry(self, path, revision, parent_baton, pool=None):
615 def delete_entry(self, path, revision, parent_baton, pool=None):
600 self.changes.append((path, None, 'delete'))
616 self.changes.append((path, None, 'delete'))
601
617
602 def add_file(
618 def add_file(
603 self, path, parent_baton, copyfrom_path, copyfrom_revision,
619 self, path, parent_baton, copyfrom_path, copyfrom_revision,
604 file_pool=None):
620 file_pool=None):
605 self.changes.append((path, 'file', 'add'))
621 self.changes.append((path, 'file', 'add'))
606
622
607 def open_file(self, path, parent_baton, base_revision, file_pool=None):
623 def open_file(self, path, parent_baton, base_revision, file_pool=None):
608 self.changes.append((path, 'file', 'change'))
624 self.changes.append((path, 'file', 'change'))
609
625
610
626
611 def authorization_callback_allow_all(root, path, pool):
627 def authorization_callback_allow_all(root, path, pool):
612 return True
628 return True
613
629
614
630
615 class TxnNodeProcessor(object):
631 class TxnNodeProcessor(object):
616 """
632 """
617 Utility to process the change of one node within a transaction root.
633 Utility to process the change of one node within a transaction root.
618
634
619 It encapsulates the knowledge of how to add, update or remove
635 It encapsulates the knowledge of how to add, update or remove
620 a node for a given transaction root. The purpose is to support the method
636 a node for a given transaction root. The purpose is to support the method
621 `SvnRemote.commit`.
637 `SvnRemote.commit`.
622 """
638 """
623
639
624 def __init__(self, node, txn_root):
640 def __init__(self, node, txn_root):
625 assert isinstance(node['path'], str)
641 assert isinstance(node['path'], str)
626
642
627 self.node = node
643 self.node = node
628 self.txn_root = txn_root
644 self.txn_root = txn_root
629
645
630 def update(self):
646 def update(self):
631 self._ensure_parent_dirs()
647 self._ensure_parent_dirs()
632 self._add_file_if_node_does_not_exist()
648 self._add_file_if_node_does_not_exist()
633 self._update_file_content()
649 self._update_file_content()
634 self._update_file_properties()
650 self._update_file_properties()
635
651
636 def remove(self):
652 def remove(self):
637 svn.fs.delete(self.txn_root, self.node['path'])
653 svn.fs.delete(self.txn_root, self.node['path'])
638 # TODO: Clean up directory if empty
654 # TODO: Clean up directory if empty
639
655
640 def _ensure_parent_dirs(self):
656 def _ensure_parent_dirs(self):
641 curdir = vcspath.dirname(self.node['path'])
657 curdir = vcspath.dirname(self.node['path'])
642 dirs_to_create = []
658 dirs_to_create = []
643 while not self._svn_path_exists(curdir):
659 while not self._svn_path_exists(curdir):
644 dirs_to_create.append(curdir)
660 dirs_to_create.append(curdir)
645 curdir = vcspath.dirname(curdir)
661 curdir = vcspath.dirname(curdir)
646
662
647 for curdir in reversed(dirs_to_create):
663 for curdir in reversed(dirs_to_create):
648 log.debug('Creating missing directory "%s"', curdir)
664 log.debug('Creating missing directory "%s"', curdir)
649 svn.fs.make_dir(self.txn_root, curdir)
665 svn.fs.make_dir(self.txn_root, curdir)
650
666
651 def _svn_path_exists(self, path):
667 def _svn_path_exists(self, path):
652 path_status = svn.fs.check_path(self.txn_root, path)
668 path_status = svn.fs.check_path(self.txn_root, path)
653 return path_status != svn.core.svn_node_none
669 return path_status != svn.core.svn_node_none
654
670
655 def _add_file_if_node_does_not_exist(self):
671 def _add_file_if_node_does_not_exist(self):
656 kind = svn.fs.check_path(self.txn_root, self.node['path'])
672 kind = svn.fs.check_path(self.txn_root, self.node['path'])
657 if kind == svn.core.svn_node_none:
673 if kind == svn.core.svn_node_none:
658 svn.fs.make_file(self.txn_root, self.node['path'])
674 svn.fs.make_file(self.txn_root, self.node['path'])
659
675
660 def _update_file_content(self):
676 def _update_file_content(self):
661 assert isinstance(self.node['content'], str)
677 assert isinstance(self.node['content'], str)
662 handler, baton = svn.fs.apply_textdelta(
678 handler, baton = svn.fs.apply_textdelta(
663 self.txn_root, self.node['path'], None, None)
679 self.txn_root, self.node['path'], None, None)
664 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
680 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
665
681
666 def _update_file_properties(self):
682 def _update_file_properties(self):
667 properties = self.node.get('properties', {})
683 properties = self.node.get('properties', {})
668 for key, value in properties.iteritems():
684 for key, value in properties.iteritems():
669 svn.fs.change_node_prop(
685 svn.fs.change_node_prop(
670 self.txn_root, self.node['path'], key, value)
686 self.txn_root, self.node['path'], key, value)
671
687
672
688
673 def apr_time_t(timestamp):
689 def apr_time_t(timestamp):
674 """
690 """
675 Convert a Python timestamp into APR timestamp type apr_time_t
691 Convert a Python timestamp into APR timestamp type apr_time_t
676 """
692 """
677 return timestamp * 1E6
693 return timestamp * 1E6
678
694
679
695
680 def svn_opt_revision_value_t(num):
696 def svn_opt_revision_value_t(num):
681 """
697 """
682 Put `num` into a `svn_opt_revision_value_t` structure.
698 Put `num` into a `svn_opt_revision_value_t` structure.
683 """
699 """
684 value = svn.core.svn_opt_revision_value_t()
700 value = svn.core.svn_opt_revision_value_t()
685 value.number = num
701 value.number = num
686 revision = svn.core.svn_opt_revision_t()
702 revision = svn.core.svn_opt_revision_t()
687 revision.kind = svn.core.svn_opt_revision_number
703 revision.kind = svn.core.svn_opt_revision_number
688 revision.value = value
704 revision.value = value
689 return revision
705 return revision
@@ -1,58 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19
19
20 import pytest
20 import pytest
21
21
22
22
23 def pytest_addoption(parser):
23 def pytest_addoption(parser):
24 parser.addoption(
24 parser.addoption(
25 '--repeat', type=int, default=100,
25 '--repeat', type=int, default=100,
26 help="Number of repetitions in performance tests.")
26 help="Number of repetitions in performance tests.")
27
27
28
28
29 @pytest.fixture(scope='session')
29 @pytest.fixture(scope='session')
30 def repeat(request):
30 def repeat(request):
31 """
31 """
32 The number of repetitions is based on this fixture.
32 The number of repetitions is based on this fixture.
33
33
34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
35 tests are not too slow in our default test suite.
35 tests are not too slow in our default test suite.
36 """
36 """
37 return request.config.getoption('--repeat')
37 return request.config.getoption('--repeat')
38
38
39
39
40 @pytest.fixture(scope='session')
40 @pytest.fixture(scope='session')
41 def vcsserver_port(request):
41 def vcsserver_port(request):
42 port = get_available_port()
42 port = get_available_port()
43 print 'Using vcsserver port %s' % (port, )
43 print('Using vcsserver port %s' % (port, ))
44 return port
44 return port
45
45
46
46
47 def get_available_port():
47 def get_available_port():
48 family = socket.AF_INET
48 family = socket.AF_INET
49 socktype = socket.SOCK_STREAM
49 socktype = socket.SOCK_STREAM
50 host = '127.0.0.1'
50 host = '127.0.0.1'
51
51
52 mysocket = socket.socket(family, socktype)
52 mysocket = socket.socket(family, socktype)
53 mysocket.bind((host, 0))
53 mysocket.bind((host, 0))
54 port = mysocket.getsockname()[1]
54 port = mysocket.getsockname()[1]
55 mysocket.close()
55 mysocket.close()
56 del mysocket
56 del mysocket
57 return port
57 return port
58
@@ -1,162 +1,165 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
# Canned ref-name -> commit-sha mapping shared by the git remote tests.
SAMPLE_REFS = dict([
    ('HEAD', 'fd627b9e0dd80b47be81af07c4a98518244ed2f7'),
    ('refs/tags/v0.1.9', '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c'),
    ('refs/tags/v0.1.8', '74ebce002c088b8a5ecf40073db09375515ecd68'),
    ('refs/tags/v0.1.1', 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0'),
    ('refs/tags/v0.1.3', '5a3a8fb005554692b16e21dee62bf02667d8dc3e'),
])
34
34
35
35
@pytest.fixture
def git_remote():
    """
    A GitRemote instance backed by a mocked repository factory.
    """
    return git.GitRemote(Mock())
44
44
45
45
def test_discover_git_version(git_remote):
    # The remote must report a non-empty git version string.
    assert git_remote.discover_git_version()
49
49
50
50
class TestGitFetch(object):
    """Exercises GitRemote.fetch against a mocked repository factory."""

    def setup(self):
        # Factory that always hands back the same mock repository so the
        # tests can inspect what fetch() did to it.
        self.mock_repo = Mock()
        repo_factory = Mock()
        repo_factory.repo = Mock(return_value=self.mock_repo)
        self.remote_git = git.GitRemote(repo_factory)

    def test_fetches_all_when_no_commit_ids_specified(self):
        def record_determine_wants(determine_wants, *args, **kwargs):
            determine_wants(SAMPLE_REFS)

        with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
            mock_fetch.side_effect = record_determine_wants
            self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
            # Without explicit refs, fetch must fall back to "want all".
            determine_wants = self.mock_repo.object_store.determine_wants_all
            determine_wants.assert_called_once_with(SAMPLE_REFS)

    def test_fetches_specified_commits(self):
        selected_refs = {
            'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
            'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
        }

        def check_determine_wants(determine_wants, *args, **kwargs):
            wanted = determine_wants(SAMPLE_REFS)
            # Only the shas of the explicitly requested refs are wanted.
            assert sorted(wanted) == sorted(selected_refs.values())
            return wanted

        with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
            mock_fetch.side_effect = check_determine_wants
            self.remote_git.fetch(
                wire=None, url='/tmp/', apply_refs=False,
                refs=selected_refs.keys())
            # The "want all" path must not have been taken.
            determine_wants = self.mock_repo.object_store.determine_wants_all
            assert determine_wants.call_count == 0

    def test_get_remote_refs(self):
        remote_git = git.GitRemote(Mock())
        url = 'http://example.com/test/test.git'
        sample_refs = {
            'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
            'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
        }

        with patch('vcsserver.git.Repo', create=False) as mock_repo:
            mock_repo().get_refs.return_value = sample_refs
            remote_refs = remote_git.get_remote_refs(wire=None, url=url)
            mock_repo().get_refs.assert_called_once_with()
            assert remote_refs == sample_refs

    def test_remove_ref(self):
        doomed_ref = 'refs/tags/v0.1.9'
        self.mock_repo.refs = SAMPLE_REFS.copy()
        self.remote_git.remove_ref(None, doomed_ref)
        assert doomed_ref not in self.mock_repo.refs
107
107
108
108
class TestReraiseSafeExceptions(object):
    """Checks the reraise_safe_exceptions decorator on GitRemote."""

    def test_method_decorated_with_reraise_safe_exceptions(self):
        git_remote = git.GitRemote(Mock())

        def fake_function():
            return None

        decorated = git.reraise_safe_exceptions(fake_function)

        for name, bound_method in inspect.getmembers(
                git_remote, predicate=inspect.ismethod):
            if name.startswith('_'):
                continue
            # Every public method must share the wrapper's code object,
            # i.e. be wrapped by reraise_safe_exceptions.
            assert bound_method.im_func.__code__ == decorated.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
        (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
        (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
        (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
        (dulwich.errors.HangupException(), 'error'),
        (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        @git.reraise_safe_exceptions
        def raising_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            raising_method()
        # Dulwich errors are translated into plain Exceptions tagged
        # with a vcs kind marker.
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type
141
141
142
142
class TestDulwichRepoWrapper(object):
    def test_calls_close_on_delete(self):
        # Repo creation checks os.path.isdir; pretend the path exists.
        with patch('dulwich.repo.os.path.isdir', return_value=True):
            repo = git.Repo('/tmp/abcde')
        with patch.object(git.DulwichRepo, 'close') as close_mock:
            del repo
        # Dropping the wrapper must close the underlying dulwich repo.
        close_mock.assert_called_once_with()
151
151
152
152
class TestGitFactory(object):
    def test_create_repo_returns_dulwich_wrapper(self):

        # The factory consults the dogpile cache regions; stub them out so
        # no real cache configuration is required for this test.
        with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
            mock.side_effect = {'repo_objects': ''}
            factory = git.GitFactory()
            wire = {'path': '/tmp/abcde'}
            with patch('dulwich.repo.os.path.isdir', return_value=True):
                result = factory._create_repo(wire, True)
            assert isinstance(result, git.Repo)
@@ -1,44 +1,39 b''
1 """
1 """
2 Tests used to profile the HTTP based implementation.
2 Tests used to profile the HTTP based implementation.
3 """
3 """
4
4
5 import pytest
5 import pytest
6 import webtest
6 import webtest
7
7
8 from vcsserver.http_main import main
8 from vcsserver.http_main import main
9
9
10
10
@pytest.fixture
def vcs_app():
    """WSGI test app wrapping the echo variant of the vcsserver."""
    stub_settings = {
        'dev.use_echo_app': 'true',
        'locale': 'en_US.UTF-8',
    }
    wsgi_app = main({}, **stub_settings)
    return webtest.TestApp(wsgi_app)
20
26
21
@pytest.fixture(scope='module')
def data():
    # 10 MB payload: one kilobyte of 'x' repeated 10240 times.
    return 'x' * 1024 * 1024 * 10
31
26
32
27
def test_http_app_streaming_with_data(data, repeat, vcs_app):
    # Slow streaming call: run only a tenth of the configured repetitions.
    for _ in xrange(repeat / 10):
        response = vcs_app.post('/stream/git/', params=data)
        assert response.status_code == 200
38
33
39
34
def test_http_app_streaming_no_data(repeat, vcs_app):
    # Slow streaming call: run only a tenth of the configured repetitions.
    for _ in xrange(repeat / 10):
        response = vcs_app.post('/stream/git/')
        assert response.status_code == 200
@@ -1,82 +1,89 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
17 import logging
18 import hashlib
18
19
19 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
20
21
21
22
def safe_int(val, default=None):
    """
    Returns int() of val if val is not convertable to int use default
    instead

    :param val: value to convert
    :param default: returned when conversion fails
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        # not a number-like value at all
        return default
37
38
38
39
def safe_str(unicode_, to_encoding=None):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :param to_encoding: encoding (or list of encodings) to try, utf8 default
    :rtype: str
    :returns: str object
    """
    # A mutable list as default argument is shared across calls; use a
    # None sentinel and normalize here instead.
    if to_encoding is None:
        to_encoding = ['utf8']

    # if it's not basestr cast to str
    if not isinstance(unicode_, basestring):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    try:
        import chardet
        encoding = chardet.detect(unicode_)['encoding']
        if encoding is None:
            # chardet could not guess an encoding. The original raised a
            # bare UnicodeEncodeError() here, which is a TypeError in
            # disguise (the constructor requires 5 arguments) and escaped
            # the except clause below; fall back to replacement directly.
            return unicode_.encode(to_encoding[0], 'replace')

        return unicode_.encode(encoding)
    except (ImportError, UnicodeEncodeError):
        return unicode_.encode(to_encoding[0], 'replace')
76
77
77
78
class AttributeDict(dict):
    """Dict subclass exposing its keys as attributes."""

    def __getattr__(self, attr):
        # Missing keys yield None instead of raising AttributeError.
        return self.get(attr, None)

    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
84
85
def sha1(val):
    """Return the hex SHA1 digest of ``val``."""
    return hashlib.sha1(val).hexdigest()
88
89
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now