##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r511:00fadf7b merge stable
parent child Browse files
Show More
@@ -0,0 +1,152 b''
1 """
2 gunicorn config extension and hooks. Sets additional configuration that is
3 available post the .ini config.
4
5 - workers = ${cpu_number}
6 - threads = 1
7 - proc_name = ${gunicorn_proc_name}
8 - worker_class = sync
9 - worker_connections = 10
10 - max_requests = 1000
11 - max_requests_jitter = 30
12 - timeout = 21600
13
14 """
15
16 import multiprocessing
17 import sys
18 import time
19 import datetime
20 import threading
21 import traceback
22 from gunicorn.glogging import Logger
23
24
25 # GLOBAL
26 errorlog = '-'
27 accesslog = '-'
28 loglevel = 'debug'
29
30 # SECURITY
31
32 # The maximum size of HTTP request line in bytes.
33 limit_request_line = 4094
34
35 # Limit the number of HTTP headers fields in a request.
36 limit_request_fields = 1024
37
38 # Limit the allowed size of an HTTP request header field.
39 # Value is a positive number or 0.
40 # Setting it to 0 will allow unlimited header field sizes.
41 limit_request_field_size = 0
42
43
44 # Timeout for graceful workers restart.
45 # After receiving a restart signal, workers have this much time to finish
46 # serving requests. Workers still alive after the timeout (starting from the
47 # receipt of the restart signal) are force killed.
48 graceful_timeout = 30
49
50
51 # The number of seconds to wait for requests on a Keep-Alive connection.
52 # Generally set in the 1-5 seconds range.
53 keepalive = 2
54
55
56 # SERVER MECHANICS
57 # None == system temp dir
58 # worker_tmp_dir is recommended to be set to some tmpfs
59 worker_tmp_dir = None
60 tmp_upload_dir = None
61
62 # Custom log format
63 access_log_format = (
64 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
65
66 # self adjust workers based on CPU count
67 # workers = multiprocessing.cpu_count() * 2 + 1
68
69
def post_fork(server, worker):
    """Gunicorn hook: log each newly spawned worker process."""
    log = server.log
    log.info("[<%-10s>] WORKER spawned", worker.pid)
72
73
def pre_fork(server, worker):
    """Gunicorn hook invoked just before a worker is forked; intentionally a no-op."""
    pass
76
77
def pre_exec(server):
    """Gunicorn hook: log just before the master process re-executes itself."""
    message = "Forked child, re-executing."
    server.log.info(message)
80
81
def on_starting(server):
    """Gunicorn hook: log once when the master process begins starting up."""
    message = "Server is starting."
    server.log.info(message)
84
85
def when_ready(server):
    """Gunicorn hook: log once the master is ready and about to spawn workers."""
    message = "Server is ready. Spawning workers"
    server.log.info(message)
88
89
def on_reload(server):
    """Gunicorn hook invoked on SIGHUP configuration reload; intentionally a no-op."""
    pass
92
93
def worker_int(worker):
    """Gunicorn hook: on INT/QUIT, log a stack dump of every live thread.

    Useful to diagnose what a worker was doing when it was interrupted.
    """
    worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)

    # map thread ident -> human readable thread name for the dump below
    names_by_ident = {th.ident: th.name for th in threading.enumerate()}
    dump = []
    for ident, frame in sys._current_frames().items():
        dump.append(
            "\n# Thread: %s(%d)" % (names_by_ident.get(ident, ""), ident))
        for fname, lineno, func, src_line in traceback.extract_stack(frame):
            dump.append('File: "%s", line %d, in %s' % (fname, lineno, func))
            if src_line:
                dump.append(" %s" % (src_line.strip()))
    worker.log.debug("\n".join(dump))
108
109
def worker_abort(worker):
    """Gunicorn hook: log when a worker receives SIGABRT (e.g. on timeout kill)."""
    pid = worker.pid
    worker.log.info("[<%-10s>] worker received SIGABRT signal", pid)
112
113
def worker_exit(server, worker):
    """Gunicorn hook: log when a worker process exits (called in the worker)."""
    pid = worker.pid
    worker.log.info("[<%-10s>] worker exit", pid)
116
117
def child_exit(server, worker):
    """Gunicorn hook: log when a worker exits (called in the master process)."""
    pid = worker.pid
    worker.log.info("[<%-10s>] worker child exit", pid)
120
121
def pre_request(worker, req):
    """Gunicorn hook: stamp the request start time and log the incoming request."""
    started = time.time()
    worker.start_time = started
    worker.log.debug(
        "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
126
127
def post_request(worker, req, environ, resp):
    """Gunicorn hook: log method/path/status plus wall-clock handling time.

    Relies on pre_request having stamped worker.start_time.
    """
    elapsed = time.time() - worker.start_time
    worker.log.debug(
        "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
        worker.nr, req.method, req.path, resp.status_code, elapsed)
133
134
class RhodeCodeLogger(Logger):
    """
    Custom Logger that allows some customization that gunicorn doesn't allow
    """

    # timestamp format used for every log line (milliseconds appended in now())
    datefmt = r"%Y-%m-%d %H:%M:%S"

    def __init__(self, cfg):
        Logger.__init__(self, cfg)

    def now(self):
        """Return the current time formatted for RhodeCode logs, with msecs."""
        now = time.time()
        # BUG FIX: `long()` does not exist on Python 3; int() truncates the
        # same way for the positive timestamps produced by time.time()
        msecs = int((now - int(now)) * 1000)
        return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
150
151
152 logger_class = RhodeCodeLogger
@@ -0,0 +1,28 b''
1
2 ==============================
3 Generate the Nix expressions
4 ==============================
5
6 Details can be found in the repository of `RhodeCode Enterprise CE`_ in
7 the file `docs/contributing/dependencies.rst`.
8
9 Start the environment as follows:
10
11 .. code:: shell
12
13 nix-shell pkgs/shell-generate.nix
14
15
16 Python dependencies
17 ===================
18
19 .. code:: shell
20
21 pip2nix generate --licenses
22 # or faster
23 nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
24
25
26 .. Links
27
28 .. _RhodeCode Enterprise CE: https://code.rhodecode.com/rhodecode-enterprise-ce
@@ -0,0 +1,17 b''
# Arguments: the nixpkgs set and the python package set to build against.
{ pkgs
, pythonPackages
}:

rec {
  # Pinned source snapshot of the pip2nix tool (johbo fork, fixed revision).
  pip2nix-src = pkgs.fetchzip {
    url = https://github.com/johbo/pip2nix/archive/51e6fdae34d0e8ded9efeef7a8601730249687a6.tar.gz;
    sha256 = "02a4jjgi7lsvf8mhrxsd56s9a3yg20081rl9bgc2m84w60v2gbz2";
  };

  # Build pip2nix from the pinned source using the supplied package sets.
  pip2nix = import pip2nix-src {
    inherit
      pkgs
      pythonPackages;
  };

}
@@ -0,0 +1,45 b''
# Nixpkgs overlay pinning the Git and Subversion versions used by vcsserver.
self: super: {
  # bump GIT version
  git = super.lib.overrideDerivation super.git (oldAttrs: {
    name = "git-2.17.1";
    src = self.fetchurl {
      url = "https://www.kernel.org/pub/software/scm/git/git-2.17.1.tar.xz";
      sha256 = "0pm6bdnrrm165k3krnazxcxadifk2gqi30awlbcf9fism1x6w4vr";
    };

    # local patches re-applied on top of the upstream tarball
    patches = [
      ./git_patches/docbook2texi.patch
      ./git_patches/symlinks-in-bin.patch
      ./git_patches/git-sh-i18n.patch
      ./git_patches/ssh-path.patch
    ];

  });

  # Override subversion derivation to
  # - activate python bindings
  subversion =
    let
      subversionWithPython = super.subversion.override {
        httpSupport = true;
        pythonBindings = true;
        python = self.python27Packages.python;
      };
    in
    super.lib.overrideDerivation subversionWithPython (oldAttrs: {
      name = "subversion-1.10.2";
      src = self.fetchurl {
        url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
        sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
      };

      ## use internal lz4/utf8proc because it is stable and shipped with SVN
      configureFlags = oldAttrs.configureFlags ++ [
        " --with-lz4=internal"
        " --with-utf8proc=internal"
      ];


    });

}
@@ -0,0 +1,41 b''
# Development shell for regenerating the pip2nix Nix expressions.
{ pkgs ? (import <nixpkgs> {})
, pythonPackages ? "python27Packages"
}:

with pkgs.lib;

# Resolve the python package set named by the *string* argument.
let _pythonPackages = pythonPackages; in
let
  pythonPackages = getAttr _pythonPackages pkgs;

  pip2nix = import ./nix-common/pip2nix.nix {
    inherit
      pkgs
      pythonPackages;
  };

in

pkgs.stdenv.mkDerivation {
  name = "pip2nix-generated";
  buildInputs = [
    pip2nix.pip2nix
    pythonPackages.pip-tools
    # apr/aprutil are needed to build the subversion python bindings
    pkgs.apr
    pkgs.aprutil
  ];

  shellHook = ''
    runHook preShellHook
    echo "Setting SVN_* variables"
    export SVN_LIBRARY_PATH=${pkgs.subversion}/lib
    export SVN_HEADER_PATH=${pkgs.subversion.dev}/include
    runHook postShellHook
  '';

  preShellHook = ''
    echo "Starting Generate Shell"
    # Custom prompt to distinguish from other dev envs.
    export PS1="\n\[\033[1;32m\][Generate-shell:\w]$\[\033[0m\] "
  '';
}
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,146 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 import os
22 import time
23 import datetime
24 import msgpack
25 import logging
26 import traceback
27 import tempfile
28
29
30 log = logging.getLogger(__name__)
31
32 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
33 global_prefix = 'vcsserver'
34
35
def exc_serialize(exc_id, tb, exc_type):
    """Serialize exception info for storage.

    Returns a tuple of (msgpack-packed bytes, the raw dict that was packed).
    """
    payload = dict(
        version='v1',
        exc_id=exc_id,
        exc_utc_date=datetime.datetime.utcnow().isoformat(),
        exc_timestamp=repr(time.time()),
        exc_message=tb,
        exc_type=exc_type,
    )
    return msgpack.packb(payload), payload
47
48
def exc_unserialize(tb):
    """Inverse of exc_serialize: unpack stored msgpack bytes back to a dict."""
    unpacked = msgpack.unpackb(tb)
    return unpacked
51
52
def get_exc_store():
    """
    Return the path of the exception store directory, creating it if missing.
    """
    store_name = 'rc_exception_store_v1'
    # fallback: keep the store under the system temp dir
    store_path = os.path.join(tempfile.gettempdir(), store_name)

    store_root = ''  # TODO: need a persistent cross instance store here
    if store_root:
        # BUG FIX: the original reused one variable for both the configured
        # root and the store name, joining the (empty) root with itself here
        # instead of with the store name.
        store_path = os.path.join(store_root, store_name)

    store_path = os.path.abspath(store_path)
    if not os.path.isdir(store_path):
        os.makedirs(store_path)
        log.debug('Initializing exceptions store at %s', store_path)
    return store_path
70
71
def _store_exception(exc_id, exc_info, prefix):
    """Format the traceback from *exc_info* and persist it into the store."""
    exc_type, exc_value, exc_tb = exc_info
    formatted_tb = ''.join(traceback.format_exception(
        exc_type, exc_value, exc_tb, None))

    store_dir = get_exc_store()
    packed, raw_data = exc_serialize(exc_id, formatted_tb, exc_type.__name__)
    file_name = '{}_{}_{}'.format(exc_id, prefix, raw_data['exc_timestamp'])
    # the store dir may have been removed since get_exc_store() created it
    if not os.path.isdir(store_dir):
        os.makedirs(store_dir)
    target_path = os.path.join(store_dir, file_name)
    with open(target_path, 'wb') as f:
        f.write(packed)
    log.debug('Stored generated exception %s as: %s', exc_id, target_path)
87
88
def store_exception(exc_id, exc_info, prefix=global_prefix):
    """Best-effort wrapper around _store_exception that never raises."""
    try:
        _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
    except Exception:
        # storing diagnostics must never take the server down; log and go on
        log.exception('Failed to store exception `%s` information', exc_id)
96
97
def _find_exc_file(exc_id, prefix=global_prefix):
    """
    Locate a stored exception file whose name starts with exc_id (plus prefix).

    Returns the full file path, or None when nothing matches.
    """
    exc_store_path = get_exc_store()
    if prefix:
        pattern = '{}_{}'.format(exc_id, prefix)
    else:
        # search without a prefix
        pattern = '{}'.format(exc_id)

    # scan the store for the first file name starting with the pattern.
    # FIX: removed an unreachable `continue` after `break` and the confusing
    # for/else that reused exc_id for both the pattern and the result.
    for fname in os.listdir(exc_store_path):
        if fname.startswith(pattern):
            return os.path.join(exc_store_path, fname)
    return None
116
117
def _read_exception(exc_id, prefix):
    """Load and unserialize a stored exception record; None when missing."""
    path = _find_exc_file(exc_id=exc_id, prefix=prefix)
    if not path:
        log.debug('Exception File `%s` not found', path)
        return None
    with open(path, 'rb') as f:
        return exc_unserialize(f.read())
126
127
def read_exception(exc_id, prefix=global_prefix):
    """Safe read of a stored exception record; swallows and logs any failure."""
    try:
        return _read_exception(exc_id=exc_id, prefix=prefix)
    except Exception:
        # reading diagnostics must never crash the server; log and return None
        log.exception('Failed to read exception `%s` information', exc_id)
    return None
135
136
def delete_exception(exc_id, prefix=global_prefix):
    """Best-effort removal of a stored exception file; never raises."""
    try:
        path = _find_exc_file(exc_id, prefix=prefix)
        if path:
            os.remove(path)
    except Exception:
        # deletion must never crash the server; log and carry on
        log.exception('Failed to remove exception `%s` information', exc_id)
@@ -0,0 +1,65 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 import logging
22
23 from repoze.lru import LRUCache
24
25 from vcsserver.utils import safe_str
26
27 log = logging.getLogger(__name__)
28
29
class LRUDict(LRUCache):
    """
    Wrapper to provide partial dict access on top of repoze.lru's LRUCache.
    """

    def __setitem__(self, key, value):
        return self.put(key, value)

    def __getitem__(self, key):
        # NOTE: LRUCache.get returns None for missing keys rather than raising
        return self.get(key)

    def __contains__(self, key):
        # BUG FIX: `bool(self.get(key))` reported stored falsy values
        # (0, '', None, ...) as absent; check the backing dict instead,
        # the same `self.data` already used by __delitem__ and keys().
        return key in self.data

    def __delitem__(self, key):
        del self.data[key]

    def keys(self):
        return self.data.keys()
49
50
class LRUDictDebug(LRUDict):
    """
    Wrapper to provide some debug options
    """
    def _report_keys(self):
        # current/max element counts, e.g. "3/1024"
        elems_cnt = '%s/%s' % (len(self.keys()), self.size)
        # trick for pformat print it more nicely
        fmt = '\n'
        for cnt, elem in enumerate(self.keys()):
            fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
        # FIX: pass lazy %-style args so formatting is skipped unless the
        # DEBUG level is actually enabled
        log.debug('current LRU keys (%s):%s', elems_cnt, fmt)

    def __getitem__(self, key):
        self._report_keys()
        return self.get(key)
@@ -0,0 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19 from dogpile.cache import register_backend
20
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
24
25 log = logging.getLogger(__name__)
26
27 from . import region_meta
28 from .util import key_generator, get_default_cache_settings, make_region
29
30
def configure_dogpile_cache(settings):
    """Create and register a dogpile cache region per namespace in *settings*."""
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # inspect available namespaces (first dotted segment of each key)
    avail_regions = {key.split('.', 1)[0] for key in rc_cache_data}
    log.debug('dogpile: found following cache regions: %s', avail_regions)

    # register them into namespace
    for region_name in avail_regions:
        region = make_region(
            name=region_name,
            function_key_generator=key_generator
        )
        region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
        log.debug('dogpile: registering a new region %s[%s]',
                  region_name, region.__dict__)
        region_meta.dogpile_cache_regions[region_name] = region
57
58
def includeme(config):
    """Pyramid include hook: wire up dogpile caches from the app settings."""
    settings = config.registry.settings
    configure_dogpile_cache(settings)
@@ -0,0 +1,51 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19
20 from dogpile.cache.backends import memory as memory_backend
21 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
22
23
24 _default_max_size = 1024
25
26 log = logging.getLogger(__name__)
27
28
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """dogpile memory backend backed by an LRU dict, with optional key logging."""
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        # choose the debug variant when key-count logging was requested
        if arguments.pop('log_key_count', None):
            cache_cls = LRUDictDebug
        else:
            cache_cls = LRUDict

        arguments['cache_dict'] = cache_cls(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)
@@ -0,0 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import tempfile
20
# Default settings applied to every dogpile cache region; 'cache_dir' can be
# overridden from the application settings at configure time.
dogpile_config_defaults = {
    'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
}

# GLOBAL TO STORE ALL REGISTERED REGIONS
dogpile_cache_regions = {}
@@ -0,0 +1,136 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import logging
20 import functools
21
22 from vcsserver.utils import safe_str, sha1
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
25
26 log = logging.getLogger(__name__)
27
28
class RhodeCodeCacheRegion(CacheRegion):
    """CacheRegion subclass adding a condition-aware caching decorator."""

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator that will not touch any dogpile internals
        if *condition* isn't met. This works a bit differently than
        should_cache_fn, and it's faster in cases we don't ever want to
        compute cached values.
        """
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def decorator(fn):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, fn)
            else:
                key_generator = function_key_generator(namespace, fn, to_str=to_str)

            @functools.wraps(fn)
            def decorate(*arg, **kw):
                key = key_generator(*arg, **kw)

                @functools.wraps(fn)
                def creator():
                    # compute the real value; only invoked on a cache miss
                    return fn(*arg, **kw)

                # bypass dogpile entirely when caching is disabled
                if not condition:
                    return creator()

                timeout = expiration_time() if expiration_time_is_callable \
                    else expiration_time

                return self.get_or_create(key, creator, timeout, should_cache_fn)

            def invalidate(*arg, **kw):
                # drop the cached value for these exact arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # directly store *value* under the key for these arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch the cached value (if any) without computing it
                key = key_generator(*arg, **kw)
                return self.get(key)

            def refresh(*arg, **kw):
                # recompute unconditionally and update the cache
                key = key_generator(*arg, **kw)
                value = fn(*arg, **kw)
                self.set(key, value)
                return value

            # expose cache-management helpers on the decorated function
            decorate.set = set_
            decorate.invalidate = invalidate
            decorate.refresh = refresh
            decorate.get = get
            decorate.original = fn
            decorate.key_generator = key_generator

            return decorate

        return decorator
99
100
def make_region(*arg, **kw):
    """Factory for RhodeCodeCacheRegion, mirroring dogpile's make_region."""
    region = RhodeCodeCacheRegion(*arg, **kw)
    return region
103
104
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related entries from *settings*.

    Returns a dict of entries whose key starts with one of *prefixes*,
    with the matched prefix stripped off the key. String values are
    whitespace-stripped.
    """
    # py2/py3 compatible string type for the isinstance check below
    try:
        string_types = basestring  # Python 2
    except NameError:
        string_types = str  # Python 3
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # BUG FIX: `key.split(prefix)[1]` returned the wrong segment
                # whenever the prefix text re-appeared later in the key;
                # slicing off the prefix length is exact.
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, string_types):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
117
118
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(map(safe_str, args))
    return sha1(joined)
124
125
def key_generator(namespace, fn):
    """Build a cache-key function combining namespace, fn name and arg digest."""
    fname = fn.__name__

    def generate_key(*args):
        prefix = namespace or 'default'
        digest = compute_key_from_params(*args)
        return "{}:{}_{}".format(prefix, fname, digest)

    return generate_key
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.12.4
2 current_version = 4.13.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.12.4
12 state = in_progress
13 version = 4.13.0
16 14
@@ -1,18 +1,18 b''
1 1
2 2 .PHONY: clean test test-clean test-only
3 3
4 4
5 5 clean:
6 6 make test-clean
7 7 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' \) -exec rm '{}' ';'
8 8
9 9 test:
10 10 make test-clean
11 11 make test-only
12 12
13 13 test-clean:
14 14 rm -rf coverage.xml htmlcov junit.xml pylint.log result
15 15 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
16 16
17 17 test-only:
18 PYTHONHASHSEED=random py.test -vv -r xw --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
18 PYTHONHASHSEED=random py.test -vv -r xw -p no:sugar --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
@@ -1,1 +1,79 b''
1 development_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 0.0.0.0
10 port = 9900
11
12 use = egg:waitress#main
13
14
15 [app:main]
16 use = egg:rhodecode-vcsserver
17
18 pyramid.default_locale_name = en
19 pyramid.includes =
20
21 ## default locale used by VCS systems
22 locale = en_US.UTF-8
23
24
25 ## path to binaries for vcsserver, it should be set by the installer
26 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
27 core.binary_dir = ""
28
29 ## cache region for storing repo_objects cache
30 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
31 ## cache auto-expires after N seconds
32 rc_cache.repo_object.expiration_time = 300
33 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
34 rc_cache.repo_object.max_size = 100
35
36
37 ################################
38 ### LOGGING CONFIGURATION ####
39 ################################
40 [loggers]
41 keys = root, vcsserver
42
43 [handlers]
44 keys = console
45
46 [formatters]
47 keys = generic
48
49 #############
50 ## LOGGERS ##
51 #############
52 [logger_root]
53 level = NOTSET
54 handlers = console
55
56 [logger_vcsserver]
57 level = DEBUG
58 handlers =
59 qualname = vcsserver
60 propagate = 1
61
62
63 ##############
64 ## HANDLERS ##
65 ##############
66
67 [handler_console]
68 class = StreamHandler
69 args = (sys.stderr,)
70 level = DEBUG
71 formatter = generic
72
73 ################
74 ## FORMATTERS ##
75 ################
76
77 [formatter_generic]
78 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
79 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1 +1,100 b''
1 production_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 127.0.0.1
10 port = 9900
11
12
13 ##########################
14 ## GUNICORN WSGI SERVER ##
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 workers = 2
21 ## process name
22 proc_name = rhodecode_vcsserver
23 ## type of worker class, currently `sync` is the only option allowed.
24 worker_class = sync
25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 #worker_connections = 10
27 ## max number of requests that worker will handle before being gracefully
28 ## restarted, could prevent memory leaks
29 max_requests = 1000
30 max_requests_jitter = 30
31 ## amount of time a worker can spend handling a request before it
32 ## gets killed and restarted. Set to 6hrs
33 timeout = 21600
34
35
36 [app:main]
37 use = egg:rhodecode-vcsserver
38
39 pyramid.default_locale_name = en
40 pyramid.includes =
41
42 ## default locale used by VCS systems
43 locale = en_US.UTF-8
44
45
46 ## path to binaries for vcsserver, it should be set by the installer
47 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
48 core.binary_dir = ""
49
50 ## cache region for storing repo_objects cache
51 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
52 ## cache auto-expires after N seconds
53 rc_cache.repo_object.expiration_time = 300
54 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
55 rc_cache.repo_object.max_size = 100
56
57
58 ################################
59 ### LOGGING CONFIGURATION ####
60 ################################
61 [loggers]
62 keys = root, vcsserver
63
64 [handlers]
65 keys = console
66
67 [formatters]
68 keys = generic
69
70 #############
71 ## LOGGERS ##
72 #############
73 [logger_root]
74 level = NOTSET
75 handlers = console
76
77 [logger_vcsserver]
78 level = DEBUG
79 handlers =
80 qualname = vcsserver
81 propagate = 1
82
83
84 ##############
85 ## HANDLERS ##
86 ##############
87
88 [handler_console]
89 class = StreamHandler
90 args = (sys.stderr,)
91 level = DEBUG
92 formatter = generic
93
94 ################
95 ## FORMATTERS ##
96 ################
97
98 [formatter_generic]
99 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
100 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,166 +1,178 b''
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 { pkgs ? (import <nixpkgs> {})
8 , pythonPackages ? "python27Packages"
7 args@
8 { pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 , doCheck ? true
10 , doCheck ? false
11 , ...
11 12 }:
12 13
13 let pkgs_ = pkgs; in
14 let pkgs_ = (import <nixpkgs> {}); in
14 15
15 16 let
16 pkgs = pkgs_.overridePackages (self: super: {
17 # bump GIT version
18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 name = "git-2.16.4";
20 src = pkgs.fetchurl {
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.16.4.tar.xz";
22 sha256 = "0cnmidjvbdf81mybcvxvl0c2r2x2nvq2jj2dl59dmrc7qklv0sbf";
23 };
24
25 patches = [
26 ./pkgs/git_patches/docbook2texi.patch
27 ./pkgs/git_patches/symlinks-in-bin.patch
28 ./pkgs/git_patches/git-sh-i18n.patch
29 ./pkgs/git_patches/ssh-path.patch
30 ];
31
32 });
33 17
34 # Override subversion derivation to
35 # - activate python bindings
36 subversion = let
37 subversionWithPython = super.subversion.override {
38 httpSupport = true;
39 pythonBindings = true;
40 python = self.python27Packages.python;
41 };
42
43 in
18 # TODO: Currently we ignore the passed in pkgs, instead we should use it
19 # somehow as a base and apply overlays to it.
20 pkgs = import <nixpkgs> {
21 overlays = [
22 (import ./pkgs/overlays.nix)
23 ];
24 inherit (pkgs_)
25 system;
26 };
44 27
45 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
46 name = "subversion-1.9.7";
47 src = pkgs.fetchurl {
48 url = "https://www.apache.org/dist/subversion/subversion-1.9.7.tar.gz";
49 sha256 = "0g3cs2h008z8ymgkhbk54jp87bjh7y049rn42igj881yi2f20an7";
50 };
51
52 });
53
54 });
28 # Works with the new python-packages, still can fallback to the old
29 # variant.
30 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
31 self: basePythonPackages.override (a: { inherit self; }));
55 32
56 inherit (pkgs.lib) fix extends;
57 basePythonPackages = with builtins; if isAttrs pythonPackages
58 then pythonPackages
59 else getAttr pythonPackages pkgs;
33 # Evaluates to the last segment of a file system path.
34 basename = path: with pkgs.lib; last (splitString "/" path);
60 35
61 elem = builtins.elem;
62 basename = path: with pkgs.lib; last (splitString "/" path);
63 startsWith = prefix: full: let
64 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
65 in actualPrefix == prefix;
66
36 # source code filter used as arugment to builtins.filterSource.
67 37 src-filter = path: type: with pkgs.lib;
68 38 let
69 39 ext = last (splitString "." path);
70 40 in
71 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
72 "node_modules" "build" "data" "tmp"] &&
73 !elem ext ["egg-info" "pyc"] &&
74 !startsWith "result" path;
41 !builtins.elem (basename path) [
42 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
43 "bower_components" "node_modules"
44 "build" "data" "result" "tmp"] &&
45 !builtins.elem ext ["egg-info" "pyc"] &&
46 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
47 # it would still be good to restore it since we want to ignore "result-*".
48 !hasPrefix "result" path;
75 49
50 sources =
51 let
52 inherit (pkgs.lib) all isString attrValues;
53 sourcesConfig = pkgs.config.rc.sources or {};
54 in
55 # Ensure that sources are configured as strings. Using a path
56 # would result in a copy into the nix store.
57 assert all isString (attrValues sourcesConfig);
58 sourcesConfig;
59
60 version = builtins.readFile "${rhodecode-vcsserver-src}/vcsserver/VERSION";
76 61 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
77 62
78 pythonGeneratedPackages = self: basePythonPackages.override (a: {
79 inherit self;
80 }) // (scopedImport {
81 self = self;
82 super = basePythonPackages;
83 inherit pkgs;
84 inherit (pkgs) fetchurl fetchgit;
85 } ./pkgs/python-packages.nix);
86
87 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
88 inherit basePythonPackages pkgs;
89 };
90
91 version = builtins.readFile ./vcsserver/VERSION;
92
93 63 pythonLocalOverrides = self: super: {
94 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
95 inherit doCheck version;
64 rhodecode-vcsserver =
65 let
66 releaseName = "RhodeCodeVCSServer-${version}";
67 in super.rhodecode-vcsserver.override (attrs: {
68 inherit
69 doCheck
70 version;
96 71
97 72 name = "rhodecode-vcsserver-${version}";
98 releaseName = "RhodeCodeVCSServer-${version}";
73 releaseName = releaseName;
99 74 src = rhodecode-vcsserver-src;
100 75 dontStrip = true; # prevent strip, we don't need it.
101 76
102 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
103 pkgs.git
104 pkgs.subversion
105 ]);
106
107 # TODO: johbo: Make a nicer way to expose the parts. Maybe
108 # pkgs/default.nix?
77 # expose following attributed outside
109 78 passthru = {
110 79 pythonPackages = self;
111 80 };
112 81
113 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
82 propagatedBuildInputs =
83 attrs.propagatedBuildInputs or [] ++ [
84 pkgs.git
85 pkgs.subversion
86 ];
87
88 # set some default locale env variables
89 LC_ALL = "en_US.UTF-8";
90 LOCALE_ARCHIVE =
91 if pkgs.stdenv.isLinux
92 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
93 else "";
94
95 # Add bin directory to path so that tests can find 'vcsserver'.
114 96 preCheck = ''
115 97 export PATH="$out/bin:$PATH"
116 98 '';
117 99
118 # put custom attrs here
100 # custom check phase for testing
119 101 checkPhase = ''
120 102 runHook preCheck
121 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
103 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
122 104 runHook postCheck
123 105 '';
124 106
107 postCheck = ''
108 echo "Cleanup of vcsserver/tests"
109 rm -rf $out/lib/${self.python.libPrefix}/site-packages/vcsserver/tests
110 '';
111
125 112 postInstall = ''
126 echo "Writing meta information for rccontrol to nix-support/rccontrol"
113 echo "Writing vcsserver meta information for rccontrol to nix-support/rccontrol"
127 114 mkdir -p $out/nix-support/rccontrol
128 115 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
129 echo "DONE: Meta information for rccontrol written"
116 echo "DONE: vcsserver meta information for rccontrol written"
117
118 mkdir -p $out/etc
119 cp configs/production.ini $out/etc
120 echo "DONE: saved vcsserver production.ini into $out/etc"
130 121
131 122 # python based programs need to be wrapped
123 mkdir -p $out/bin
124 ln -s ${self.python}/bin/python $out/bin
132 125 ln -s ${self.pyramid}/bin/* $out/bin/
133 126 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
134 127
135 128 # Symlink version control utilities
136 #
137 129 # We ensure that always the correct version is available as a symlink.
138 130 # So that users calling them via the profile path will always use the
139 131 # correct version.
140 ln -s ${self.python}/bin/python $out/bin
132
141 133 ln -s ${pkgs.git}/bin/git $out/bin
142 134 ln -s ${self.mercurial}/bin/hg $out/bin
143 135 ln -s ${pkgs.subversion}/bin/svn* $out/bin
136 echo "DONE: created symlinks into $out/bin"
144 137
145 138 for file in $out/bin/*;
146 139 do
147 140 wrapProgram $file \
148 --set PATH $PATH \
149 --set PYTHONPATH $PYTHONPATH \
141 --prefix PATH : $PATH \
142 --prefix PYTHONPATH : $PYTHONPATH \
150 143 --set PYTHONHASHSEED random
151 144 done
145 echo "DONE: vcsserver binary wrapping"
152 146
153 147 '';
154 148
155 149 });
156 150 };
157 151
152 basePythonPackages = with builtins;
153 if isAttrs pythonPackages then
154 pythonPackages
155 else
156 getAttr pythonPackages pkgs;
157
158 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
159 inherit pkgs;
160 inherit (pkgs) fetchurl fetchgit fetchhg;
161 };
162
163 pythonVCSServerOverrides = import ./pkgs/python-packages-overrides.nix {
164 inherit pkgs basePythonPackages;
165 };
166
167
158 168 # Apply all overrides and fix the final package set
159 myPythonPackages =
160 (fix
169 myPythonPackagesUnfix = with pkgs.lib;
161 170 (extends pythonExternalOverrides
162 171 (extends pythonLocalOverrides
163 (extends pythonOverrides
164 pythonGeneratedPackages))));
172 (extends pythonVCSServerOverrides
173 (extends pythonGeneratedPackages
174 basePythonPackagesUnfix))));
175
176 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
165 177
166 178 in myPythonPackages.rhodecode-vcsserver
@@ -1,60 +1,53 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 { pkgs, basePythonPackages }:
7 { pkgs
8 , basePythonPackages
9 }:
8 10
9 11 let
10 12 sed = "sed -i";
13
11 14 in
12 15
13 16 self: super: {
14 17
15 Beaker = super.Beaker.override (attrs: {
16 patches = [
17 ./patch-beaker-lock-func-debug.diff
18 "gevent" = super."gevent".override (attrs: {
19 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
20 # NOTE: (marcink) odd requirements from gevent aren't set properly,
21 # thus we need to inject psutil manually
22 self."psutil"
18 23 ];
19 24 });
20 25
21 subvertpy = super.subvertpy.override (attrs: {
22 # TODO: johbo: Remove the "or" once we drop 16.03 support
23 SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
26 "hgsubversion" = super."hgsubversion".override (attrs: {
24 27 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
28 pkgs.sqlite
29 #basePythonPackages.sqlite3
30 self.mercurial
31 ];
32 });
33
34 "subvertpy" = super."subvertpy".override (attrs: {
35 SVN_PREFIX = "${pkgs.subversion.dev}";
36 propagatedBuildInputs = [
37 pkgs.apr.dev
25 38 pkgs.aprutil
26 39 pkgs.subversion
27 40 ];
28 preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
29 ${sed} -e "s/'gcc'/'clang'/" setup.py
30 '';
31 41 });
32 42
33 hgsubversion = super.hgsubversion.override (attrs: {
34 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
35 pkgs.sqlite
36 basePythonPackages.sqlite3
43 "mercurial" = super."mercurial".override (attrs: {
44 propagatedBuildInputs = [
45 # self.python.modules.curses
37 46 ];
38 47 });
39 48
40 mercurial = super.mercurial.override (attrs: {
41 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
42 self.python.modules.curses
43 ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
44 pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
45 });
46
47 pyramid = super.pyramid.override (attrs: {
48 postFixup = ''
49 wrapPythonPrograms
50 # TODO: johbo: "wrapPython" adds this magic line which
51 # confuses pserve.
52 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
53 '';
54 });
55
56 # Avoid that setuptools is replaced, this leads to trouble
57 # with buildPythonPackage.
58 setuptools = basePythonPackages.setuptools;
49 # Avoid that base packages screw up the build process
50 inherit (basePythonPackages)
51 setuptools;
59 52
60 53 }
This diff has been collapsed as it changes many lines, (988 lines changed) Show them Hide them
@@ -1,877 +1,947 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 {
5 Beaker = super.buildPythonPackage {
6 name = "Beaker-1.9.1";
7 buildInputs = with self; [];
8 doCheck = false;
9 propagatedBuildInputs = with self; [funcsigs];
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
12 md5 = "46fda0a164e2b0d24ccbda51a2310301";
13 };
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
17 };
18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.9.6";
20 buildInputs = with self; [];
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
6 self: super: {
7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.1.5";
21 9 doCheck = false;
22 propagatedBuildInputs = with self; [MarkupSafe];
23 10 src = fetchurl {
24 url = "https://pypi.python.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
25 md5 = "6411537324b4dba0956aaa8109f3c77b";
26 };
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
30 };
31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.7";
33 buildInputs = with self; [];
34 doCheck = false;
35 propagatedBuildInputs = with self; [MarkupSafe];
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
38 md5 = "5836cc997b1b773ef389bf6629c30e65";
11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
39 13 };
40 14 meta = {
41 15 license = [ pkgs.lib.licenses.mit ];
42 16 };
43 17 };
44 MarkupSafe = super.buildPythonPackage {
45 name = "MarkupSafe-1.0";
46 buildInputs = with self; [];
18 "attrs" = super.buildPythonPackage {
19 name = "attrs-18.1.0";
47 20 doCheck = false;
48 propagatedBuildInputs = with self; [];
49 21 src = fetchurl {
50 url = "https://pypi.python.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
51 md5 = "2fcedc9284d50e577b5192e8e3578355";
52 };
53 meta = {
54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 };
56 };
57 PasteDeploy = super.buildPythonPackage {
58 name = "PasteDeploy-1.5.2";
59 buildInputs = with self; [];
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 md5 = "352b7205c78c8de4987578d19431af3b";
22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
65 24 };
66 25 meta = {
67 26 license = [ pkgs.lib.licenses.mit ];
68 27 };
69 28 };
70 WebOb = super.buildPythonPackage {
71 name = "WebOb-1.7.4";
72 buildInputs = with self; [];
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 name = "backports.shutil-get-terminal-size-1.0.0";
73 31 doCheck = false;
74 propagatedBuildInputs = with self; [];
75 32 src = fetchurl {
76 url = "https://pypi.python.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
77 md5 = "397e46892d7f199b1a07eb20a2d3d9bd";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
78 35 };
79 36 meta = {
80 37 license = [ pkgs.lib.licenses.mit ];
81 38 };
82 39 };
83 WebTest = super.buildPythonPackage {
84 name = "WebTest-2.0.29";
85 buildInputs = with self; [];
40 "beautifulsoup4" = super.buildPythonPackage {
41 name = "beautifulsoup4-4.6.3";
86 42 doCheck = false;
87 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
88 43 src = fetchurl {
89 url = "https://pypi.python.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
90 md5 = "30b4cf0d340b9a5335fac4389e6f84fc";
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
91 46 };
92 47 meta = {
93 48 license = [ pkgs.lib.licenses.mit ];
94 49 };
95 50 };
96 backports.shutil-get-terminal-size = super.buildPythonPackage {
97 name = "backports.shutil-get-terminal-size-1.0.0";
98 buildInputs = with self; [];
99 doCheck = false;
100 propagatedBuildInputs = with self; [];
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
103 md5 = "03267762480bd86b50580dc19dff3c66";
104 };
105 meta = {
106 license = [ pkgs.lib.licenses.mit ];
107 };
108 };
109 beautifulsoup4 = super.buildPythonPackage {
110 name = "beautifulsoup4-4.6.0";
111 buildInputs = with self; [];
51 "configobj" = super.buildPythonPackage {
52 name = "configobj-5.0.6";
112 53 doCheck = false;
113 propagatedBuildInputs = with self; [];
54 propagatedBuildInputs = [
55 self."six"
56 ];
114 57 src = fetchurl {
115 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
116 md5 = "c17714d0f91a23b708a592cb3c697728";
117 };
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
120 };
121 };
122 configobj = super.buildPythonPackage {
123 name = "configobj-5.0.6";
124 buildInputs = with self; [];
125 doCheck = false;
126 propagatedBuildInputs = with self; [six];
127 src = fetchurl {
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
130 60 };
131 61 meta = {
132 62 license = [ pkgs.lib.licenses.bsdOriginal ];
133 63 };
134 64 };
135 cov-core = super.buildPythonPackage {
65 "cov-core" = super.buildPythonPackage {
136 66 name = "cov-core-1.15.0";
137 buildInputs = with self; [];
138 67 doCheck = false;
139 propagatedBuildInputs = with self; [coverage];
68 propagatedBuildInputs = [
69 self."coverage"
70 ];
140 71 src = fetchurl {
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
143 74 };
144 75 meta = {
145 76 license = [ pkgs.lib.licenses.mit ];
146 77 };
147 78 };
148 coverage = super.buildPythonPackage {
79 "coverage" = super.buildPythonPackage {
149 80 name = "coverage-3.7.1";
150 buildInputs = with self; [];
151 81 doCheck = false;
152 propagatedBuildInputs = with self; [];
153 82 src = fetchurl {
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
83 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
84 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
156 85 };
157 86 meta = {
158 87 license = [ pkgs.lib.licenses.bsdOriginal ];
159 88 };
160 89 };
161 decorator = super.buildPythonPackage {
90 "decorator" = super.buildPythonPackage {
162 91 name = "decorator-4.1.2";
163 buildInputs = with self; [];
164 92 doCheck = false;
165 propagatedBuildInputs = with self; [];
166 93 src = fetchurl {
167 url = "https://pypi.python.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
168 md5 = "a0f7f4fe00ae2dde93494d90c192cf8c";
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
169 96 };
170 97 meta = {
171 98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 99 };
173 100 };
174 dulwich = super.buildPythonPackage {
175 name = "dulwich-0.13.0";
176 buildInputs = with self; [];
101 "dogpile.cache" = super.buildPythonPackage {
102 name = "dogpile.cache-0.6.6";
103 doCheck = false;
104 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
106 sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
107 };
108 meta = {
109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 };
111 };
112 "dogpile.core" = super.buildPythonPackage {
113 name = "dogpile.core-0.4.1";
177 114 doCheck = false;
178 propagatedBuildInputs = with self; [];
179 115 src = fetchurl {
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 };
119 meta = {
120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 };
122 };
123 "dulwich" = super.buildPythonPackage {
124 name = "dulwich-0.13.0";
125 doCheck = false;
126 src = fetchurl {
127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
182 129 };
183 130 meta = {
184 131 license = [ pkgs.lib.licenses.gpl2Plus ];
185 132 };
186 133 };
187 enum34 = super.buildPythonPackage {
134 "enum34" = super.buildPythonPackage {
188 135 name = "enum34-1.1.6";
189 buildInputs = with self; [];
190 136 doCheck = false;
191 propagatedBuildInputs = with self; [];
192 137 src = fetchurl {
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
195 140 };
196 141 meta = {
197 142 license = [ pkgs.lib.licenses.bsdOriginal ];
198 143 };
199 144 };
200 funcsigs = super.buildPythonPackage {
145 "funcsigs" = super.buildPythonPackage {
201 146 name = "funcsigs-1.0.2";
202 buildInputs = with self; [];
203 147 doCheck = false;
204 propagatedBuildInputs = with self; [];
205 148 src = fetchurl {
206 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
207 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
208 151 };
209 152 meta = {
210 153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
211 154 };
212 155 };
213 gevent = super.buildPythonPackage {
214 name = "gevent-1.2.2";
215 buildInputs = with self; [];
156 "gevent" = super.buildPythonPackage {
157 name = "gevent-1.3.5";
216 158 doCheck = false;
217 propagatedBuildInputs = with self; [greenlet];
159 propagatedBuildInputs = [
160 self."greenlet"
161 ];
218 162 src = fetchurl {
219 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
220 md5 = "7f0baf355384fe5ff2ecf66853422554";
163 url = "https://files.pythonhosted.org/packages/e6/0a/fc345c6e6161f84484870dbcaa58e427c10bd9bdcd08a69bed3d6b398bf1/gevent-1.3.5.tar.gz";
164 sha256 = "1w3gydxirgd2f60c5yv579w4903ds9s4g3587ik4jby97hgqc5bz";
221 165 };
222 166 meta = {
223 167 license = [ pkgs.lib.licenses.mit ];
224 168 };
225 169 };
226 gprof2dot = super.buildPythonPackage {
170 "gprof2dot" = super.buildPythonPackage {
227 171 name = "gprof2dot-2017.9.19";
228 buildInputs = with self; [];
229 172 doCheck = false;
230 propagatedBuildInputs = with self; [];
231 173 src = fetchurl {
232 url = "https://pypi.python.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
233 md5 = "cda2d552bb0d0b9f16e6824a9aabd225";
174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
234 176 };
235 177 meta = {
236 178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
237 179 };
238 180 };
239 greenlet = super.buildPythonPackage {
181 "greenlet" = super.buildPythonPackage {
240 182 name = "greenlet-0.4.13";
241 buildInputs = with self; [];
242 183 doCheck = false;
243 propagatedBuildInputs = with self; [];
244 184 src = fetchurl {
245 url = "https://pypi.python.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
246 md5 = "6e0b9dd5385f81d478451ec8ed1d62b3";
185 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
186 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
187 };
188 meta = {
189 license = [ pkgs.lib.licenses.mit ];
190 };
191 };
192 "gunicorn" = super.buildPythonPackage {
193 name = "gunicorn-19.9.0";
194 doCheck = false;
195 src = fetchurl {
196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
247 198 };
248 199 meta = {
249 200 license = [ pkgs.lib.licenses.mit ];
250 201 };
251 202 };
252 gunicorn = super.buildPythonPackage {
253 name = "gunicorn-19.7.1";
254 buildInputs = with self; [];
203 "hg-evolve" = super.buildPythonPackage {
204 name = "hg-evolve-8.0.1";
205 doCheck = false;
206 src = fetchurl {
207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
209 };
210 meta = {
211 license = [ { fullName = "GPLv2+"; } ];
212 };
213 };
214 "hgsubversion" = super.buildPythonPackage {
215 name = "hgsubversion-1.9.2";
255 216 doCheck = false;
256 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = [
218 self."mercurial"
219 self."subvertpy"
220 ];
257 221 src = fetchurl {
258 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
259 md5 = "174d3c3cd670a5be0404d84c484e590c";
222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
224 };
225 meta = {
226 license = [ pkgs.lib.licenses.gpl1 ];
227 };
228 };
229 "hupper" = super.buildPythonPackage {
230 name = "hupper-1.3";
231 doCheck = false;
232 src = fetchurl {
233 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
234 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
260 235 };
261 236 meta = {
262 237 license = [ pkgs.lib.licenses.mit ];
263 238 };
264 239 };
265 hg-evolve = super.buildPythonPackage {
266 name = "hg-evolve-7.0.1";
267 buildInputs = with self; [];
240 "ipdb" = super.buildPythonPackage {
241 name = "ipdb-0.11";
268 242 doCheck = false;
269 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = [
244 self."setuptools"
245 self."ipython"
246 ];
270 247 src = fetchurl {
271 url = "https://pypi.python.org/packages/92/5c/4c216be1a08f326a12076b645f4892a2b0865810db1f4a0c9648f1f4c113/hg-evolve-7.0.1.tar.gz";
272 md5 = "2dfa926846ea873a8406bababb06b277";
248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
273 250 };
274 251 meta = {
275 license = [ { fullName = "GPLv2+"; } ];
252 license = [ pkgs.lib.licenses.bsdOriginal ];
276 253 };
277 254 };
278 hgsubversion = super.buildPythonPackage {
279 name = "hgsubversion-1.9";
280 buildInputs = with self; [];
255 "ipython" = super.buildPythonPackage {
256 name = "ipython-5.1.0";
281 257 doCheck = false;
282 propagatedBuildInputs = with self; [mercurial subvertpy];
258 propagatedBuildInputs = [
259 self."setuptools"
260 self."decorator"
261 self."pickleshare"
262 self."simplegeneric"
263 self."traitlets"
264 self."prompt-toolkit"
265 self."pygments"
266 self."pexpect"
267 self."backports.shutil-get-terminal-size"
268 self."pathlib2"
269 self."pexpect"
270 ];
283 271 src = fetchurl {
284 url = "https://pypi.python.org/packages/db/26/7293a6c6b85e2a74ab452e9ba7f00b04ff0e440e6cd4f84131ac5d5e6b22/hgsubversion-1.9.tar.gz";
285 md5 = "0c6f93ef12cc2e7fe67286f16bcc7211";
272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
286 274 };
287 275 meta = {
288 license = [ pkgs.lib.licenses.gpl1 ];
276 license = [ pkgs.lib.licenses.bsdOriginal ];
289 277 };
290 278 };
291 hupper = super.buildPythonPackage {
292 name = "hupper-1.0";
293 buildInputs = with self; [];
279 "ipython-genutils" = super.buildPythonPackage {
280 name = "ipython-genutils-0.2.0";
294 281 doCheck = false;
295 propagatedBuildInputs = with self; [];
296 282 src = fetchurl {
297 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
298 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
285 };
286 meta = {
287 license = [ pkgs.lib.licenses.bsdOriginal ];
288 };
289 };
290 "mako" = super.buildPythonPackage {
291 name = "mako-1.0.7";
292 doCheck = false;
293 propagatedBuildInputs = [
294 self."markupsafe"
295 ];
296 src = fetchurl {
297 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
298 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 infrae.cache = super.buildPythonPackage {
305 name = "infrae.cache-1.0.1";
306 buildInputs = with self; [];
304 "markupsafe" = super.buildPythonPackage {
305 name = "markupsafe-1.0";
307 306 doCheck = false;
308 propagatedBuildInputs = with self; [Beaker repoze.lru];
309 307 src = fetchurl {
310 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
311 md5 = "b09076a766747e6ed2a755cc62088e32";
312 };
313 meta = {
314 license = [ pkgs.lib.licenses.zpt21 ];
315 };
316 };
317 ipdb = super.buildPythonPackage {
318 name = "ipdb-0.10.3";
319 buildInputs = with self; [];
320 doCheck = false;
321 propagatedBuildInputs = with self; [setuptools ipython];
322 src = fetchurl {
323 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
324 md5 = "def1f6ac075d54bdee07e6501263d4fa";
308 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
309 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
325 310 };
326 311 meta = {
327 312 license = [ pkgs.lib.licenses.bsdOriginal ];
328 313 };
329 314 };
330 ipython = super.buildPythonPackage {
331 name = "ipython-5.1.0";
332 buildInputs = with self; [];
315 "mercurial" = super.buildPythonPackage {
316 name = "mercurial-4.6.2";
333 317 doCheck = false;
334 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
335 318 src = fetchurl {
336 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
337 md5 = "47c8122420f65b58784cb4b9b4af35e3";
319 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
320 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
338 321 };
339 322 meta = {
340 license = [ pkgs.lib.licenses.bsdOriginal ];
323 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
341 324 };
342 325 };
343 ipython-genutils = super.buildPythonPackage {
344 name = "ipython-genutils-0.2.0";
345 buildInputs = with self; [];
326 "mock" = super.buildPythonPackage {
327 name = "mock-1.0.1";
346 328 doCheck = false;
347 propagatedBuildInputs = with self; [];
348 329 src = fetchurl {
349 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
350 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
330 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
331 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
351 332 };
352 333 meta = {
353 334 license = [ pkgs.lib.licenses.bsdOriginal ];
354 335 };
355 336 };
356 mercurial = super.buildPythonPackage {
357 name = "mercurial-4.4.2";
358 buildInputs = with self; [];
337 "more-itertools" = super.buildPythonPackage {
338 name = "more-itertools-4.3.0";
359 339 doCheck = false;
360 propagatedBuildInputs = with self; [];
340 propagatedBuildInputs = [
341 self."six"
342 ];
361 343 src = fetchurl {
362 url = "https://pypi.python.org/packages/d0/83/92a5fa662ba277128db305e39e7ea5a638f2f1cbbc6dc5fbf4c14aefae22/mercurial-4.4.2.tar.gz";
363 md5 = "95769125cf7e9dbc341a983253acefcd";
344 url = "https://files.pythonhosted.org/packages/88/ff/6d485d7362f39880810278bdc906c13300db05485d9c65971dec1142da6a/more-itertools-4.3.0.tar.gz";
345 sha256 = "17h3na0rdh8xq30w4b9pizgkdxmm51896bxw600x84jflg9vaxn4";
364 346 };
365 347 meta = {
366 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
348 license = [ pkgs.lib.licenses.mit ];
367 349 };
368 350 };
369 mock = super.buildPythonPackage {
370 name = "mock-1.0.1";
371 buildInputs = with self; [];
351 "msgpack-python" = super.buildPythonPackage {
352 name = "msgpack-python-0.5.6";
372 353 doCheck = false;
373 propagatedBuildInputs = with self; [];
374 354 src = fetchurl {
375 url = "https://pypi.python.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
376 md5 = "c3971991738caa55ec7c356bbc154ee2";
377 };
378 meta = {
379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 };
381 };
382 msgpack-python = super.buildPythonPackage {
383 name = "msgpack-python-0.4.8";
384 buildInputs = with self; [];
385 doCheck = false;
386 propagatedBuildInputs = with self; [];
387 src = fetchurl {
388 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
389 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
355 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
356 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
390 357 };
391 358 meta = {
392 359 license = [ pkgs.lib.licenses.asl20 ];
393 360 };
394 361 };
395 pathlib2 = super.buildPythonPackage {
396 name = "pathlib2-2.3.0";
397 buildInputs = with self; [];
362 "pastedeploy" = super.buildPythonPackage {
363 name = "pastedeploy-1.5.2";
398 364 doCheck = false;
399 propagatedBuildInputs = with self; [six scandir];
400 365 src = fetchurl {
401 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
402 md5 = "89c90409d11fd5947966b6a30a47d18c";
366 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
367 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
368 };
369 meta = {
370 license = [ pkgs.lib.licenses.mit ];
371 };
372 };
373 "pathlib2" = super.buildPythonPackage {
374 name = "pathlib2-2.3.0";
375 doCheck = false;
376 propagatedBuildInputs = [
377 self."six"
378 self."scandir"
379 ];
380 src = fetchurl {
381 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
382 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
403 383 };
404 384 meta = {
405 385 license = [ pkgs.lib.licenses.mit ];
406 386 };
407 387 };
408 pexpect = super.buildPythonPackage {
409 name = "pexpect-4.4.0";
410 buildInputs = with self; [];
388 "pexpect" = super.buildPythonPackage {
389 name = "pexpect-4.6.0";
411 390 doCheck = false;
412 propagatedBuildInputs = with self; [ptyprocess];
391 propagatedBuildInputs = [
392 self."ptyprocess"
393 ];
413 394 src = fetchurl {
414 url = "https://pypi.python.org/packages/fa/c3/60c0cbf96f242d0b47a82e9ca634dcd6dcb043832cf05e17540812e1c707/pexpect-4.4.0.tar.gz";
415 md5 = "e9b07f0765df8245ac72201d757baaef";
395 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
396 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
416 397 };
417 398 meta = {
418 399 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
419 400 };
420 401 };
421 pickleshare = super.buildPythonPackage {
402 "pickleshare" = super.buildPythonPackage {
422 403 name = "pickleshare-0.7.4";
423 buildInputs = with self; [];
404 doCheck = false;
405 propagatedBuildInputs = [
406 self."pathlib2"
407 ];
408 src = fetchurl {
409 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
410 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
411 };
412 meta = {
413 license = [ pkgs.lib.licenses.mit ];
414 };
415 };
416 "plaster" = super.buildPythonPackage {
417 name = "plaster-1.0";
424 418 doCheck = false;
425 propagatedBuildInputs = with self; [pathlib2];
419 propagatedBuildInputs = [
420 self."setuptools"
421 ];
426 422 src = fetchurl {
427 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
428 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
423 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
424 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
425 };
426 meta = {
427 license = [ pkgs.lib.licenses.mit ];
428 };
429 };
430 "plaster-pastedeploy" = super.buildPythonPackage {
431 name = "plaster-pastedeploy-0.6";
432 doCheck = false;
433 propagatedBuildInputs = [
434 self."pastedeploy"
435 self."plaster"
436 ];
437 src = fetchurl {
438 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
439 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
440 };
441 meta = {
442 license = [ pkgs.lib.licenses.mit ];
443 };
444 };
445 "pluggy" = super.buildPythonPackage {
446 name = "pluggy-0.6.0";
447 doCheck = false;
448 src = fetchurl {
449 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
450 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
429 451 };
430 452 meta = {
431 453 license = [ pkgs.lib.licenses.mit ];
432 454 };
433 455 };
434 plaster = super.buildPythonPackage {
435 name = "plaster-1.0";
436 buildInputs = with self; [];
456 "prompt-toolkit" = super.buildPythonPackage {
457 name = "prompt-toolkit-1.0.15";
437 458 doCheck = false;
438 propagatedBuildInputs = with self; [setuptools];
459 propagatedBuildInputs = [
460 self."six"
461 self."wcwidth"
462 ];
439 463 src = fetchurl {
440 url = "https://pypi.python.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
441 md5 = "80e6beb4760c16fea31754babcc0576e";
464 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
465 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
442 466 };
443 467 meta = {
444 license = [ pkgs.lib.licenses.mit ];
468 license = [ pkgs.lib.licenses.bsdOriginal ];
445 469 };
446 470 };
447 plaster-pastedeploy = super.buildPythonPackage {
448 name = "plaster-pastedeploy-0.4.2";
449 buildInputs = with self; [];
471 "psutil" = super.buildPythonPackage {
472 name = "psutil-5.4.6";
450 473 doCheck = false;
451 propagatedBuildInputs = with self; [PasteDeploy plaster];
452 474 src = fetchurl {
453 url = "https://pypi.python.org/packages/2c/62/0daf9c0be958e785023e583e51baac15863699e956bfb3d448898d80edd8/plaster_pastedeploy-0.4.2.tar.gz";
454 md5 = "58fd7852002909378e818c9d5b71e90a";
455 };
456 meta = {
457 license = [ pkgs.lib.licenses.mit ];
458 };
459 };
460 prompt-toolkit = super.buildPythonPackage {
461 name = "prompt-toolkit-1.0.15";
462 buildInputs = with self; [];
463 doCheck = false;
464 propagatedBuildInputs = with self; [six wcwidth];
465 src = fetchurl {
466 url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
467 md5 = "8fe70295006dbc8afedd43e5eba99032";
475 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
476 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
468 477 };
469 478 meta = {
470 479 license = [ pkgs.lib.licenses.bsdOriginal ];
471 480 };
472 481 };
473 ptyprocess = super.buildPythonPackage {
474 name = "ptyprocess-0.5.2";
475 buildInputs = with self; [];
482 "ptyprocess" = super.buildPythonPackage {
483 name = "ptyprocess-0.6.0";
476 484 doCheck = false;
477 propagatedBuildInputs = with self; [];
478 485 src = fetchurl {
479 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
480 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
486 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
487 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
481 488 };
482 489 meta = {
483 490 license = [ ];
484 491 };
485 492 };
486 py = super.buildPythonPackage {
487 name = "py-1.5.2";
488 buildInputs = with self; [];
493 "py" = super.buildPythonPackage {
494 name = "py-1.5.3";
489 495 doCheck = false;
490 propagatedBuildInputs = with self; [];
491 496 src = fetchurl {
492 url = "https://pypi.python.org/packages/90/e3/e075127d39d35f09a500ebb4a90afd10f9ef0a1d28a6d09abeec0e444fdd/py-1.5.2.tar.gz";
493 md5 = "279ca69c632069e1b71e11b14641ca28";
497 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
498 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
494 499 };
495 500 meta = {
496 501 license = [ pkgs.lib.licenses.mit ];
497 502 };
498 503 };
499 pygments = super.buildPythonPackage {
504 "pygments" = super.buildPythonPackage {
500 505 name = "pygments-2.2.0";
501 buildInputs = with self; [];
502 506 doCheck = false;
503 propagatedBuildInputs = with self; [];
504 507 src = fetchurl {
505 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
506 md5 = "13037baca42f16917cbd5ad2fab50844";
508 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
509 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
507 510 };
508 511 meta = {
509 512 license = [ pkgs.lib.licenses.bsdOriginal ];
510 513 };
511 514 };
512 pyramid = super.buildPythonPackage {
513 name = "pyramid-1.9.1";
514 buildInputs = with self; [];
515 "pyramid" = super.buildPythonPackage {
516 name = "pyramid-1.9.2";
515 517 doCheck = false;
516 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
518 propagatedBuildInputs = [
519 self."setuptools"
520 self."webob"
521 self."repoze.lru"
522 self."zope.interface"
523 self."zope.deprecation"
524 self."venusian"
525 self."translationstring"
526 self."pastedeploy"
527 self."plaster"
528 self."plaster-pastedeploy"
529 self."hupper"
530 ];
517 531 src = fetchurl {
518 url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
519 md5 = "0163e19c58c2d12976a3b6fdb57e052d";
532 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
533 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
520 534 };
521 535 meta = {
522 536 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 537 };
524 538 };
525 pyramid-jinja2 = super.buildPythonPackage {
526 name = "pyramid-jinja2-2.7";
527 buildInputs = with self; [];
539 "pyramid-mako" = super.buildPythonPackage {
540 name = "pyramid-mako-1.0.2";
528 541 doCheck = false;
529 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
542 propagatedBuildInputs = [
543 self."pyramid"
544 self."mako"
545 ];
530 546 src = fetchurl {
531 url = "https://pypi.python.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
532 md5 = "c2f8b2cd7b73a6f1d9a311fcfaf4fb92";
547 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
548 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
533 549 };
534 550 meta = {
535 551 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 552 };
537 553 };
538 pyramid-mako = super.buildPythonPackage {
539 name = "pyramid-mako-1.0.2";
540 buildInputs = with self; [];
541 doCheck = false;
542 propagatedBuildInputs = with self; [pyramid Mako];
543 src = fetchurl {
544 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
545 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
546 };
547 meta = {
548 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
549 };
550 };
551 pytest = super.buildPythonPackage {
552 name = "pytest-3.2.5";
553 buildInputs = with self; [];
554 "pytest" = super.buildPythonPackage {
555 name = "pytest-3.6.0";
554 556 doCheck = false;
555 propagatedBuildInputs = with self; [py setuptools];
557 propagatedBuildInputs = [
558 self."py"
559 self."six"
560 self."setuptools"
561 self."attrs"
562 self."more-itertools"
563 self."atomicwrites"
564 self."pluggy"
565 self."funcsigs"
566 ];
556 567 src = fetchurl {
557 url = "https://pypi.python.org/packages/1f/f8/8cd74c16952163ce0db0bd95fdd8810cbf093c08be00e6e665ebf0dc3138/pytest-3.2.5.tar.gz";
558 md5 = "6dbe9bb093883f75394a689a1426ac6f";
559 };
560 meta = {
561 license = [ pkgs.lib.licenses.mit ];
562 };
563 };
564 pytest-catchlog = super.buildPythonPackage {
565 name = "pytest-catchlog-1.2.2";
566 buildInputs = with self; [];
567 doCheck = false;
568 propagatedBuildInputs = with self; [py pytest];
569 src = fetchurl {
570 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
571 md5 = "09d890c54c7456c818102b7ff8c182c8";
568 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
569 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
572 570 };
573 571 meta = {
574 572 license = [ pkgs.lib.licenses.mit ];
575 573 };
576 574 };
577 pytest-cov = super.buildPythonPackage {
575 "pytest-cov" = super.buildPythonPackage {
578 576 name = "pytest-cov-2.5.1";
579 buildInputs = with self; [];
580 577 doCheck = false;
581 propagatedBuildInputs = with self; [pytest coverage];
578 propagatedBuildInputs = [
579 self."pytest"
580 self."coverage"
581 ];
582 582 src = fetchurl {
583 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
583 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
585 585 };
586 586 meta = {
587 587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 588 };
589 589 };
590 pytest-profiling = super.buildPythonPackage {
591 name = "pytest-profiling-1.2.11";
592 buildInputs = with self; [];
590 "pytest-profiling" = super.buildPythonPackage {
591 name = "pytest-profiling-1.3.0";
593 592 doCheck = false;
594 propagatedBuildInputs = with self; [six pytest gprof2dot];
593 propagatedBuildInputs = [
594 self."six"
595 self."pytest"
596 self."gprof2dot"
597 ];
595 598 src = fetchurl {
596 url = "https://pypi.python.org/packages/c0/4a/b4aa786e93c07a86f1f87c581a36bf355a9e06a9da7e00dbd05047626bd2/pytest-profiling-1.2.11.tar.gz";
597 md5 = "9ef6b60248731be5d44477980408e8f7";
599 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
600 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
598 601 };
599 602 meta = {
600 603 license = [ pkgs.lib.licenses.mit ];
601 604 };
602 605 };
603 pytest-runner = super.buildPythonPackage {
604 name = "pytest-runner-3.0";
605 buildInputs = with self; [];
606 "pytest-runner" = super.buildPythonPackage {
607 name = "pytest-runner-4.2";
606 608 doCheck = false;
607 propagatedBuildInputs = with self; [];
608 609 src = fetchurl {
609 url = "https://pypi.python.org/packages/65/b4/ae89338cd2d81e2cc54bd6db2e962bfe948f612303610d68ab24539ac2d1/pytest-runner-3.0.tar.gz";
610 md5 = "8f8363a52bbabc4cedd5e239beb2ba11";
610 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
611 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
611 612 };
612 613 meta = {
613 614 license = [ pkgs.lib.licenses.mit ];
614 615 };
615 616 };
616 pytest-sugar = super.buildPythonPackage {
617 name = "pytest-sugar-0.9.0";
618 buildInputs = with self; [];
617 "pytest-sugar" = super.buildPythonPackage {
618 name = "pytest-sugar-0.9.1";
619 619 doCheck = false;
620 propagatedBuildInputs = with self; [pytest termcolor];
620 propagatedBuildInputs = [
621 self."pytest"
622 self."termcolor"
623 ];
621 624 src = fetchurl {
622 url = "https://pypi.python.org/packages/49/d8/c5ff6cca3ce2ebd8b73eec89779bf6b4a7737456a70e8ea4d44c1ff90f71/pytest-sugar-0.9.0.tar.gz";
623 md5 = "89fbff17277fa6a95a560a04b68cb9f9";
625 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
626 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
624 627 };
625 628 meta = {
626 629 license = [ pkgs.lib.licenses.bsdOriginal ];
627 630 };
628 631 };
629 pytest-timeout = super.buildPythonPackage {
630 name = "pytest-timeout-1.2.0";
631 buildInputs = with self; [];
632 "pytest-timeout" = super.buildPythonPackage {
633 name = "pytest-timeout-1.2.1";
632 634 doCheck = false;
633 propagatedBuildInputs = with self; [pytest];
635 propagatedBuildInputs = [
636 self."pytest"
637 ];
634 638 src = fetchurl {
635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
636 md5 = "83607d91aa163562c7ee835da57d061d";
639 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
640 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
637 641 };
638 642 meta = {
639 643 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
640 644 };
641 645 };
642 repoze.lru = super.buildPythonPackage {
646 "repoze.lru" = super.buildPythonPackage {
643 647 name = "repoze.lru-0.7";
644 buildInputs = with self; [];
645 648 doCheck = false;
646 propagatedBuildInputs = with self; [];
647 649 src = fetchurl {
648 url = "https://pypi.python.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
649 md5 = "c08cc030387e0b1fc53c5c7d964b35e2";
650 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
651 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
650 652 };
651 653 meta = {
652 654 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
653 655 };
654 656 };
655 rhodecode-vcsserver = super.buildPythonPackage {
656 name = "rhodecode-vcsserver-4.12.4";
657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
657 "rhodecode-vcsserver" = super.buildPythonPackage {
658 name = "rhodecode-vcsserver-4.13.0";
659 buildInputs = [
660 self."pytest"
661 self."py"
662 self."pytest-cov"
663 self."pytest-sugar"
664 self."pytest-runner"
665 self."pytest-profiling"
666 self."gprof2dot"
667 self."pytest-timeout"
668 self."mock"
669 self."webtest"
670 self."cov-core"
671 self."coverage"
672 self."configobj"
673 ];
658 674 doCheck = true;
659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
675 propagatedBuildInputs = [
676 self."configobj"
677 self."dogpile.cache"
678 self."dogpile.core"
679 self."decorator"
680 self."dulwich"
681 self."hgsubversion"
682 self."hg-evolve"
683 self."mako"
684 self."markupsafe"
685 self."mercurial"
686 self."msgpack-python"
687 self."pastedeploy"
688 self."psutil"
689 self."pyramid"
690 self."pyramid-mako"
691 self."pygments"
692 self."pathlib2"
693 self."repoze.lru"
694 self."simplejson"
695 self."subprocess32"
696 self."setproctitle"
697 self."subvertpy"
698 self."six"
699 self."translationstring"
700 self."webob"
701 self."zope.deprecation"
702 self."zope.interface"
703 self."gevent"
704 self."greenlet"
705 self."gunicorn"
706 self."waitress"
707 self."ipdb"
708 self."ipython"
709 self."pytest"
710 self."py"
711 self."pytest-cov"
712 self."pytest-sugar"
713 self."pytest-runner"
714 self."pytest-profiling"
715 self."gprof2dot"
716 self."pytest-timeout"
717 self."mock"
718 self."webtest"
719 self."cov-core"
720 self."coverage"
721 ];
660 722 src = ./.;
661 723 meta = {
662 724 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
663 725 };
664 726 };
665 scandir = super.buildPythonPackage {
666 name = "scandir-1.7";
667 buildInputs = with self; [];
727 "scandir" = super.buildPythonPackage {
728 name = "scandir-1.9.0";
668 729 doCheck = false;
669 propagatedBuildInputs = with self; [];
670 730 src = fetchurl {
671 url = "https://pypi.python.org/packages/13/bb/e541b74230bbf7a20a3949a2ee6631be299378a784f5445aa5d0047c192b/scandir-1.7.tar.gz";
672 md5 = "037e5f24d1a0e78b17faca72dea9555f";
731 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
732 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
673 733 };
674 734 meta = {
675 735 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 736 };
677 737 };
678 setuptools = super.buildPythonPackage {
679 name = "setuptools-30.1.0";
680 buildInputs = with self; [];
738 "setproctitle" = super.buildPythonPackage {
739 name = "setproctitle-1.1.10";
681 740 doCheck = false;
682 propagatedBuildInputs = with self; [];
683 741 src = fetchurl {
684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
742 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
743 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
744 };
745 meta = {
746 license = [ pkgs.lib.licenses.bsdOriginal ];
747 };
748 };
749 "setuptools" = super.buildPythonPackage {
750 name = "setuptools-40.1.0";
751 doCheck = false;
752 src = fetchurl {
753 url = "https://files.pythonhosted.org/packages/5a/df/b2e3d9693bb0dcbeac516a73dd7a9eb82b126ae52e4a74605a9b01beddd5/setuptools-40.1.0.zip";
754 sha256 = "0w1blx5ajga5y15dci0mddk49cf2xpq0mp7rp7jrqr2diqk00ib6";
686 755 };
687 756 meta = {
688 757 license = [ pkgs.lib.licenses.mit ];
689 758 };
690 759 };
691 simplegeneric = super.buildPythonPackage {
760 "simplegeneric" = super.buildPythonPackage {
692 761 name = "simplegeneric-0.8.1";
693 buildInputs = with self; [];
694 762 doCheck = false;
695 propagatedBuildInputs = with self; [];
696 763 src = fetchurl {
697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
698 md5 = "f9c1fab00fd981be588fc32759f474e3";
764 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
765 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
699 766 };
700 767 meta = {
701 license = [ pkgs.lib.licenses.zpt21 ];
768 license = [ pkgs.lib.licenses.zpl21 ];
702 769 };
703 770 };
704 simplejson = super.buildPythonPackage {
771 "simplejson" = super.buildPythonPackage {
705 772 name = "simplejson-3.11.1";
706 buildInputs = with self; [];
707 773 doCheck = false;
708 propagatedBuildInputs = with self; [];
709 774 src = fetchurl {
710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
775 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
776 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
712 777 };
713 778 meta = {
714 779 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
715 780 };
716 781 };
717 six = super.buildPythonPackage {
782 "six" = super.buildPythonPackage {
718 783 name = "six-1.11.0";
719 buildInputs = with self; [];
720 784 doCheck = false;
721 propagatedBuildInputs = with self; [];
722 785 src = fetchurl {
723 url = "https://pypi.python.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
724 md5 = "d12789f9baf7e9fb2524c0c64f1773f8";
786 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
787 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
725 788 };
726 789 meta = {
727 790 license = [ pkgs.lib.licenses.mit ];
728 791 };
729 792 };
730 subprocess32 = super.buildPythonPackage {
731 name = "subprocess32-3.2.7";
732 buildInputs = with self; [];
793 "subprocess32" = super.buildPythonPackage {
794 name = "subprocess32-3.5.1";
733 795 doCheck = false;
734 propagatedBuildInputs = with self; [];
735 796 src = fetchurl {
736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
737 md5 = "824c801e479d3e916879aae3e9c15e16";
797 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
798 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
738 799 };
739 800 meta = {
740 801 license = [ pkgs.lib.licenses.psfl ];
741 802 };
742 803 };
743 subvertpy = super.buildPythonPackage {
804 "subvertpy" = super.buildPythonPackage {
744 805 name = "subvertpy-0.10.1";
745 buildInputs = with self; [];
746 806 doCheck = false;
747 propagatedBuildInputs = with self; [];
748 807 src = fetchurl {
749 url = "https://pypi.python.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
750 md5 = "a70e03579902d480f5e9f8c570f6536b";
808 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
809 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
751 810 };
752 811 meta = {
753 812 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
754 813 };
755 814 };
756 termcolor = super.buildPythonPackage {
815 "termcolor" = super.buildPythonPackage {
757 816 name = "termcolor-1.1.0";
758 buildInputs = with self; [];
759 817 doCheck = false;
760 propagatedBuildInputs = with self; [];
761 818 src = fetchurl {
762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
763 md5 = "043e89644f8909d462fbbfa511c768df";
819 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
820 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
764 821 };
765 822 meta = {
766 823 license = [ pkgs.lib.licenses.mit ];
767 824 };
768 825 };
769 traitlets = super.buildPythonPackage {
826 "traitlets" = super.buildPythonPackage {
770 827 name = "traitlets-4.3.2";
771 buildInputs = with self; [];
772 828 doCheck = false;
773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
829 propagatedBuildInputs = [
830 self."ipython-genutils"
831 self."six"
832 self."decorator"
833 self."enum34"
834 ];
774 835 src = fetchurl {
775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
836 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
837 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
777 838 };
778 839 meta = {
779 840 license = [ pkgs.lib.licenses.bsdOriginal ];
780 841 };
781 842 };
782 translationstring = super.buildPythonPackage {
843 "translationstring" = super.buildPythonPackage {
783 844 name = "translationstring-1.3";
784 buildInputs = with self; [];
785 845 doCheck = false;
786 propagatedBuildInputs = with self; [];
787 846 src = fetchurl {
788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
847 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
848 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
790 849 };
791 850 meta = {
792 851 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
793 852 };
794 853 };
795 venusian = super.buildPythonPackage {
854 "venusian" = super.buildPythonPackage {
796 855 name = "venusian-1.1.0";
797 buildInputs = with self; [];
798 856 doCheck = false;
799 propagatedBuildInputs = with self; [];
800 857 src = fetchurl {
801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
858 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
859 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
803 860 };
804 861 meta = {
805 862 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
806 863 };
807 864 };
808 waitress = super.buildPythonPackage {
865 "waitress" = super.buildPythonPackage {
809 866 name = "waitress-1.1.0";
810 buildInputs = with self; [];
811 867 doCheck = false;
812 propagatedBuildInputs = with self; [];
813 868 src = fetchurl {
814 url = "https://pypi.python.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
815 md5 = "0f1eb7fdfdbf2e6d18decbda1733045c";
869 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
870 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
816 871 };
817 872 meta = {
818 license = [ pkgs.lib.licenses.zpt21 ];
873 license = [ pkgs.lib.licenses.zpl21 ];
819 874 };
820 875 };
821 wcwidth = super.buildPythonPackage {
876 "wcwidth" = super.buildPythonPackage {
822 877 name = "wcwidth-0.1.7";
823 buildInputs = with self; [];
824 878 doCheck = false;
825 propagatedBuildInputs = with self; [];
826 879 src = fetchurl {
827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
880 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
881 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
882 };
883 meta = {
884 license = [ pkgs.lib.licenses.mit ];
885 };
886 };
887 "webob" = super.buildPythonPackage {
888 name = "webob-1.7.4";
889 doCheck = false;
890 src = fetchurl {
891 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
892 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
829 893 };
830 894 meta = {
831 895 license = [ pkgs.lib.licenses.mit ];
832 896 };
833 897 };
834 wheel = super.buildPythonPackage {
835 name = "wheel-0.29.0";
836 buildInputs = with self; [];
898 "webtest" = super.buildPythonPackage {
899 name = "webtest-2.0.29";
837 900 doCheck = false;
838 propagatedBuildInputs = with self; [];
901 propagatedBuildInputs = [
902 self."six"
903 self."webob"
904 self."waitress"
905 self."beautifulsoup4"
906 ];
839 907 src = fetchurl {
840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
841 md5 = "555a67e4507cedee23a0deb9651e452f";
908 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
909 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
842 910 };
843 911 meta = {
844 912 license = [ pkgs.lib.licenses.mit ];
845 913 };
846 914 };
847 zope.deprecation = super.buildPythonPackage {
915 "zope.deprecation" = super.buildPythonPackage {
848 916 name = "zope.deprecation-4.3.0";
849 buildInputs = with self; [];
850 917 doCheck = false;
851 propagatedBuildInputs = with self; [setuptools];
918 propagatedBuildInputs = [
919 self."setuptools"
920 ];
852 921 src = fetchurl {
853 url = "https://pypi.python.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
854 md5 = "2166b2cb7e0e96a21104e6f8f9b696bb";
922 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
923 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
855 924 };
856 925 meta = {
857 license = [ pkgs.lib.licenses.zpt21 ];
926 license = [ pkgs.lib.licenses.zpl21 ];
858 927 };
859 928 };
860 zope.interface = super.buildPythonPackage {
861 name = "zope.interface-4.4.3";
862 buildInputs = with self; [];
929 "zope.interface" = super.buildPythonPackage {
930 name = "zope.interface-4.5.0";
863 931 doCheck = false;
864 propagatedBuildInputs = with self; [setuptools];
932 propagatedBuildInputs = [
933 self."setuptools"
934 ];
865 935 src = fetchurl {
866 url = "https://pypi.python.org/packages/bd/d2/25349ed41f9dcff7b3baf87bd88a4c82396cf6e02f1f42bb68657a3132af/zope.interface-4.4.3.tar.gz";
867 md5 = "8700a4f527c1203b34b10c2b4e7a6912";
936 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
937 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
868 938 };
869 939 meta = {
870 license = [ pkgs.lib.licenses.zpt21 ];
940 license = [ pkgs.lib.licenses.zpl21 ];
871 941 };
872 942 };
873 943
874 944 ### Test requirements
875 945
876 946
877 947 }
@@ -1,3 +1,8 b''
1 1 [pytest]
2 testpaths = ./vcsserver
3 addopts = -v
2 testpaths = vcsserver
3 norecursedirs = vcsserver/hook_utils/hook_templates
4 cache_dir = /tmp/.pytest_cache
5
6
7 addopts =
8 --pdbcls=IPython.terminal.debugger:TerminalPdb
@@ -1,15 +1,16 b''
1 # This file defines how to "build" for packaging.
2
1 3 { pkgs ? import <nixpkgs> {}
2 4 , doCheck ? true
3 5 }:
4 6
5 7 let
6
7 8 vcsserver = import ./default.nix {
8 9 inherit
9 10 doCheck
10 11 pkgs;
11 12 };
12 13
13 14 in {
14 15 build = vcsserver;
15 16 }
@@ -1,41 +1,46 b''
1 ## core
2 setuptools==30.1.0
1 ## dependencies
3 2
4 Beaker==1.9.1
5 configobj==5.0.6
3 # our custom configobj
4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
5 dogpile.cache==0.6.6
6 dogpile.core==0.4.1
6 7 decorator==4.1.2
7 8 dulwich==0.13.0
8 hgsubversion==1.9.0
9 hg-evolve==7.0.1
10 infrae.cache==1.0.1
11 mercurial==4.4.2
12 msgpack-python==0.4.8
13 pyramid-jinja2==2.7
14 Jinja2==2.9.6
15 pyramid==1.9.1
9 hgsubversion==1.9.2
10 hg-evolve==8.0.1
11 mako==1.0.7
12 markupsafe==1.0.0
13 mercurial==4.6.2
14 msgpack-python==0.5.6
15
16 pastedeploy==1.5.2
17 psutil==5.4.6
18 pyramid==1.9.2
16 19 pyramid-mako==1.0.2
20
21 pygments==2.2.0
22 pathlib2==2.3.0
17 23 repoze.lru==0.7
18 24 simplejson==3.11.1
19 subprocess32==3.2.7
20
25 subprocess32==3.5.1
26 setproctitle==1.1.10
21 27 subvertpy==0.10.1
22 28
23 29 six==1.11.0
24 30 translationstring==1.3
25 WebOb==1.7.4
26 wheel==0.29.0
31 webob==1.7.4
27 32 zope.deprecation==4.3.0
28 zope.interface==4.4.3
33 zope.interface==4.5.0
29 34
30 35 ## http servers
31 gevent==1.2.2
36 gevent==1.3.5
32 37 greenlet==0.4.13
33 gunicorn==19.7.1
38 gunicorn==19.9.0
34 39 waitress==1.1.0
35 40
36 41 ## debug
37 ipdb==0.10.3
42 ipdb==0.11.0
38 43 ipython==5.1.0
39 44
40 45 ## test related requirements
41 46 -r requirements_test.txt
@@ -1,15 +1,14 b''
1 1 # test related requirements
2 pytest==3.2.5
3 py==1.5.2
2 pytest==3.6.0
3 py==1.5.3
4 4 pytest-cov==2.5.1
5 pytest-sugar==0.9.0
6 pytest-runner==3.0.0
7 pytest-catchlog==1.2.2
8 pytest-profiling==1.2.11
5 pytest-sugar==0.9.1
6 pytest-runner==4.2.0
7 pytest-profiling==1.3.0
9 8 gprof2dot==2017.9.19
10 pytest-timeout==1.2.0
9 pytest-timeout==1.2.1
11 10
12 11 mock==1.0.1
13 WebTest==2.0.29
12 webtest==2.0.29
14 13 cov-core==1.15.0
15 14 coverage==3.7.1
@@ -1,132 +1,139 b''
1 1 # -*- coding: utf-8 -*-
2 2 # RhodeCode VCSServer provides access to different vcs backends via network.
3 3 # Copyright (C) 2014-2017 RodeCode GmbH
4 4 #
5 5 # This program is free software; you can redistribute it and/or modify
6 6 # it under the terms of the GNU General Public License as published by
7 7 # the Free Software Foundation; either version 3 of the License, or
8 8 # (at your option) any later version.
9 9 #
10 10 # This program is distributed in the hope that it will be useful,
11 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 13 # GNU General Public License for more details.
14 14 #
15 15 # You should have received a copy of the GNU General Public License
16 16 # along with this program; if not, write to the Free Software Foundation,
17 17 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 18
19 19 # Import early to make sure things are patched up properly
20 20 from setuptools import setup, find_packages
21 21
22 22 import os
23 23 import sys
24 24 import pkgutil
25 25 import platform
26 import codecs
26 27
27 from pip.download import PipSession
28 from pip.req import parse_requirements
28 try: # for pip >= 10
29 from pip._internal.req import parse_requirements
30 except ImportError: # for pip <= 9.0.3
31 from pip.req import parse_requirements
29 32
30 from codecs import open
33 try: # for pip >= 10
34 from pip._internal.download import PipSession
35 except ImportError: # for pip <= 9.0.3
36 from pip.download import PipSession
37
31 38
32 39
33 40 if sys.version_info < (2, 7):
34 41 raise Exception('VCSServer requires Python 2.7 or later')
35 42
36 43 here = os.path.abspath(os.path.dirname(__file__))
37 44
38 45 # defines current platform
39 46 __platform__ = platform.system()
40 47 __license__ = 'GPL V3'
41 48 __author__ = 'RhodeCode GmbH'
42 49 __url__ = 'https://code.rhodecode.com'
43 50 is_windows = __platform__ in ('Windows',)
44 51
45 52
46 53 def _get_requirements(req_filename, exclude=None, extras=None):
47 54 extras = extras or []
48 55 exclude = exclude or []
49 56
50 57 try:
51 58 parsed = parse_requirements(
52 59 os.path.join(here, req_filename), session=PipSession())
53 60 except TypeError:
54 61 # try pip < 6.0.0, that doesn't support session
55 62 parsed = parse_requirements(os.path.join(here, req_filename))
56 63
57 64 requirements = []
58 65 for ir in parsed:
59 66 if ir.req and ir.name not in exclude:
60 67 requirements.append(str(ir.req))
61 68 return requirements + extras
62 69
63 70
64 71 # requirements extract
65 72 setup_requirements = ['pytest-runner']
66 73 install_requirements = _get_requirements(
67 74 'requirements.txt', exclude=['setuptools'])
68 75 test_requirements = _get_requirements(
69 76 'requirements_test.txt', extras=['configobj'])
70 77
71 78
72 79 def get_version():
73 80 version = pkgutil.get_data('vcsserver', 'VERSION')
74 81 return version.strip()
75 82
76 83
77 84 # additional files that goes into package itself
78 85 package_data = {
79 86 '': ['*.txt', '*.rst'],
80 87 'configs': ['*.ini'],
81 88 'vcsserver': ['VERSION'],
82 89 }
83 90
84 91 description = 'Version Control System Server'
85 92 keywords = ' '.join([
86 93 'CLI', 'RhodeCode', 'RhodeCode Enterprise', 'RhodeCode Tools'])
87 94
88 95 # README/DESCRIPTION generation
89 96 readme_file = 'README.rst'
90 97 changelog_file = 'CHANGES.rst'
91 98 try:
92 long_description = open(readme_file).read() + '\n\n' + \
93 open(changelog_file).read()
99 long_description = codecs.open(readme_file).read() + '\n\n' + \
100 codecs.open(changelog_file).read()
94 101 except IOError as err:
95 102 sys.stderr.write(
96 103 "[WARNING] Cannot find file specified as long_description (%s)\n "
97 104 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
98 105 long_description = description
99 106
100 107
101 108 setup(
102 109 name='rhodecode-vcsserver',
103 110 version=get_version(),
104 111 description=description,
105 112 long_description=long_description,
106 113 keywords=keywords,
107 114 license=__license__,
108 115 author=__author__,
109 author_email='marcin@rhodecode.com',
116 author_email='admin@rhodecode.com',
110 117 url=__url__,
111 118 setup_requires=setup_requirements,
112 119 install_requires=install_requirements,
113 120 tests_require=test_requirements,
114 121 zip_safe=False,
115 122 packages=find_packages(exclude=["docs", "tests*"]),
116 123 package_data=package_data,
117 124 include_package_data=True,
118 125 classifiers=[
119 126 'Development Status :: 6 - Mature',
120 127 'Intended Audience :: Developers',
121 128 'Operating System :: OS Independent',
122 129 'Topic :: Software Development :: Version Control',
123 130 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
124 131 'Programming Language :: Python :: 2.7',
125 132 ],
126 133 entry_points={
127 134 'console_scripts': [
128 135 'vcsserver=vcsserver.main:main',
129 136 ],
130 137 'paste.app_factory': ['main=vcsserver.http_main:main']
131 138 },
132 139 )
@@ -1,41 +1,67 b''
1 { pkgs ? import <nixpkgs> {}
1 # This file contains the adjustments which are desired for a development
2 # environment.
3
4 { pkgs ? (import <nixpkgs> {})
5 , pythonPackages ? "python27Packages"
2 6 , doCheck ? false
3 7 }:
4 8
5 9 let
6 10
7 11 vcsserver = import ./default.nix {
8 inherit pkgs doCheck;
12 inherit
13 pkgs
14 doCheck;
9 15 };
10 16
11 17 vcs-pythonPackages = vcsserver.pythonPackages;
12 18
13 19 in vcsserver.override (attrs: {
14
15 20 # Avoid that we dump any sources into the store when entering the shell and
16 21 # make development a little bit more convenient.
17 22 src = null;
18 23
24 # Add dependencies which are useful for the development environment.
19 25 buildInputs =
20 26 attrs.buildInputs ++
21 27 (with vcs-pythonPackages; [
22 28 ipdb
23 29 ]);
24 30
25 # Somewhat snappier setup of the development environment
26 # TODO: think of supporting a stable path again, so that multiple shells
27 # can share it.
28 postShellHook = ''
29 # Set locale
30 export LC_ALL="en_US.UTF-8"
31 # place to inject some required libs from develop installs
32 propagatedBuildInputs =
33 attrs.propagatedBuildInputs ++
34 [];
35
36
37 # Make sure we execute both hooks
38 shellHook = ''
39 runHook preShellHook
40 runHook postShellHook
41 '';
42
43 preShellHook = ''
44 echo "Entering VCS-Shell"
31 45
32 46 # Custom prompt to distinguish from other dev envs.
33 47 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
34 48
49 # Set locale
50 export LC_ALL="en_US.UTF-8"
51
52 # Setup a temporary directory.
35 53 tmp_path=$(mktemp -d)
36 54 export PATH="$tmp_path/bin:$PATH"
37 55 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
38 56 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
57
58 # Develop installation
59 echo "[BEGIN]: develop install of rhodecode-vcsserver"
39 60 python setup.py develop --prefix $tmp_path --allow-hosts ""
40 61 '';
62
63 postShellHook = ''
64
65 '';
66
41 67 })
@@ -1,1 +1,1 b''
1 4.12.4 No newline at end of file
1 4.13.0 No newline at end of file
@@ -1,98 +1,91 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 from vcsserver.lib.rc_cache import region_meta
23 24 log = logging.getLogger(__name__)
24 25
25 26
26 27 class RepoFactory(object):
27 28 """
28 29 Utility to create instances of repository
29 30
30 31 It provides internal caching of the `repo` object based on
31 32 the :term:`call context`.
32 33 """
34 repo_type = None
33 35
34 def __init__(self, repo_cache):
35 self._cache = repo_cache
36 def __init__(self):
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
36 38
37 39 def _create_config(self, path, config):
38 40 config = {}
39 41 return config
40 42
41 43 def _create_repo(self, wire, create):
42 44 raise NotImplementedError()
43 45
44 46 def repo(self, wire, create=False):
45 47 """
46 48 Get a repository instance for the given path.
47 49
48 50 Uses internally the low level beaker API since the decorators introduce
49 51 significant overhead.
50 52 """
51 def create_new_repo():
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
52 62 return self._create_repo(wire, create)
53 63
54 return self._repo(wire, create_new_repo)
55
56 def _repo(self, wire, createfunc):
57 context = wire.get('context', None)
58 cache = wire.get('cache', True)
59
60 if context and cache:
61 cache_key = (context, wire['path'])
62 log.debug(
63 'FETCH %s@%s repo object from cache. Context: %s',
64 self.__class__.__name__, wire['path'], context)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
66 else:
67 log.debug(
68 'INIT %s@%s repo object based on wire %s. Context: %s',
69 self.__class__.__name__, wire['path'], wire, context)
70 return createfunc()
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
65 return repo
71 66
72 67
73 68 def obfuscate_qs(query_string):
74 69 if query_string is None:
75 70 return None
76 71
77 72 parsed = []
78 73 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
79 74 if k in ['auth_token', 'api_key']:
80 75 v = "*****"
81 76 parsed.append((k, v))
82 77
83 78 return '&'.join('{}{}'.format(
84 79 k, '={}'.format(v) if v else '') for k, v in parsed)
85 80
86 81
87 82 def raise_from_original(new_type):
88 83 """
89 84 Raise a new exception type with original args and traceback.
90 85 """
91 86 exc_type, exc_value, exc_traceback = sys.exc_info()
92 87
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
95 88 try:
96 89 raise new_type(*exc_value.args), None, exc_traceback
97 90 finally:
98 91 del exc_traceback
@@ -1,70 +1,116 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 import functools
28 from pyramid.httpexceptions import HTTPLocked
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
29 28
30 29
31 def _make_exception(kind, *args):
30 def _make_exception(kind, org_exc, *args):
32 31 """
33 32 Prepares a base `Exception` instance to be sent over the wire.
34 33
35 34 To give our caller a hint what this is about, it will attach an attribute
36 35 `_vcs_kind` to the exception.
37 36 """
38 37 exc = Exception(*args)
39 38 exc._vcs_kind = kind
39 exc._org_exc = org_exc
40 40 return exc
41 41
42 42
43 AbortException = functools.partial(_make_exception, 'abort')
43 def AbortException(org_exc=None):
44 def _make_exception_wrapper(*args):
45 return _make_exception('abort', org_exc, *args)
46 return _make_exception_wrapper
47
44 48
45 ArchiveException = functools.partial(_make_exception, 'archive')
49 def ArchiveException(org_exc=None):
50 def _make_exception_wrapper(*args):
51 return _make_exception('archive', org_exc, *args)
52 return _make_exception_wrapper
53
46 54
47 LookupException = functools.partial(_make_exception, 'lookup')
55 def LookupException(org_exc=None):
56 def _make_exception_wrapper(*args):
57 return _make_exception('lookup', org_exc, *args)
58 return _make_exception_wrapper
59
48 60
49 VcsException = functools.partial(_make_exception, 'error')
61 def VcsException(org_exc=None):
62 def _make_exception_wrapper(*args):
63 return _make_exception('error', org_exc, *args)
64 return _make_exception_wrapper
65
66
67 def RepositoryLockedException(org_exc=None):
68 def _make_exception_wrapper(*args):
69 return _make_exception('repo_locked', org_exc, *args)
70 return _make_exception_wrapper
50 71
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
72
73 def RepositoryBranchProtectedException(org_exc=None):
74 def _make_exception_wrapper(*args):
75 return _make_exception('repo_branch_protected', org_exc, *args)
76 return _make_exception_wrapper
52 77
53 RequirementException = functools.partial(_make_exception, 'requirement')
78
79 def RequirementException(org_exc=None):
80 def _make_exception_wrapper(*args):
81 return _make_exception('requirement', org_exc, *args)
82 return _make_exception_wrapper
83
54 84
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
85 def UnhandledException(org_exc=None):
86 def _make_exception_wrapper(*args):
87 return _make_exception('unhandled', org_exc, *args)
88 return _make_exception_wrapper
89
56 90
57 URLError = functools.partial(_make_exception, 'url_error')
91 def URLError(org_exc=None):
92 def _make_exception_wrapper(*args):
93 return _make_exception('url_error', org_exc, *args)
94 return _make_exception_wrapper
58 95
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
96
97 def SubrepoMergeException(org_exc=None):
98 def _make_exception_wrapper(*args):
99 return _make_exception('subrepo_merge_error', org_exc, *args)
100 return _make_exception_wrapper
60 101
61 102
62 103 class HTTPRepoLocked(HTTPLocked):
63 104 """
64 105 Subclass of HTTPLocked response that allows to set the title and status
65 106 code via constructor arguments.
66 107 """
67 108 def __init__(self, title, status_code=None, **kwargs):
68 109 self.code = status_code or HTTPLocked.code
69 110 self.title = title
70 111 super(HTTPRepoLocked, self).__init__(**kwargs)
112
113
114 class HTTPRepoBranchProtected(HTTPForbidden):
115 def __init__(self, *args, **kwargs):
116 super(HTTPForbidden, self).__init__(*args, **kwargs)
@@ -1,670 +1,675 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 def reraise_safe_exceptions(func):
52 52 """Converts Dulwich exceptions to something neutral."""
53 53 @wraps(func)
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 58 ObjectMissing) as e:
59 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e)(e.message)
60 60 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e)(e.message)
62 62 except Exception as e:
63 63 # NOTE(marcink): becuase of how dulwich handles some exceptions
64 64 # (KeyError on empty repos), we cannot track this and catch all
65 65 # exceptions, it's an exceptions from other handlers
66 66 #if not hasattr(e, '_vcs_kind'):
67 67 #log.exception("Unhandled exception in git remote call")
68 68 #raise_from_original(exceptions.UnhandledException)
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class Repo(DulwichRepo):
74 74 """
75 75 A wrapper for dulwich Repo class.
76 76
77 77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
78 78 "Too many open files" error. We need to close all opened file descriptors
79 79 once the repo object is destroyed.
80 80
81 81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 82 to 0.12.0 +
83 83 """
84 84 def __del__(self):
85 85 if hasattr(self, 'object_store'):
86 86 self.close()
87 87
88 88
89 89 class GitFactory(RepoFactory):
90 repo_type = 'git'
90 91
91 92 def _create_repo(self, wire, create):
92 93 repo_path = str_to_dulwich(wire['path'])
93 94 return Repo(repo_path)
94 95
95 96
96 97 class GitRemote(object):
97 98
98 99 def __init__(self, factory):
99 100 self._factory = factory
100 101
101 102 self._bulk_methods = {
102 103 "author": self.commit_attribute,
103 104 "date": self.get_object_attrs,
104 105 "message": self.commit_attribute,
105 106 "parents": self.commit_attribute,
106 107 "_commit": self.revision,
107 108 }
108 109
109 110 def _wire_to_config(self, wire):
110 111 if 'config' in wire:
111 112 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 113 return {}
113 114
114 115 def _assign_ref(self, wire, ref, commit_id):
115 116 repo = self._factory.repo(wire)
116 117 repo[ref] = commit_id
117 118
118 119 @reraise_safe_exceptions
119 120 def add_object(self, wire, content):
120 121 repo = self._factory.repo(wire)
121 122 blob = objects.Blob()
122 123 blob.set_raw_string(content)
123 124 repo.object_store.add_object(blob)
124 125 return blob.id
125 126
126 127 @reraise_safe_exceptions
127 128 def assert_correct_path(self, wire):
128 129 path = wire.get('path')
129 130 try:
130 131 self._factory.repo(wire)
131 132 except NotGitRepository as e:
132 133 tb = traceback.format_exc()
133 134 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 135 return False
135 136
136 137 return True
137 138
138 139 @reraise_safe_exceptions
139 140 def bare(self, wire):
140 141 repo = self._factory.repo(wire)
141 142 return repo.bare
142 143
143 144 @reraise_safe_exceptions
144 145 def blob_as_pretty_string(self, wire, sha):
145 146 repo = self._factory.repo(wire)
146 147 return repo[sha].as_pretty_string()
147 148
148 149 @reraise_safe_exceptions
149 150 def blob_raw_length(self, wire, sha):
150 151 repo = self._factory.repo(wire)
151 152 blob = repo[sha]
152 153 return blob.raw_length()
153 154
154 155 def _parse_lfs_pointer(self, raw_content):
155 156
156 157 spec_string = 'version https://git-lfs.github.com/spec'
157 158 if raw_content and raw_content.startswith(spec_string):
158 159 pattern = re.compile(r"""
159 160 (?:\n)?
160 161 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 162 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 163 ^size[ ](?P<oid_size>[0-9]+)\n
163 164 (?:\n)?
164 165 """, re.VERBOSE | re.MULTILINE)
165 166 match = pattern.match(raw_content)
166 167 if match:
167 168 return match.groupdict()
168 169
169 170 return {}
170 171
171 172 @reraise_safe_exceptions
172 173 def is_large_file(self, wire, sha):
173 174 repo = self._factory.repo(wire)
174 175 blob = repo[sha]
175 176 return self._parse_lfs_pointer(blob.as_raw_string())
176 177
177 178 @reraise_safe_exceptions
178 179 def in_largefiles_store(self, wire, oid):
179 180 repo = self._factory.repo(wire)
180 181 conf = self._wire_to_config(wire)
181 182
182 183 store_location = conf.get('vcs_git_lfs_store_location')
183 184 if store_location:
184 185 repo_name = repo.path
185 186 store = LFSOidStore(
186 187 oid=oid, repo=repo_name, store_location=store_location)
187 188 return store.has_oid()
188 189
189 190 return False
190 191
191 192 @reraise_safe_exceptions
192 193 def store_path(self, wire, oid):
193 194 repo = self._factory.repo(wire)
194 195 conf = self._wire_to_config(wire)
195 196
196 197 store_location = conf.get('vcs_git_lfs_store_location')
197 198 if store_location:
198 199 repo_name = repo.path
199 200 store = LFSOidStore(
200 201 oid=oid, repo=repo_name, store_location=store_location)
201 202 return store.oid_path
202 203 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203 204
204 205 @reraise_safe_exceptions
205 206 def bulk_request(self, wire, rev, pre_load):
206 207 result = {}
207 208 for attr in pre_load:
208 209 try:
209 210 method = self._bulk_methods[attr]
210 211 args = [wire, rev]
211 212 if attr == "date":
212 213 args.extend(["commit_time", "commit_timezone"])
213 214 elif attr in ["author", "message", "parents"]:
214 215 args.append(attr)
215 216 result[attr] = method(*args)
216 except KeyError:
217 raise exceptions.VcsException(
217 except KeyError as e:
218 raise exceptions.VcsException(e)(
218 219 "Unknown bulk attribute: %s" % attr)
219 220 return result
220 221
221 222 def _build_opener(self, url):
222 223 handlers = []
223 224 url_obj = url_parser(url)
224 225 _, authinfo = url_obj.authinfo()
225 226
226 227 if authinfo:
227 228 # create a password manager
228 229 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 230 passmgr.add_password(*authinfo)
230 231
231 232 handlers.extend((httpbasicauthhandler(passmgr),
232 233 httpdigestauthhandler(passmgr)))
233 234
234 235 return urllib2.build_opener(*handlers)
235 236
236 237 @reraise_safe_exceptions
237 238 def check_url(self, url, config):
238 239 url_obj = url_parser(url)
239 240 test_uri, _ = url_obj.authinfo()
240 241 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 242 url_obj.query = obfuscate_qs(url_obj.query)
242 243 cleaned_uri = str(url_obj)
243 244 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244 245
245 246 if not test_uri.endswith('info/refs'):
246 247 test_uri = test_uri.rstrip('/') + '/info/refs'
247 248
248 249 o = self._build_opener(url)
249 250 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250 251
251 252 q = {"service": 'git-upload-pack'}
252 253 qs = '?%s' % urllib.urlencode(q)
253 254 cu = "%s%s" % (test_uri, qs)
254 255 req = urllib2.Request(cu, None, {})
255 256
256 257 try:
257 258 log.debug("Trying to open URL %s", cleaned_uri)
258 259 resp = o.open(req)
259 260 if resp.code != 200:
260 raise exceptions.URLError('Return Code is not 200')
261 raise exceptions.URLError()('Return Code is not 200')
261 262 except Exception as e:
262 263 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 264 # means it cannot be cloned
264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
265 266
266 267 # now detect if it's proper git repo
267 268 gitdata = resp.read()
268 269 if 'service=git-upload-pack' in gitdata:
269 270 pass
270 271 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 272 # old style git can return some other format !
272 273 pass
273 274 else:
274 raise exceptions.URLError(
275 raise exceptions.URLError()(
275 276 "url [%s] does not look like an git" % (cleaned_uri,))
276 277
277 278 return True
278 279
279 280 @reraise_safe_exceptions
280 281 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 282 remote_refs = self.fetch(wire, url, apply_refs=False)
282 283 repo = self._factory.repo(wire)
283 284 if isinstance(valid_refs, list):
284 285 valid_refs = tuple(valid_refs)
285 286
286 287 for k in remote_refs:
287 288 # only parse heads/tags and skip so called deferred tags
288 289 if k.startswith(valid_refs) and not k.endswith(deferred):
289 290 repo[k] = remote_refs[k]
290 291
291 292 if update_after_clone:
292 293 # we want to checkout HEAD
293 294 repo["HEAD"] = remote_refs["HEAD"]
294 295 index.build_index_from_tree(repo.path, repo.index_path(),
295 296 repo.object_store, repo["HEAD"].tree)
296 297
297 298 # TODO: this is quite complex, check if that can be simplified
298 299 @reraise_safe_exceptions
299 300 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 301 repo = self._factory.repo(wire)
301 302 object_store = repo.object_store
302 303
303 304 # Create tree and populates it with blobs
304 305 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305 306
306 307 for node in updated:
307 308 # Compute subdirs if needed
308 309 dirpath, nodename = vcspath.split(node['path'])
309 310 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 311 parent = commit_tree
311 312 ancestors = [('', parent)]
312 313
313 314 # Tries to dig for the deepest existing tree
314 315 while dirnames:
315 316 curdir = dirnames.pop(0)
316 317 try:
317 318 dir_id = parent[curdir][1]
318 319 except KeyError:
319 320 # put curdir back into dirnames and stops
320 321 dirnames.insert(0, curdir)
321 322 break
322 323 else:
323 324 # If found, updates parent
324 325 parent = repo[dir_id]
325 326 ancestors.append((curdir, parent))
326 327 # Now parent is deepest existing tree and we need to create
327 328 # subtrees for dirnames (in reverse order)
328 329 # [this only applies for nodes from added]
329 330 new_trees = []
330 331
331 332 blob = objects.Blob.from_string(node['content'])
332 333
333 334 if dirnames:
334 335 # If there are trees which should be created we need to build
335 336 # them now (in reverse order)
336 337 reversed_dirnames = list(reversed(dirnames))
337 338 curtree = objects.Tree()
338 339 curtree[node['node_path']] = node['mode'], blob.id
339 340 new_trees.append(curtree)
340 341 for dirname in reversed_dirnames[:-1]:
341 342 newtree = objects.Tree()
342 343 newtree[dirname] = (DIR_STAT, curtree.id)
343 344 new_trees.append(newtree)
344 345 curtree = newtree
345 346 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 347 else:
347 348 parent.add(
348 349 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349 350
350 351 new_trees.append(parent)
351 352 # Update ancestors
352 353 reversed_ancestors = reversed(
353 354 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 355 for parent, tree, path in reversed_ancestors:
355 356 parent[path] = (DIR_STAT, tree.id)
356 357 object_store.add_object(tree)
357 358
358 359 object_store.add_object(blob)
359 360 for tree in new_trees:
360 361 object_store.add_object(tree)
361 362
362 363 for node_path in removed:
363 364 paths = node_path.split('/')
364 365 tree = commit_tree
365 366 trees = [tree]
366 367 # Traverse deep into the forest...
367 368 for path in paths:
368 369 try:
369 370 obj = repo[tree[path][1]]
370 371 if isinstance(obj, objects.Tree):
371 372 trees.append(obj)
372 373 tree = obj
373 374 except KeyError:
374 375 break
375 376 # Cut down the blob and all rotten trees on the way back...
376 377 for path, tree in reversed(zip(paths, trees)):
377 378 del tree[path]
378 379 if tree:
379 380 # This tree still has elements - don't remove it or any
380 381 # of it's parents
381 382 break
382 383
383 384 object_store.add_object(commit_tree)
384 385
385 386 # Create commit
386 387 commit = objects.Commit()
387 388 commit.tree = commit_tree.id
388 389 for k, v in commit_data.iteritems():
389 390 setattr(commit, k, v)
390 391 object_store.add_object(commit)
391 392
392 393 ref = 'refs/heads/%s' % branch
393 394 repo.refs[ref] = commit.id
394 395
395 396 return commit.id
396 397
397 398 @reraise_safe_exceptions
398 399 def fetch(self, wire, url, apply_refs=True, refs=None):
399 400 if url != 'default' and '://' not in url:
400 401 client = LocalGitClient(url)
401 402 else:
402 403 url_obj = url_parser(url)
403 404 o = self._build_opener(url)
404 405 url, _ = url_obj.authinfo()
405 406 client = HttpGitClient(base_url=url, opener=o)
406 407 repo = self._factory.repo(wire)
407 408
408 409 determine_wants = repo.object_store.determine_wants_all
409 410 if refs:
410 411 def determine_wants_requested(references):
411 412 return [references[r] for r in references if r in refs]
412 413 determine_wants = determine_wants_requested
413 414
414 415 try:
415 416 remote_refs = client.fetch(
416 417 path=url, target=repo, determine_wants=determine_wants)
417 418 except NotGitRepository as e:
418 419 log.warning(
419 420 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 421 # Exception can contain unicode which we convert
421 raise exceptions.AbortException(repr(e))
422 raise exceptions.AbortException(e)(repr(e))
422 423
423 424 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 425 # refs filtered by `determine_wants` function. We need to filter result
425 426 # as well
426 427 if refs:
427 428 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428 429
429 430 if apply_refs:
430 431 # TODO: johbo: Needs proper test coverage with a git repository
431 432 # that contains a tag object, so that we would end up with
432 433 # a peeled ref at this point.
433 434 PEELED_REF_MARKER = '^{}'
434 435 for k in remote_refs:
435 436 if k.endswith(PEELED_REF_MARKER):
436 437 log.info("Skipping peeled reference %s", k)
437 438 continue
438 439 repo[k] = remote_refs[k]
439 440
440 441 if refs:
441 442 # mikhail: explicitly set the head to the last ref.
442 443 repo['HEAD'] = remote_refs[refs[-1]]
443 444
444 445 # TODO: mikhail: should we return remote_refs here to be
445 446 # consistent?
446 447 else:
447 448 return remote_refs
448 449
449 450 @reraise_safe_exceptions
450 451 def sync_push(self, wire, url, refs=None):
451 452 if self.check_url(url, wire):
452 453 repo = self._factory.repo(wire)
453 454 self.run_git_command(
454 455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 456 _copts=['-c', 'core.askpass=""'],
456 457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
457 458
458 459 @reraise_safe_exceptions
459 460 def get_remote_refs(self, wire, url):
460 461 repo = Repo(url)
461 462 return repo.get_refs()
462 463
463 464 @reraise_safe_exceptions
464 465 def get_description(self, wire):
465 466 repo = self._factory.repo(wire)
466 467 return repo.get_description()
467 468
468 469 @reraise_safe_exceptions
469 470 def get_file_history(self, wire, file_path, commit_id, limit):
470 471 repo = self._factory.repo(wire)
471 472 include = [commit_id]
472 473 paths = [file_path]
473 474
474 475 walker = repo.get_walker(include, paths=paths, max_entries=limit)
475 476 return [x.commit.id for x in walker]
476 477
477 478 @reraise_safe_exceptions
478 479 def get_missing_revs(self, wire, rev1, rev2, path2):
479 480 repo = self._factory.repo(wire)
480 481 LocalGitClient(thin_packs=False).fetch(path2, repo)
481 482
482 483 wire_remote = wire.copy()
483 484 wire_remote['path'] = path2
484 485 repo_remote = self._factory.repo(wire_remote)
485 486 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
486 487
487 488 revs = [
488 489 x.commit.id
489 490 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
490 491 return revs
491 492
492 493 @reraise_safe_exceptions
493 494 def get_object(self, wire, sha):
494 495 repo = self._factory.repo(wire)
495 496 obj = repo.get_object(sha)
496 497 commit_id = obj.id
497 498
498 499 if isinstance(obj, Tag):
499 500 commit_id = obj.object[1]
500 501
501 502 return {
502 503 'id': obj.id,
503 504 'type': obj.type_name,
504 505 'commit_id': commit_id
505 506 }
506 507
507 508 @reraise_safe_exceptions
508 509 def get_object_attrs(self, wire, sha, *attrs):
509 510 repo = self._factory.repo(wire)
510 511 obj = repo.get_object(sha)
511 512 return list(getattr(obj, a) for a in attrs)
512 513
513 514 @reraise_safe_exceptions
514 515 def get_refs(self, wire):
515 516 repo = self._factory.repo(wire)
516 517 result = {}
517 518 for ref, sha in repo.refs.as_dict().items():
518 519 peeled_sha = repo.get_peeled(ref)
519 520 result[ref] = peeled_sha
520 521 return result
521 522
522 523 @reraise_safe_exceptions
523 524 def get_refs_path(self, wire):
524 525 repo = self._factory.repo(wire)
525 526 return repo.refs.path
526 527
527 528 @reraise_safe_exceptions
528 def head(self, wire):
529 def head(self, wire, show_exc=True):
529 530 repo = self._factory.repo(wire)
530 return repo.head()
531 try:
532 return repo.head()
533 except Exception:
534 if show_exc:
535 raise
531 536
532 537 @reraise_safe_exceptions
533 538 def init(self, wire):
534 539 repo_path = str_to_dulwich(wire['path'])
535 540 self.repo = Repo.init(repo_path)
536 541
537 542 @reraise_safe_exceptions
538 543 def init_bare(self, wire):
539 544 repo_path = str_to_dulwich(wire['path'])
540 545 self.repo = Repo.init_bare(repo_path)
541 546
542 547 @reraise_safe_exceptions
543 548 def revision(self, wire, rev):
544 549 repo = self._factory.repo(wire)
545 550 obj = repo[rev]
546 551 obj_data = {
547 552 'id': obj.id,
548 553 }
549 554 try:
550 555 obj_data['tree'] = obj.tree
551 556 except AttributeError:
552 557 pass
553 558 return obj_data
554 559
555 560 @reraise_safe_exceptions
556 561 def commit_attribute(self, wire, rev, attr):
557 562 repo = self._factory.repo(wire)
558 563 obj = repo[rev]
559 564 return getattr(obj, attr)
560 565
561 566 @reraise_safe_exceptions
562 567 def set_refs(self, wire, key, value):
563 568 repo = self._factory.repo(wire)
564 569 repo.refs[key] = value
565 570
566 571 @reraise_safe_exceptions
567 572 def remove_ref(self, wire, key):
568 573 repo = self._factory.repo(wire)
569 574 del repo.refs[key]
570 575
571 576 @reraise_safe_exceptions
572 577 def tree_changes(self, wire, source_id, target_id):
573 578 repo = self._factory.repo(wire)
574 579 source = repo[source_id].tree if source_id else None
575 580 target = repo[target_id].tree
576 581 result = repo.object_store.tree_changes(source, target)
577 582 return list(result)
578 583
579 584 @reraise_safe_exceptions
580 585 def tree_items(self, wire, tree_id):
581 586 repo = self._factory.repo(wire)
582 587 tree = repo[tree_id]
583 588
584 589 result = []
585 590 for item in tree.iteritems():
586 591 item_sha = item.sha
587 592 item_mode = item.mode
588 593
589 594 if FILE_MODE(item_mode) == GIT_LINK:
590 595 item_type = "link"
591 596 else:
592 597 item_type = repo[item_sha].type_name
593 598
594 599 result.append((item.path, item_mode, item_sha, item_type))
595 600 return result
596 601
597 602 @reraise_safe_exceptions
598 603 def update_server_info(self, wire):
599 604 repo = self._factory.repo(wire)
600 605 update_server_info(repo)
601 606
602 607 @reraise_safe_exceptions
603 608 def discover_git_version(self):
604 609 stdout, _ = self.run_git_command(
605 610 {}, ['--version'], _bare=True, _safe=True)
606 611 prefix = 'git version'
607 612 if stdout.startswith(prefix):
608 613 stdout = stdout[len(prefix):]
609 614 return stdout.strip()
610 615
611 616 @reraise_safe_exceptions
612 617 def run_git_command(self, wire, cmd, **opts):
613 618 path = wire.get('path', None)
614 619
615 620 if path and os.path.isdir(path):
616 621 opts['cwd'] = path
617 622
618 623 if '_bare' in opts:
619 624 _copts = []
620 625 del opts['_bare']
621 626 else:
622 627 _copts = ['-c', 'core.quotepath=false', ]
623 628 safe_call = False
624 629 if '_safe' in opts:
625 630 # no exc on failure
626 631 del opts['_safe']
627 632 safe_call = True
628 633
629 634 if '_copts' in opts:
630 635 _copts.extend(opts['_copts'] or [])
631 636 del opts['_copts']
632 637
633 638 gitenv = os.environ.copy()
634 639 gitenv.update(opts.pop('extra_env', {}))
635 640 # need to clean fix GIT_DIR !
636 641 if 'GIT_DIR' in gitenv:
637 642 del gitenv['GIT_DIR']
638 643 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
639 644 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
640 645
641 646 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
642 647
643 648 try:
644 649 _opts = {'env': gitenv, 'shell': False}
645 650 _opts.update(opts)
646 651 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
647 652
648 653 return ''.join(p), ''.join(p.error)
649 654 except (EnvironmentError, OSError) as err:
650 655 cmd = ' '.join(cmd) # human friendly CMD
651 656 tb_err = ("Couldn't run git command (%s).\n"
652 657 "Original error was:%s\n" % (cmd, err))
653 658 log.exception(tb_err)
654 659 if safe_call:
655 660 return '', err
656 661 else:
657 raise exceptions.VcsException(tb_err)
662 raise exceptions.VcsException()(tb_err)
658 663
659 664 @reraise_safe_exceptions
660 665 def install_hooks(self, wire, force=False):
661 666 from vcsserver.hook_utils import install_git_hooks
662 667 repo = self._factory.repo(wire)
663 668 return install_git_hooks(repo.path, repo.bare, force_create=force)
664 669
665 670
666 671 def str_to_dulwich(value):
667 672 """
668 673 Dulwich 0.10.1a requires `unicode` objects to be passed in.
669 674 """
670 675 return value.decode(settings.WIRE_ENCODING)
@@ -1,776 +1,793 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 baseui.setconfig('ui', 'paginate', 'never')
57 57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 58 # signal in a non-main thread, thus generating a ValueError.
59 59 baseui.setconfig('worker', 'numcpus', 1)
60 60
61 61 # If there is no config for the largefiles extension, we explicitly disable
62 62 # it here. This overrides settings from repositories hgrc file. Recent
63 63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 64 # repo.
65 65 if not baseui.hasconfig('extensions', 'largefiles'):
66 66 log.debug('Explicitly disable largefiles extension for repo.')
67 67 baseui.setconfig('extensions', 'largefiles', '!')
68 68
69 69 return baseui
70 70
71 71
72 72 def reraise_safe_exceptions(func):
73 73 """Decorator for converting mercurial exceptions to something neutral."""
74 74 def wrapper(*args, **kwargs):
75 75 try:
76 76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired):
78 raise_from_original(exceptions.AbortException)
79 except RepoLookupError:
80 raise_from_original(exceptions.LookupException)
81 except RequirementError:
82 raise_from_original(exceptions.RequirementException)
83 except RepoError:
84 raise_from_original(exceptions.VcsException)
85 except LookupError:
86 raise_from_original(exceptions.LookupException)
77 except (Abort, InterventionRequired) as e:
78 raise_from_original(exceptions.AbortException(e))
79 except RepoLookupError as e:
80 raise_from_original(exceptions.LookupException(e))
81 except RequirementError as e:
82 raise_from_original(exceptions.RequirementException(e))
83 except RepoError as e:
84 raise_from_original(exceptions.VcsException(e))
85 except LookupError as e:
86 raise_from_original(exceptions.LookupException(e))
87 87 except Exception as e:
88 88 if not hasattr(e, '_vcs_kind'):
89 89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException(e))
91
91 92 raise
92 93 return wrapper
93 94
94 95
95 96 class MercurialFactory(RepoFactory):
97 repo_type = 'hg'
96 98
97 99 def _create_config(self, config, hooks=True):
98 100 if not hooks:
99 101 hooks_to_clean = frozenset((
100 102 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 104 new_config = []
103 105 for section, option, value in config:
104 106 if section == 'hooks' and option in hooks_to_clean:
105 107 continue
106 108 new_config.append((section, option, value))
107 109 config = new_config
108 110
109 111 baseui = make_ui_from_config(config)
110 112 return baseui
111 113
112 114 def _create_repo(self, wire, create):
113 115 baseui = self._create_config(wire["config"])
114 116 return localrepository(baseui, wire["path"], create)
115 117
116 118
117 119 class HgRemote(object):
118 120
119 121 def __init__(self, factory):
120 122 self._factory = factory
121 123
122 124 self._bulk_methods = {
123 125 "affected_files": self.ctx_files,
124 126 "author": self.ctx_user,
125 127 "branch": self.ctx_branch,
126 128 "children": self.ctx_children,
127 129 "date": self.ctx_date,
128 130 "message": self.ctx_description,
129 131 "parents": self.ctx_parents,
130 132 "status": self.ctx_status,
131 133 "obsolete": self.ctx_obsolete,
132 134 "phase": self.ctx_phase,
133 135 "hidden": self.ctx_hidden,
134 136 "_file_paths": self.ctx_list,
135 137 }
136 138
137 139 @reraise_safe_exceptions
138 140 def discover_hg_version(self):
139 141 from mercurial import util
140 142 return util.version()
141 143
142 144 @reraise_safe_exceptions
143 145 def archive_repo(self, archive_path, mtime, file_info, kind):
144 146 if kind == "tgz":
145 147 archiver = archival.tarit(archive_path, mtime, "gz")
146 148 elif kind == "tbz2":
147 149 archiver = archival.tarit(archive_path, mtime, "bz2")
148 150 elif kind == 'zip':
149 151 archiver = archival.zipit(archive_path, mtime)
150 152 else:
151 raise exceptions.ArchiveException(
153 raise exceptions.ArchiveException()(
152 154 'Remote does not support: "%s".' % kind)
153 155
154 156 for f_path, f_mode, f_is_link, f_content in file_info:
155 157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 158 archiver.done()
157 159
158 160 @reraise_safe_exceptions
159 161 def bookmarks(self, wire):
160 162 repo = self._factory.repo(wire)
161 163 return dict(repo._bookmarks)
162 164
163 165 @reraise_safe_exceptions
164 166 def branches(self, wire, normal, closed):
165 167 repo = self._factory.repo(wire)
166 168 iter_branches = repo.branchmap().iterbranches()
167 169 bt = {}
168 170 for branch_name, _heads, tip, is_closed in iter_branches:
169 171 if normal and not is_closed:
170 172 bt[branch_name] = tip
171 173 if closed and is_closed:
172 174 bt[branch_name] = tip
173 175
174 176 return bt
175 177
176 178 @reraise_safe_exceptions
177 179 def bulk_request(self, wire, rev, pre_load):
178 180 result = {}
179 181 for attr in pre_load:
180 182 try:
181 183 method = self._bulk_methods[attr]
182 184 result[attr] = method(wire, rev)
183 except KeyError:
184 raise exceptions.VcsException(
185 except KeyError as e:
186 raise exceptions.VcsException(e)(
185 187 'Unknown bulk attribute: "%s"' % attr)
186 188 return result
187 189
188 190 @reraise_safe_exceptions
189 191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 193 clone(baseui, source, dest, noupdate=not update_after_clone)
192 194
193 195 @reraise_safe_exceptions
194 196 def commitctx(
195 197 self, wire, message, parents, commit_time, commit_timezone,
196 198 user, files, extra, removed, updated):
197 199
198 200 def _filectxfn(_repo, memctx, path):
199 201 """
200 202 Marks given path as added/changed/removed in a given _repo. This is
201 203 for internal mercurial commit function.
202 204 """
203 205
204 206 # check if this path is removed
205 207 if path in removed:
206 208 # returning None is a way to mark node for removal
207 209 return None
208 210
209 211 # check if this path is added
210 212 for node in updated:
211 213 if node['path'] == path:
212 214 return memfilectx(
213 215 _repo,
216 changectx=memctx,
214 217 path=node['path'],
215 218 data=node['content'],
216 219 islink=False,
217 220 isexec=bool(node['mode'] & stat.S_IXUSR),
218 copied=False,
219 memctx=memctx)
221 copied=False)
220 222
221 raise exceptions.AbortException(
223 raise exceptions.AbortException()(
222 224 "Given path haven't been marked as added, "
223 225 "changed or removed (%s)" % path)
224 226
225 227 repo = self._factory.repo(wire)
226 228
227 229 commit_ctx = memctx(
228 230 repo=repo,
229 231 parents=parents,
230 232 text=message,
231 233 files=files,
232 234 filectxfn=_filectxfn,
233 235 user=user,
234 236 date=(commit_time, commit_timezone),
235 237 extra=extra)
236 238
237 239 n = repo.commitctx(commit_ctx)
238 240 new_id = hex(n)
239 241
240 242 return new_id
241 243
242 244 @reraise_safe_exceptions
243 245 def ctx_branch(self, wire, revision):
244 246 repo = self._factory.repo(wire)
245 247 ctx = repo[revision]
246 248 return ctx.branch()
247 249
248 250 @reraise_safe_exceptions
249 251 def ctx_children(self, wire, revision):
250 252 repo = self._factory.repo(wire)
251 253 ctx = repo[revision]
252 254 return [child.rev() for child in ctx.children()]
253 255
254 256 @reraise_safe_exceptions
255 257 def ctx_date(self, wire, revision):
256 258 repo = self._factory.repo(wire)
257 259 ctx = repo[revision]
258 260 return ctx.date()
259 261
260 262 @reraise_safe_exceptions
261 263 def ctx_description(self, wire, revision):
262 264 repo = self._factory.repo(wire)
263 265 ctx = repo[revision]
264 266 return ctx.description()
265 267
266 268 @reraise_safe_exceptions
267 269 def ctx_diff(
268 270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 271 repo = self._factory.repo(wire)
270 272 ctx = repo[revision]
271 273 result = ctx.diff(
272 274 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 275 return list(result)
274 276
275 277 @reraise_safe_exceptions
276 278 def ctx_files(self, wire, revision):
277 279 repo = self._factory.repo(wire)
278 280 ctx = repo[revision]
279 281 return ctx.files()
280 282
281 283 @reraise_safe_exceptions
282 284 def ctx_list(self, path, revision):
283 285 repo = self._factory.repo(path)
284 286 ctx = repo[revision]
285 287 return list(ctx)
286 288
287 289 @reraise_safe_exceptions
288 290 def ctx_parents(self, wire, revision):
289 291 repo = self._factory.repo(wire)
290 292 ctx = repo[revision]
291 293 return [parent.rev() for parent in ctx.parents()]
292 294
293 295 @reraise_safe_exceptions
294 296 def ctx_phase(self, wire, revision):
295 297 repo = self._factory.repo(wire)
296 298 ctx = repo[revision]
297 299 # public=0, draft=1, secret=3
298 300 return ctx.phase()
299 301
300 302 @reraise_safe_exceptions
301 303 def ctx_obsolete(self, wire, revision):
302 304 repo = self._factory.repo(wire)
303 305 ctx = repo[revision]
304 306 return ctx.obsolete()
305 307
306 308 @reraise_safe_exceptions
307 309 def ctx_hidden(self, wire, revision):
308 310 repo = self._factory.repo(wire)
309 311 ctx = repo[revision]
310 312 return ctx.hidden()
311 313
312 314 @reraise_safe_exceptions
313 315 def ctx_substate(self, wire, revision):
314 316 repo = self._factory.repo(wire)
315 317 ctx = repo[revision]
316 318 return ctx.substate
317 319
318 320 @reraise_safe_exceptions
319 321 def ctx_status(self, wire, revision):
320 322 repo = self._factory.repo(wire)
321 323 ctx = repo[revision]
322 324 status = repo[ctx.p1().node()].status(other=ctx.node())
323 325 # object of status (odd, custom named tuple in mercurial) is not
324 326 # correctly serializable, we make it a list, as the underling
325 327 # API expects this to be a list
326 328 return list(status)
327 329
328 330 @reraise_safe_exceptions
329 331 def ctx_user(self, wire, revision):
330 332 repo = self._factory.repo(wire)
331 333 ctx = repo[revision]
332 334 return ctx.user()
333 335
334 336 @reraise_safe_exceptions
335 337 def check_url(self, url, config):
336 338 _proto = None
337 339 if '+' in url[:url.find('://')]:
338 340 _proto = url[0:url.find('+')]
339 341 url = url[url.find('+') + 1:]
340 342 handlers = []
341 343 url_obj = url_parser(url)
342 344 test_uri, authinfo = url_obj.authinfo()
343 345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 346 url_obj.query = obfuscate_qs(url_obj.query)
345 347
346 348 cleaned_uri = str(url_obj)
347 349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348 350
349 351 if authinfo:
350 352 # create a password manager
351 353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 354 passmgr.add_password(*authinfo)
353 355
354 356 handlers.extend((httpbasicauthhandler(passmgr),
355 357 httpdigestauthhandler(passmgr)))
356 358
357 359 o = urllib2.build_opener(*handlers)
358 360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 361 ('Accept', 'application/mercurial-0.1')]
360 362
361 363 q = {"cmd": 'between'}
362 364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 365 qs = '?%s' % urllib.urlencode(q)
364 366 cu = "%s%s" % (test_uri, qs)
365 367 req = urllib2.Request(cu, None, {})
366 368
367 369 try:
368 370 log.debug("Trying to open URL %s", cleaned_uri)
369 371 resp = o.open(req)
370 372 if resp.code != 200:
371 raise exceptions.URLError('Return Code is not 200')
373 raise exceptions.URLError()('Return Code is not 200')
372 374 except Exception as e:
373 375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 376 # means it cannot be cloned
375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
376 378
377 379 # now check if it's a proper hg repo, but don't do it for svn
378 380 try:
379 381 if _proto == 'svn':
380 382 pass
381 383 else:
382 384 # check for pure hg repos
383 385 log.debug(
384 386 "Verifying if URL is a Mercurial repository: %s",
385 387 cleaned_uri)
386 httppeer(make_ui_from_config(config), url).lookup('tip')
388 ui = make_ui_from_config(config)
389 peer_checker = makepeer(ui, url)
390 peer_checker.lookup('tip')
387 391 except Exception as e:
388 392 log.warning("URL is not a valid Mercurial repository: %s",
389 393 cleaned_uri)
390 raise exceptions.URLError(
394 raise exceptions.URLError(e)(
391 395 "url [%s] does not look like an hg repo org_exc: %s"
392 396 % (cleaned_uri, e))
393 397
394 398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
395 399 return True
396 400
397 401 @reraise_safe_exceptions
398 402 def diff(
399 403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
400 404 context):
401 405 repo = self._factory.repo(wire)
402 406
403 407 if file_filter:
404 408 match_filter = match(file_filter[0], '', [file_filter[1]])
405 409 else:
406 410 match_filter = file_filter
407 411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
408 412
409 413 try:
410 414 return "".join(patch.diff(
411 415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
412 except RepoLookupError:
413 raise exceptions.LookupException()
416 except RepoLookupError as e:
417 raise exceptions.LookupException(e)()
414 418
415 419 @reraise_safe_exceptions
416 420 def file_history(self, wire, revision, path, limit):
417 421 repo = self._factory.repo(wire)
418 422
419 423 ctx = repo[revision]
420 424 fctx = ctx.filectx(path)
421 425
422 426 def history_iter():
423 427 limit_rev = fctx.rev()
424 428 for obj in reversed(list(fctx.filelog())):
425 429 obj = fctx.filectx(obj)
426 430 if limit_rev >= obj.rev():
427 431 yield obj
428 432
429 433 history = []
430 434 for cnt, obj in enumerate(history_iter()):
431 435 if limit and cnt >= limit:
432 436 break
433 437 history.append(hex(obj.node()))
434 438
435 439 return [x for x in history]
436 440
437 441 @reraise_safe_exceptions
438 442 def file_history_untill(self, wire, revision, path, limit):
439 443 repo = self._factory.repo(wire)
440 444 ctx = repo[revision]
441 445 fctx = ctx.filectx(path)
442 446
443 447 file_log = list(fctx.filelog())
444 448 if limit:
445 449 # Limit to the last n items
446 450 file_log = file_log[-limit:]
447 451
448 452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
449 453
450 454 @reraise_safe_exceptions
451 455 def fctx_annotate(self, wire, revision, path):
452 456 repo = self._factory.repo(wire)
453 457 ctx = repo[revision]
454 458 fctx = ctx.filectx(path)
455 459
456 460 result = []
457 for i, (a_line, content) in enumerate(fctx.annotate()):
458 ln_no = i + 1
459 sha = hex(a_line.fctx.node())
461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
462 ln_no = i
463 sha = hex(annotate_obj.fctx.node())
464 content = annotate_obj.text
460 465 result.append((ln_no, sha, content))
461 466 return result
462 467
463 468 @reraise_safe_exceptions
464 469 def fctx_data(self, wire, revision, path):
465 470 repo = self._factory.repo(wire)
466 471 ctx = repo[revision]
467 472 fctx = ctx.filectx(path)
468 473 return fctx.data()
469 474
470 475 @reraise_safe_exceptions
471 476 def fctx_flags(self, wire, revision, path):
472 477 repo = self._factory.repo(wire)
473 478 ctx = repo[revision]
474 479 fctx = ctx.filectx(path)
475 480 return fctx.flags()
476 481
477 482 @reraise_safe_exceptions
478 483 def fctx_size(self, wire, revision, path):
479 484 repo = self._factory.repo(wire)
480 485 ctx = repo[revision]
481 486 fctx = ctx.filectx(path)
482 487 return fctx.size()
483 488
484 489 @reraise_safe_exceptions
485 490 def get_all_commit_ids(self, wire, name):
486 491 repo = self._factory.repo(wire)
487 492 revs = repo.filtered(name).changelog.index
488 493 return map(lambda x: hex(x[7]), revs)[:-1]
489 494
490 495 @reraise_safe_exceptions
491 496 def get_config_value(self, wire, section, name, untrusted=False):
492 497 repo = self._factory.repo(wire)
493 498 return repo.ui.config(section, name, untrusted=untrusted)
494 499
495 500 @reraise_safe_exceptions
496 501 def get_config_bool(self, wire, section, name, untrusted=False):
497 502 repo = self._factory.repo(wire)
498 503 return repo.ui.configbool(section, name, untrusted=untrusted)
499 504
500 505 @reraise_safe_exceptions
501 506 def get_config_list(self, wire, section, name, untrusted=False):
502 507 repo = self._factory.repo(wire)
503 508 return repo.ui.configlist(section, name, untrusted=untrusted)
504 509
505 510 @reraise_safe_exceptions
506 511 def is_large_file(self, wire, path):
507 512 return largefiles.lfutil.isstandin(path)
508 513
509 514 @reraise_safe_exceptions
510 515 def in_largefiles_store(self, wire, sha):
511 516 repo = self._factory.repo(wire)
512 517 return largefiles.lfutil.instore(repo, sha)
513 518
514 519 @reraise_safe_exceptions
515 520 def in_user_cache(self, wire, sha):
516 521 repo = self._factory.repo(wire)
517 522 return largefiles.lfutil.inusercache(repo.ui, sha)
518 523
519 524 @reraise_safe_exceptions
520 525 def store_path(self, wire, sha):
521 526 repo = self._factory.repo(wire)
522 527 return largefiles.lfutil.storepath(repo, sha)
523 528
524 529 @reraise_safe_exceptions
525 530 def link(self, wire, sha, path):
526 531 repo = self._factory.repo(wire)
527 532 largefiles.lfutil.link(
528 533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529 534
530 535 @reraise_safe_exceptions
531 536 def localrepository(self, wire, create=False):
532 537 self._factory.repo(wire, create=create)
533 538
534 539 @reraise_safe_exceptions
535 540 def lookup(self, wire, revision, both):
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
537 if isinstance(revision, float):
538 revision = long(revision)
541
539 542 repo = self._factory.repo(wire)
543
544 if isinstance(revision, int):
545 # NOTE(marcink):
546 # since Mercurial doesn't support indexes properly
547 # we need to shift accordingly by one to get proper index, e.g
548 # repo[-1] => repo[-2]
549 # repo[0] => repo[-1]
550 # repo[1] => repo[2] we also never call repo[0] because
551 # it's actually second commit
552 if revision <= 0:
553 revision = revision + -1
554 else:
555 revision = revision + 1
556
540 557 try:
541 558 ctx = repo[revision]
542 except RepoLookupError:
543 raise exceptions.LookupException(revision)
559 except RepoLookupError as e:
560 raise exceptions.LookupException(e)(revision)
544 561 except LookupError as e:
545 raise exceptions.LookupException(e.name)
562 raise exceptions.LookupException(e)(e.name)
546 563
547 564 if not both:
548 565 return ctx.hex()
549 566
550 567 ctx = repo[ctx.hex()]
551 568 return ctx.hex(), ctx.rev()
552 569
553 570 @reraise_safe_exceptions
554 571 def pull(self, wire, url, commit_ids=None):
555 572 repo = self._factory.repo(wire)
556 573 # Disable any prompts for this repo
557 574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
558 575
559 576 remote = peer(repo, {}, url)
560 577 # Disable any prompts for this remote
561 578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
562 579
563 580 if commit_ids:
564 581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
565 582
566 583 return exchange.pull(
567 584 repo, remote, heads=commit_ids, force=None).cgresult
568 585
569 586 @reraise_safe_exceptions
570 587 def sync_push(self, wire, url):
571 588 if self.check_url(url, wire['config']):
572 589 repo = self._factory.repo(wire)
573 590
574 591 # Disable any prompts for this repo
575 592 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576 593
577 594 bookmarks = dict(repo._bookmarks).keys()
578 595 remote = peer(repo, {}, url)
579 596 # Disable any prompts for this remote
580 597 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
581 598
582 599 return exchange.push(
583 600 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
584 601
585 602 @reraise_safe_exceptions
586 603 def revision(self, wire, rev):
587 604 repo = self._factory.repo(wire)
588 605 ctx = repo[rev]
589 606 return ctx.rev()
590 607
591 608 @reraise_safe_exceptions
592 609 def rev_range(self, wire, filter):
593 610 repo = self._factory.repo(wire)
594 611 revisions = [rev for rev in revrange(repo, filter)]
595 612 return revisions
596 613
597 614 @reraise_safe_exceptions
598 615 def rev_range_hash(self, wire, node):
599 616 repo = self._factory.repo(wire)
600 617
601 618 def get_revs(repo, rev_opt):
602 619 if rev_opt:
603 620 revs = revrange(repo, rev_opt)
604 621 if len(revs) == 0:
605 622 return (nullrev, nullrev)
606 623 return max(revs), min(revs)
607 624 else:
608 625 return len(repo) - 1, 0
609 626
610 627 stop, start = get_revs(repo, [node + ':'])
611 628 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
612 629 return revs
613 630
614 631 @reraise_safe_exceptions
615 632 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
616 633 other_path = kwargs.pop('other_path', None)
617 634
618 635 # case when we want to compare two independent repositories
619 636 if other_path and other_path != wire["path"]:
620 637 baseui = self._factory._create_config(wire["config"])
621 638 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
622 639 else:
623 640 repo = self._factory.repo(wire)
624 641 return list(repo.revs(rev_spec, *args))
625 642
626 643 @reraise_safe_exceptions
627 644 def strip(self, wire, revision, update, backup):
628 645 repo = self._factory.repo(wire)
629 646 ctx = repo[revision]
630 647 hgext_strip(
631 648 repo.baseui, repo, ctx.node(), update=update, backup=backup)
632 649
633 650 @reraise_safe_exceptions
634 651 def verify(self, wire,):
635 652 repo = self._factory.repo(wire)
636 653 baseui = self._factory._create_config(wire['config'])
637 654 baseui.setconfig('ui', 'quiet', 'false')
638 655 output = io.BytesIO()
639 656
640 657 def write(data, **unused_kwargs):
641 658 output.write(data)
642 659 baseui.write = write
643 660
644 661 repo.ui = baseui
645 662 verify.verify(repo)
646 663 return output.getvalue()
647 664
648 665 @reraise_safe_exceptions
649 666 def tag(self, wire, name, revision, message, local, user,
650 667 tag_time, tag_timezone):
651 668 repo = self._factory.repo(wire)
652 669 ctx = repo[revision]
653 670 node = ctx.node()
654 671
655 672 date = (tag_time, tag_timezone)
656 673 try:
657 674 hg_tag.tag(repo, name, node, message, local, user, date)
658 675 except Abort as e:
659 676 log.exception("Tag operation aborted")
660 677 # Exception can contain unicode which we convert
661 raise exceptions.AbortException(repr(e))
678 raise exceptions.AbortException(e)(repr(e))
662 679
663 680 @reraise_safe_exceptions
664 681 def tags(self, wire):
665 682 repo = self._factory.repo(wire)
666 683 return repo.tags()
667 684
668 685 @reraise_safe_exceptions
669 686 def update(self, wire, node=None, clean=False):
670 687 repo = self._factory.repo(wire)
671 688 baseui = self._factory._create_config(wire['config'])
672 689 commands.update(baseui, repo, node=node, clean=clean)
673 690
674 691 @reraise_safe_exceptions
675 692 def identify(self, wire):
676 693 repo = self._factory.repo(wire)
677 694 baseui = self._factory._create_config(wire['config'])
678 695 output = io.BytesIO()
679 696 baseui.write = output.write
680 697 # This is required to get a full node id
681 698 baseui.debugflag = True
682 699 commands.identify(baseui, repo, id=True)
683 700
684 701 return output.getvalue()
685 702
686 703 @reraise_safe_exceptions
687 704 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
688 705 hooks=True):
689 706 repo = self._factory.repo(wire)
690 707 baseui = self._factory._create_config(wire['config'], hooks=hooks)
691 708
692 709 # Mercurial internally has a lot of logic that checks ONLY if
693 710 # option is defined, we just pass those if they are defined then
694 711 opts = {}
695 712 if bookmark:
696 713 opts['bookmark'] = bookmark
697 714 if branch:
698 715 opts['branch'] = branch
699 716 if revision:
700 717 opts['rev'] = revision
701 718
702 719 commands.pull(baseui, repo, source, **opts)
703 720
704 721 @reraise_safe_exceptions
705 722 def heads(self, wire, branch=None):
706 723 repo = self._factory.repo(wire)
707 724 baseui = self._factory._create_config(wire['config'])
708 725 output = io.BytesIO()
709 726
710 727 def write(data, **unused_kwargs):
711 728 output.write(data)
712 729
713 730 baseui.write = write
714 731 if branch:
715 732 args = [branch]
716 733 else:
717 734 args = []
718 735 commands.heads(baseui, repo, template='{node} ', *args)
719 736
720 737 return output.getvalue()
721 738
722 739 @reraise_safe_exceptions
723 740 def ancestor(self, wire, revision1, revision2):
724 741 repo = self._factory.repo(wire)
725 742 changelog = repo.changelog
726 743 lookup = repo.lookup
727 744 a = changelog.ancestor(lookup(revision1), lookup(revision2))
728 745 return hex(a)
729 746
730 747 @reraise_safe_exceptions
731 748 def push(self, wire, revisions, dest_path, hooks=True,
732 749 push_branches=False):
733 750 repo = self._factory.repo(wire)
734 751 baseui = self._factory._create_config(wire['config'], hooks=hooks)
735 752 commands.push(baseui, repo, dest=dest_path, rev=revisions,
736 753 new_branch=push_branches)
737 754
738 755 @reraise_safe_exceptions
739 756 def merge(self, wire, revision):
740 757 repo = self._factory.repo(wire)
741 758 baseui = self._factory._create_config(wire['config'])
742 759 repo.ui.setconfig('ui', 'merge', 'internal:dump')
743 760
744 761 # In case of sub repositories are used mercurial prompts the user in
745 762 # case of merge conflicts or different sub repository sources. By
746 763 # setting the interactive flag to `False` mercurial doesn't prompt the
747 764 # used but instead uses a default value.
748 765 repo.ui.setconfig('ui', 'interactive', False)
749 766
750 767 commands.merge(baseui, repo, rev=revision)
751 768
752 769 @reraise_safe_exceptions
753 770 def commit(self, wire, message, username, close_branch=False):
754 771 repo = self._factory.repo(wire)
755 772 baseui = self._factory._create_config(wire['config'])
756 773 repo.ui.setconfig('ui', 'username', username)
757 774 commands.commit(baseui, repo, message=message, close_branch=close_branch)
758 775
759 776 @reraise_safe_exceptions
760 777 def rebase(self, wire, source=None, dest=None, abort=False):
761 778 repo = self._factory.repo(wire)
762 779 baseui = self._factory._create_config(wire['config'])
763 780 repo.ui.setconfig('ui', 'merge', 'internal:dump')
764 781 rebase.rebase(
765 782 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
766 783
767 784 @reraise_safe_exceptions
768 785 def bookmark(self, wire, bookmark, revision=None):
769 786 repo = self._factory.repo(wire)
770 787 baseui = self._factory._create_config(wire['config'])
771 788 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
772 789
773 790 @reraise_safe_exceptions
774 791 def install_hooks(self, wire, force=False):
775 792 # we don't need any special hooks for Mercurial
776 793 pass
@@ -1,63 +1,63 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 from mercurial import demandimport
24 24 # patch demandimport, due to bug in mercurial when it always triggers
25 25 # demandimport.enable()
26 26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import tags as hg_tag
40 40
41 41 from mercurial.commands import clone, nullid, pull
42 42 from mercurial.context import memctx, memfilectx
43 43 from mercurial.error import (
44 44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 45 RequirementError)
46 46 from mercurial.hgweb import hgweb_mod
47 47 from mercurial.localrepo import localrepository
48 48 from mercurial.match import match
49 49 from mercurial.mdiff import diffopts
50 50 from mercurial.node import bin, hex
51 51 from mercurial.encoding import tolocal
52 52 from mercurial.discovery import findcommonoutgoing
53 53 from mercurial.hg import peer
54 from mercurial.httppeer import httppeer
54 from mercurial.httppeer import makepeer
55 55 from mercurial.util import url as hg_url
56 56 from mercurial.scmutil import revrange
57 57 from mercurial.node import nullrev
58 58 from mercurial import exchange
59 59 from hgext import largefiles
60 60
61 61 # those authnadlers are patched for python 2.6.5 bug an
62 62 # infinit looping when given invalid resources
63 63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
@@ -1,134 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
31 31 def patch_largefiles_capabilities():
32 32 """
33 33 Patches the capabilities function in the largefiles extension.
34 34 """
35 35 from vcsserver import hgcompat
36 36 lfproto = hgcompat.largefiles.proto
37 37 wrapper = _dynamic_capabilities_wrapper(
38 38 lfproto, hgcompat.extensions.extensions)
39 lfproto.capabilities = wrapper
39 lfproto._capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 wrapped_capabilities = lfproto.capabilities
44 wrapped_capabilities = lfproto._capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 def _dynamic_capabilities(repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 56 else:
56 57 logger.debug('Extension largefiles disabled')
57 calc_capabilities = lfproto.capabilitiesorig
58 return calc_capabilities(repo, proto)
58 return orig(repo, proto)
59 59
60 60 return _dynamic_capabilities
61 61
62 62
63 63 def patch_subrepo_type_mapping():
64 64 from collections import defaultdict
65 65 from hgcompat import subrepo
66 66 from exceptions import SubrepoMergeException
67 67
68 68 class NoOpSubrepo(subrepo.abstractsubrepo):
69 69
70 70 def __init__(self, ctx, path, *args, **kwargs):
71 71 """Initialize abstractsubrepo part
72 72
73 73 ``ctx`` is the context referring this subrepository in the
74 74 parent repository.
75 75
76 76 ``path`` is the path to this subrepository as seen from
77 77 innermost repository.
78 78 """
79 79 self.ui = ctx.repo().ui
80 80 self._ctx = ctx
81 81 self._path = path
82 82
83 83 def storeclean(self, path):
84 84 """
85 85 returns true if the repository has not changed since it was last
86 86 cloned from or pushed to a given repository.
87 87 """
88 88 return True
89 89
90 90 def dirty(self, ignoreupdate=False, missing=False):
91 91 """returns true if the dirstate of the subrepo is dirty or does not
92 92 match current stored state. If ignoreupdate is true, only check
93 93 whether the subrepo has uncommitted changes in its dirstate.
94 94 """
95 95 return False
96 96
97 97 def basestate(self):
98 98 """current working directory base state, disregarding .hgsubstate
99 99 state and working directory modifications"""
100 100 substate = subrepo.state(self._ctx, self.ui)
101 101 file_system_path, rev, repotype = substate.get(self._path)
102 102 return rev
103 103
104 104 def remove(self):
105 105 """remove the subrepo
106 106
107 107 (should verify the dirstate is not dirty first)
108 108 """
109 109 pass
110 110
111 111 def get(self, state, overwrite=False):
112 112 """run whatever commands are needed to put the subrepo into
113 113 this state
114 114 """
115 115 pass
116 116
117 117 def merge(self, state):
118 118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
119 raise SubrepoMergeException()()
120 120
121 121 def push(self, opts):
122 122 """perform whatever action is analogous to 'hg push'
123 123
124 124 This may be a no-op on some systems.
125 125 """
126 126 pass
127 127
128 128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 129 # whenever a subrepo class is looked up.
130 130 subrepo.types = {
131 131 'hg': NoOpSubrepo,
132 132 'git': NoOpSubrepo,
133 133 'svn': NoOpSubrepo
134 134 }
@@ -1,570 +1,657 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 49 try:
50 50 connection.request('POST', '/', body)
51 51 except Exception:
52 52 log.error('Connection failed on %s', connection)
53 53 raise
54 54 response = connection.getresponse()
55 55 return json.loads(response.read())
56 56
57 57 def _serialize(self, hook_name, extras):
58 58 data = {
59 59 'method': hook_name,
60 60 'extras': extras
61 61 }
62 62 return json.dumps(data)
63 63
64 64
65 65 class HooksDummyClient(object):
66 66 def __init__(self, hooks_module):
67 67 self._hooks_module = importlib.import_module(hooks_module)
68 68
69 69 def __call__(self, hook_name, extras):
70 70 with self._hooks_module.Hooks() as hooks:
71 71 return getattr(hooks, hook_name)(extras)
72 72
73 73
74 74 class RemoteMessageWriter(object):
75 75 """Writer base class."""
76 76 def write(self, message):
77 77 raise NotImplementedError()
78 78
79 79
80 80 class HgMessageWriter(RemoteMessageWriter):
81 81 """Writer that knows how to send messages to mercurial clients."""
82 82
83 83 def __init__(self, ui):
84 84 self.ui = ui
85 85
86 86 def write(self, message):
87 87 # TODO: Check why the quiet flag is set by default.
88 88 old = self.ui.quiet
89 89 self.ui.quiet = False
90 90 self.ui.status(message.encode('utf-8'))
91 91 self.ui.quiet = old
92 92
93 93
94 94 class GitMessageWriter(RemoteMessageWriter):
95 95 """Writer that knows how to send messages to git clients."""
96 96
97 97 def __init__(self, stdout=None):
98 98 self.stdout = stdout or sys.stdout
99 99
100 100 def write(self, message):
101 101 self.stdout.write(message.encode('utf-8'))
102 102
103 103
104 104 class SvnMessageWriter(RemoteMessageWriter):
105 105 """Writer that knows how to send messages to svn clients."""
106 106
107 107 def __init__(self, stderr=None):
108 108 # SVN needs data sent to stderr for back-to-client messaging
109 109 self.stderr = stderr or sys.stderr
110 110
111 111 def write(self, message):
112 112 self.stderr.write(message.encode('utf-8'))
113 113
114 114
115 115 def _handle_exception(result):
116 116 exception_class = result.get('exception')
117 117 exception_traceback = result.get('exception_traceback')
118 118
119 119 if exception_traceback:
120 120 log.error('Got traceback from remote call:%s', exception_traceback)
121 121
122 122 if exception_class == 'HTTPLockedRC':
123 raise exceptions.RepositoryLockedException(*result['exception_args'])
123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 elif exception_class == 'HTTPBranchProtected':
125 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
124 126 elif exception_class == 'RepositoryError':
125 raise exceptions.VcsException(*result['exception_args'])
127 raise exceptions.VcsException()(*result['exception_args'])
126 128 elif exception_class:
127 129 raise Exception('Got remote exception "%s" with args "%s"' %
128 130 (exception_class, result['exception_args']))
129 131
130 132
131 133 def _get_hooks_client(extras):
132 134 if 'hooks_uri' in extras:
133 135 protocol = extras.get('hooks_protocol')
134 136 return HooksHttpClient(extras['hooks_uri'])
135 137 else:
136 138 return HooksDummyClient(extras['hooks_module'])
137 139
138 140
139 141 def _call_hook(hook_name, extras, writer):
140 142 hooks_client = _get_hooks_client(extras)
141 143 log.debug('Hooks, using client:%s', hooks_client)
142 144 result = hooks_client(hook_name, extras)
143 145 log.debug('Hooks got result: %s', result)
144 146 writer.write(result['output'])
145 147 _handle_exception(result)
146 148
147 149 return result['status']
148 150
149 151
150 152 def _extras_from_ui(ui):
151 153 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
152 154 if not hook_data:
153 155 # maybe it's inside environ ?
154 156 env_hook_data = os.environ.get('RC_SCM_DATA')
155 157 if env_hook_data:
156 158 hook_data = env_hook_data
157 159
158 160 extras = {}
159 161 if hook_data:
160 162 extras = json.loads(hook_data)
161 163 return extras
162 164
163 165
164 def _rev_range_hash(repo, node):
166 def _rev_range_hash(repo, node, check_heads=False):
165 167
166 168 commits = []
167 for rev in xrange(repo[node], len(repo)):
169 revs = []
170 start = repo[node].rev()
171 end = len(repo)
172 for rev in range(start, end):
173 revs.append(rev)
168 174 ctx = repo[rev]
169 175 commit_id = mercurial.node.hex(ctx.node())
170 176 branch = ctx.branch()
171 177 commits.append((commit_id, branch))
172 178
173 return commits
179 parent_heads = []
180 if check_heads:
181 parent_heads = _check_heads(repo, start, end, revs)
182 return commits, parent_heads
183
184
185 def _check_heads(repo, start, end, commits):
186 changelog = repo.changelog
187 parents = set()
188
189 for new_rev in commits:
190 for p in changelog.parentrevs(new_rev):
191 if p == mercurial.node.nullrev:
192 continue
193 if p < start:
194 parents.add(p)
195
196 for p in parents:
197 branch = repo[p].branch()
198 # The heads descending from that parent, on the same branch
199 parent_heads = set([p])
200 reachable = set([p])
201 for x in xrange(p + 1, end):
202 if repo[x].branch() != branch:
203 continue
204 for pp in changelog.parentrevs(x):
205 if pp in reachable:
206 reachable.add(x)
207 parent_heads.discard(pp)
208 parent_heads.add(x)
209 # More than one head? Suggest merging
210 if len(parent_heads) > 1:
211 return list(parent_heads)
212
213 return []
174 214
175 215
176 216 def repo_size(ui, repo, **kwargs):
177 217 extras = _extras_from_ui(ui)
178 218 return _call_hook('repo_size', extras, HgMessageWriter(ui))
179 219
180 220
181 221 def pre_pull(ui, repo, **kwargs):
182 222 extras = _extras_from_ui(ui)
183 223 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
184 224
185 225
186 226 def pre_pull_ssh(ui, repo, **kwargs):
187 227 extras = _extras_from_ui(ui)
188 228 if extras and extras.get('SSH'):
189 229 return pre_pull(ui, repo, **kwargs)
190 230 return 0
191 231
192 232
193 233 def post_pull(ui, repo, **kwargs):
194 234 extras = _extras_from_ui(ui)
195 235 return _call_hook('post_pull', extras, HgMessageWriter(ui))
196 236
197 237
198 238 def post_pull_ssh(ui, repo, **kwargs):
199 239 extras = _extras_from_ui(ui)
200 240 if extras and extras.get('SSH'):
201 241 return post_pull(ui, repo, **kwargs)
202 242 return 0
203 243
204 244
205 245 def pre_push(ui, repo, node=None, **kwargs):
246 """
247 Mercurial pre_push hook
248 """
206 249 extras = _extras_from_ui(ui)
250 detect_force_push = extras.get('detect_force_push')
207 251
208 252 rev_data = []
209 253 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
210 254 branches = collections.defaultdict(list)
211 for commit_id, branch in _rev_range_hash(repo, node):
255 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
256 for commit_id, branch in commits:
212 257 branches[branch].append(commit_id)
213 258
214 for branch, commits in branches.iteritems():
259 for branch, commits in branches.items():
215 260 old_rev = kwargs.get('node_last') or commits[0]
216 261 rev_data.append({
217 262 'old_rev': old_rev,
218 263 'new_rev': commits[-1],
219 264 'ref': '',
220 265 'type': 'branch',
221 266 'name': branch,
222 267 })
223 268
269 for push_ref in rev_data:
270 push_ref['multiple_heads'] = _heads
271
224 272 extras['commit_ids'] = rev_data
225 273 return _call_hook('pre_push', extras, HgMessageWriter(ui))
226 274
227 275
228 276 def pre_push_ssh(ui, repo, node=None, **kwargs):
229 if _extras_from_ui(ui).get('SSH'):
277 extras = _extras_from_ui(ui)
278 if extras.get('SSH'):
230 279 return pre_push(ui, repo, node, **kwargs)
231 280
232 281 return 0
233 282
234 283
235 284 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
285 """
286 Mercurial pre_push hook for SSH
287 """
236 288 extras = _extras_from_ui(ui)
237 289 if extras.get('SSH'):
238 290 permission = extras['SSH_PERMISSIONS']
239 291
240 292 if 'repository.write' == permission or 'repository.admin' == permission:
241 293 return 0
242 294
243 295 # non-zero ret code
244 296 return 1
245 297
246 298 return 0
247 299
248 300
249 301 def post_push(ui, repo, node, **kwargs):
302 """
303 Mercurial post_push hook
304 """
250 305 extras = _extras_from_ui(ui)
251 306
252 307 commit_ids = []
253 308 branches = []
254 309 bookmarks = []
255 310 tags = []
256 311
257 for commit_id, branch in _rev_range_hash(repo, node):
312 commits, _heads = _rev_range_hash(repo, node)
313 for commit_id, branch in commits:
258 314 commit_ids.append(commit_id)
259 315 if branch not in branches:
260 316 branches.append(branch)
261 317
262 318 if hasattr(ui, '_rc_pushkey_branches'):
263 319 bookmarks = ui._rc_pushkey_branches
264 320
265 321 extras['commit_ids'] = commit_ids
266 322 extras['new_refs'] = {
267 323 'branches': branches,
268 324 'bookmarks': bookmarks,
269 325 'tags': tags
270 326 }
271 327
272 328 return _call_hook('post_push', extras, HgMessageWriter(ui))
273 329
274 330
275 331 def post_push_ssh(ui, repo, node, **kwargs):
332 """
333 Mercurial post_push hook for SSH
334 """
276 335 if _extras_from_ui(ui).get('SSH'):
277 336 return post_push(ui, repo, node, **kwargs)
278 337 return 0
279 338
280 339
281 340 def key_push(ui, repo, **kwargs):
282 341 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
283 342 # store new bookmarks in our UI object propagated later to post_push
284 343 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
285 344 return
286 345
287 346
288 347 # backward compat
289 348 log_pull_action = post_pull
290 349
291 350 # backward compat
292 351 log_push_action = post_push
293 352
294 353
295 354 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
296 355 """
297 356 Old hook name: keep here for backward compatibility.
298 357
299 358 This is only required when the installed git hooks are not upgraded.
300 359 """
301 360 pass
302 361
303 362
304 363 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
305 364 """
306 365 Old hook name: keep here for backward compatibility.
307 366
308 367 This is only required when the installed git hooks are not upgraded.
309 368 """
310 369 pass
311 370
312 371
313 372 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
314 373
315 374
316 375 def git_pre_pull(extras):
317 376 """
318 377 Pre pull hook.
319 378
320 379 :param extras: dictionary containing the keys defined in simplevcs
321 380 :type extras: dict
322 381
323 382 :return: status code of the hook. 0 for success.
324 383 :rtype: int
325 384 """
326 385 if 'pull' not in extras['hooks']:
327 386 return HookResponse(0, '')
328 387
329 388 stdout = io.BytesIO()
330 389 try:
331 390 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
332 391 except Exception as error:
333 392 status = 128
334 393 stdout.write('ERROR: %s\n' % str(error))
335 394
336 395 return HookResponse(status, stdout.getvalue())
337 396
338 397
339 398 def git_post_pull(extras):
340 399 """
341 400 Post pull hook.
342 401
343 402 :param extras: dictionary containing the keys defined in simplevcs
344 403 :type extras: dict
345 404
346 405 :return: status code of the hook. 0 for success.
347 406 :rtype: int
348 407 """
349 408 if 'pull' not in extras['hooks']:
350 409 return HookResponse(0, '')
351 410
352 411 stdout = io.BytesIO()
353 412 try:
354 413 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
355 414 except Exception as error:
356 415 status = 128
357 416 stdout.write('ERROR: %s\n' % error)
358 417
359 418 return HookResponse(status, stdout.getvalue())
360 419
361 420
362 421 def _parse_git_ref_lines(revision_lines):
363 422 rev_data = []
364 423 for revision_line in revision_lines or []:
365 424 old_rev, new_rev, ref = revision_line.strip().split(' ')
366 425 ref_data = ref.split('/', 2)
367 426 if ref_data[1] in ('tags', 'heads'):
368 427 rev_data.append({
369 428 'old_rev': old_rev,
370 429 'new_rev': new_rev,
371 430 'ref': ref,
372 431 'type': ref_data[1],
373 432 'name': ref_data[2],
374 433 })
375 434 return rev_data
376 435
377 436
378 437 def git_pre_receive(unused_repo_path, revision_lines, env):
379 438 """
380 439 Pre push hook.
381 440
382 441 :param extras: dictionary containing the keys defined in simplevcs
383 442 :type extras: dict
384 443
385 444 :return: status code of the hook. 0 for success.
386 445 :rtype: int
387 446 """
388 447 extras = json.loads(env['RC_SCM_DATA'])
389 448 rev_data = _parse_git_ref_lines(revision_lines)
390 449 if 'push' not in extras['hooks']:
391 450 return 0
451 empty_commit_id = '0' * 40
452
453 detect_force_push = extras.get('detect_force_push')
454
455 for push_ref in rev_data:
456 # store our git-env which holds the temp store
457 push_ref['git_env'] = [
458 (k, v) for k, v in os.environ.items() if k.startswith('GIT')]
459 push_ref['pruned_sha'] = ''
460 if not detect_force_push:
461 # don't check for forced-push when we don't need to
462 continue
463
464 type_ = push_ref['type']
465 new_branch = push_ref['old_rev'] == empty_commit_id
466 if type_ == 'heads' and not new_branch:
467 old_rev = push_ref['old_rev']
468 new_rev = push_ref['new_rev']
469 cmd = [settings.GIT_EXECUTABLE, 'rev-list',
470 old_rev, '^{}'.format(new_rev)]
471 stdout, stderr = subprocessio.run_command(
472 cmd, env=os.environ.copy())
473 # means we're having some non-reachable objects, this forced push
474 # was used
475 if stdout:
476 push_ref['pruned_sha'] = stdout.splitlines()
477
392 478 extras['commit_ids'] = rev_data
393 479 return _call_hook('pre_push', extras, GitMessageWriter())
394 480
395 481
396 482 def git_post_receive(unused_repo_path, revision_lines, env):
397 483 """
398 484 Post push hook.
399 485
400 486 :param extras: dictionary containing the keys defined in simplevcs
401 487 :type extras: dict
402 488
403 489 :return: status code of the hook. 0 for success.
404 490 :rtype: int
405 491 """
406 492 extras = json.loads(env['RC_SCM_DATA'])
407 493 if 'push' not in extras['hooks']:
408 494 return 0
409 495
410 496 rev_data = _parse_git_ref_lines(revision_lines)
411 497
412 498 git_revs = []
413 499
414 500 # N.B.(skreft): it is ok to just call git, as git before calling a
415 501 # subcommand sets the PATH environment variable so that it point to the
416 502 # correct version of the git executable.
417 503 empty_commit_id = '0' * 40
418 504 branches = []
419 505 tags = []
420 506 for push_ref in rev_data:
421 507 type_ = push_ref['type']
422 508
423 509 if type_ == 'heads':
424 510 if push_ref['old_rev'] == empty_commit_id:
425 511 # starting new branch case
426 512 if push_ref['name'] not in branches:
427 513 branches.append(push_ref['name'])
428 514
429 515 # Fix up head revision if needed
430 516 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
431 517 try:
432 518 subprocessio.run_command(cmd, env=os.environ.copy())
433 519 except Exception:
434 520 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
435 521 'refs/heads/%s' % push_ref['name']]
436 522 print("Setting default branch to %s" % push_ref['name'])
437 523 subprocessio.run_command(cmd, env=os.environ.copy())
438 524
439 525 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
440 526 '--format=%(refname)', 'refs/heads/*']
441 527 stdout, stderr = subprocessio.run_command(
442 528 cmd, env=os.environ.copy())
443 529 heads = stdout
444 530 heads = heads.replace(push_ref['ref'], '')
445 heads = ' '.join(head for head in heads.splitlines() if head)
531 heads = ' '.join(head for head
532 in heads.splitlines() if head) or '.'
446 533 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
447 534 '--pretty=format:%H', '--', push_ref['new_rev'],
448 535 '--not', heads]
449 536 stdout, stderr = subprocessio.run_command(
450 537 cmd, env=os.environ.copy())
451 538 git_revs.extend(stdout.splitlines())
452 539 elif push_ref['new_rev'] == empty_commit_id:
453 540 # delete branch case
454 541 git_revs.append('delete_branch=>%s' % push_ref['name'])
455 542 else:
456 543 if push_ref['name'] not in branches:
457 544 branches.append(push_ref['name'])
458 545
459 546 cmd = [settings.GIT_EXECUTABLE, 'log',
460 547 '{old_rev}..{new_rev}'.format(**push_ref),
461 548 '--reverse', '--pretty=format:%H']
462 549 stdout, stderr = subprocessio.run_command(
463 550 cmd, env=os.environ.copy())
464 551 git_revs.extend(stdout.splitlines())
465 552 elif type_ == 'tags':
466 553 if push_ref['name'] not in tags:
467 554 tags.append(push_ref['name'])
468 555 git_revs.append('tag=>%s' % push_ref['name'])
469 556
470 557 extras['commit_ids'] = git_revs
471 558 extras['new_refs'] = {
472 559 'branches': branches,
473 560 'bookmarks': [],
474 561 'tags': tags,
475 562 }
476 563
477 564 if 'repo_size' in extras['hooks']:
478 565 try:
479 566 _call_hook('repo_size', extras, GitMessageWriter())
480 567 except:
481 568 pass
482 569
483 570 return _call_hook('post_push', extras, GitMessageWriter())
484 571
485 572
486 573 def _get_extras_from_txn_id(path, txn_id):
487 574 extras = {}
488 575 try:
489 576 cmd = ['svnlook', 'pget',
490 577 '-t', txn_id,
491 578 '--revprop', path, 'rc-scm-extras']
492 579 stdout, stderr = subprocessio.run_command(
493 580 cmd, env=os.environ.copy())
494 581 extras = json.loads(base64.urlsafe_b64decode(stdout))
495 582 except Exception:
496 583 log.exception('Failed to extract extras info from txn_id')
497 584
498 585 return extras
499 586
500 587
501 588 def svn_pre_commit(repo_path, commit_data, env):
502 589 path, txn_id = commit_data
503 590 branches = []
504 591 tags = []
505 592
506 593 if env.get('RC_SCM_DATA'):
507 594 extras = json.loads(env['RC_SCM_DATA'])
508 595 else:
509 596 # fallback method to read from TXN-ID stored data
510 597 extras = _get_extras_from_txn_id(path, txn_id)
511 598 if not extras:
512 599 return 0
513 600
514 601 extras['commit_ids'] = []
515 602 extras['txn_id'] = txn_id
516 603 extras['new_refs'] = {
517 604 'branches': branches,
518 605 'bookmarks': [],
519 606 'tags': tags,
520 607 }
521 608
522 609 return _call_hook('pre_push', extras, SvnMessageWriter())
523 610
524 611
525 612 def _get_extras_from_commit_id(commit_id, path):
526 613 extras = {}
527 614 try:
528 615 cmd = ['svnlook', 'pget',
529 616 '-r', commit_id,
530 617 '--revprop', path, 'rc-scm-extras']
531 618 stdout, stderr = subprocessio.run_command(
532 619 cmd, env=os.environ.copy())
533 620 extras = json.loads(base64.urlsafe_b64decode(stdout))
534 621 except Exception:
535 622 log.exception('Failed to extract extras info from commit_id')
536 623
537 624 return extras
538 625
539 626
540 627 def svn_post_commit(repo_path, commit_data, env):
541 628 """
542 629 commit_data is path, rev, txn_id
543 630 """
544 631 path, commit_id, txn_id = commit_data
545 632 branches = []
546 633 tags = []
547 634
548 635 if env.get('RC_SCM_DATA'):
549 636 extras = json.loads(env['RC_SCM_DATA'])
550 637 else:
551 638 # fallback method to read from TXN-ID stored data
552 639 extras = _get_extras_from_commit_id(commit_id, path)
553 640 if not extras:
554 641 return 0
555 642
556 643 extras['commit_ids'] = [commit_id]
557 644 extras['txn_id'] = txn_id
558 645 extras['new_refs'] = {
559 646 'branches': branches,
560 647 'bookmarks': [],
561 648 'tags': tags,
562 649 }
563 650
564 651 if 'repo_size' in extras['hooks']:
565 652 try:
566 653 _call_hook('repo_size', extras, SvnMessageWriter())
567 654 except Exception:
568 655 pass
569 656
570 657 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,487 +1,563 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 import sys
19 20 import base64
20 21 import locale
21 22 import logging
22 23 import uuid
23 24 import wsgiref.util
24 25 import traceback
25 26 from itertools import chain
26 27
27 28 import simplejson as json
28 29 import msgpack
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
31 30 from pyramid.config import Configurator
31 from pyramid.settings import asbool, aslist
32 32 from pyramid.wsgi import wsgiapp
33 33 from pyramid.compat import configparser
34 34
35
36 log = logging.getLogger(__name__)
37
38 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
39 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
40
41 try:
42 locale.setlocale(locale.LC_ALL, '')
43 except locale.Error as e:
44 log.error(
45 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
46 os.environ['LC_ALL'] = 'C'
47
48
35 49 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
36 50 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
37 51 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
38 52 from vcsserver.echo_stub.echo_app import EchoApp
39 from vcsserver.exceptions import HTTPRepoLocked
53 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
54 from vcsserver.lib.exc_tracking import store_exception
40 55 from vcsserver.server import VcsServer
41 56
42 57 try:
43 58 from vcsserver.git import GitFactory, GitRemote
44 59 except ImportError:
45 60 GitFactory = None
46 61 GitRemote = None
47 62
48 63 try:
49 64 from vcsserver.hg import MercurialFactory, HgRemote
50 65 except ImportError:
51 66 MercurialFactory = None
52 67 HgRemote = None
53 68
54 69 try:
55 70 from vcsserver.svn import SubversionFactory, SvnRemote
56 71 except ImportError:
57 72 SubversionFactory = None
58 73 SvnRemote = None
59 74
60 log = logging.getLogger(__name__)
75
61 76
62 77
63 78 def _is_request_chunked(environ):
64 79 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
65 80 return stream
66 81
67 82
83 def _int_setting(settings, name, default):
84 settings[name] = int(settings.get(name, default))
85
86
87 def _bool_setting(settings, name, default):
88 input_val = settings.get(name, default)
89 if isinstance(input_val, unicode):
90 input_val = input_val.encode('utf8')
91 settings[name] = asbool(input_val)
92
93
94 def _list_setting(settings, name, default):
95 raw_value = settings.get(name, default)
96
97 # Otherwise we assume it uses pyramids space/newline separation.
98 settings[name] = aslist(raw_value)
99
100
101 def _string_setting(settings, name, default, lower=True):
102 value = settings.get(name, default)
103 if lower:
104 value = value.lower()
105 settings[name] = value
106
107
68 108 class VCS(object):
69 109 def __init__(self, locale=None, cache_config=None):
70 110 self.locale = locale
71 111 self.cache_config = cache_config
72 112 self._configure_locale()
73 self._initialize_cache()
74 113
75 114 if GitFactory and GitRemote:
76 git_repo_cache = self.cache.get_cache_region(
77 'git', region='repo_object')
78 git_factory = GitFactory(git_repo_cache)
115 git_factory = GitFactory()
79 116 self._git_remote = GitRemote(git_factory)
80 117 else:
81 118 log.info("Git client import failed")
82 119
83 120 if MercurialFactory and HgRemote:
84 hg_repo_cache = self.cache.get_cache_region(
85 'hg', region='repo_object')
86 hg_factory = MercurialFactory(hg_repo_cache)
121 hg_factory = MercurialFactory()
87 122 self._hg_remote = HgRemote(hg_factory)
88 123 else:
89 124 log.info("Mercurial client import failed")
90 125
91 126 if SubversionFactory and SvnRemote:
92 svn_repo_cache = self.cache.get_cache_region(
93 'svn', region='repo_object')
94 svn_factory = SubversionFactory(svn_repo_cache)
127 svn_factory = SubversionFactory()
128
95 129 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
130 hg_factory = MercurialFactory()
99 131 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
100 132 else:
101 133 log.info("Subversion client import failed")
102 134
103 135 self._vcsserver = VcsServer()
104 136
105 def _initialize_cache(self):
106 cache_config = parse_cache_config_options(self.cache_config)
107 log.info('Initializing beaker cache: %s' % cache_config)
108 self.cache = CacheManager(**cache_config)
109
110 137 def _configure_locale(self):
111 138 if self.locale:
112 139 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
113 140 else:
114 141 log.info(
115 142 'Configuring locale subsystem based on environment variables')
116 143 try:
117 144 # If self.locale is the empty string, then the locale
118 145 # module will use the environment variables. See the
119 146 # documentation of the package `locale`.
120 147 locale.setlocale(locale.LC_ALL, self.locale)
121 148
122 149 language_code, encoding = locale.getlocale()
123 150 log.info(
124 151 'Locale set to language code "%s" with encoding "%s".',
125 152 language_code, encoding)
126 153 except locale.Error:
127 154 log.exception(
128 155 'Cannot set locale, not configuring the locale system')
129 156
130 157
131 158 class WsgiProxy(object):
132 159 def __init__(self, wsgi):
133 160 self.wsgi = wsgi
134 161
135 162 def __call__(self, environ, start_response):
136 163 input_data = environ['wsgi.input'].read()
137 164 input_data = msgpack.unpackb(input_data)
138 165
139 166 error = None
140 167 try:
141 168 data, status, headers = self.wsgi.handle(
142 169 input_data['environment'], input_data['input_data'],
143 170 *input_data['args'], **input_data['kwargs'])
144 171 except Exception as e:
145 172 data, status, headers = [], None, None
146 173 error = {
147 174 'message': str(e),
148 175 '_vcs_kind': getattr(e, '_vcs_kind', None)
149 176 }
150 177
151 178 start_response(200, {})
152 179 return self._iterator(error, status, headers, data)
153 180
154 181 def _iterator(self, error, status, headers, data):
155 182 initial_data = [
156 183 error,
157 184 status,
158 185 headers,
159 186 ]
160 187
161 188 for d in chain(initial_data, data):
162 189 yield msgpack.packb(d)
163 190
164 191
165 192 class HTTPApplication(object):
166 193 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
167 194
168 195 remote_wsgi = remote_wsgi
169 196 _use_echo_app = False
170 197
171 198 def __init__(self, settings=None, global_config=None):
199 self._sanitize_settings_and_apply_defaults(settings)
200
172 201 self.config = Configurator(settings=settings)
173 202 self.global_config = global_config
203 self.config.include('vcsserver.lib.rc_cache')
174 204
175 205 locale = settings.get('locale', '') or 'en_US.UTF-8'
176 206 vcs = VCS(locale=locale, cache_config=settings)
177 207 self._remotes = {
178 208 'hg': vcs._hg_remote,
179 209 'git': vcs._git_remote,
180 210 'svn': vcs._svn_remote,
181 211 'server': vcs._vcsserver,
182 212 }
183 213 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
184 214 self._use_echo_app = True
185 215 log.warning("Using EchoApp for VCS operations.")
186 216 self.remote_wsgi = remote_wsgi_stub
187 217 self._configure_settings(settings)
188 218 self._configure()
189 219
190 220 def _configure_settings(self, app_settings):
191 221 """
192 222 Configure the settings module.
193 223 """
194 224 git_path = app_settings.get('git_path', None)
195 225 if git_path:
196 226 settings.GIT_EXECUTABLE = git_path
197 227 binary_dir = app_settings.get('core.binary_dir', None)
198 228 if binary_dir:
199 229 settings.BINARY_DIR = binary_dir
200 230
231 def _sanitize_settings_and_apply_defaults(self, settings):
232 # repo_object cache
233 _string_setting(
234 settings,
235 'rc_cache.repo_object.backend',
236 'dogpile.cache.rc.memory_lru')
237 _int_setting(
238 settings,
239 'rc_cache.repo_object.expiration_time',
240 300)
241 _int_setting(
242 settings,
243 'rc_cache.repo_object.max_size',
244 1024)
245
201 246 def _configure(self):
202 247 self.config.add_renderer(
203 248 name='msgpack',
204 249 factory=self._msgpack_renderer_factory)
205 250
206 251 self.config.add_route('service', '/_service')
207 252 self.config.add_route('status', '/status')
208 253 self.config.add_route('hg_proxy', '/proxy/hg')
209 254 self.config.add_route('git_proxy', '/proxy/git')
210 255 self.config.add_route('vcs', '/{backend}')
211 256 self.config.add_route('stream_git', '/stream/git/*repo_name')
212 257 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
213 258
214 259 self.config.add_view(
215 260 self.status_view, route_name='status', renderer='json')
216 261 self.config.add_view(
217 262 self.service_view, route_name='service', renderer='msgpack')
218 263
219 264 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
220 265 self.config.add_view(self.git_proxy(), route_name='git_proxy')
221 266 self.config.add_view(
222 267 self.vcs_view, route_name='vcs', renderer='msgpack',
223 268 custom_predicates=[self.is_vcs_view])
224 269
225 270 self.config.add_view(self.hg_stream(), route_name='stream_hg')
226 271 self.config.add_view(self.git_stream(), route_name='stream_git')
227 272
228 273 def notfound(request):
229 274 return {'status': '404 NOT FOUND'}
230 275 self.config.add_notfound_view(notfound, renderer='json')
231 276
232 277 self.config.add_view(self.handle_vcs_exception, context=Exception)
233 278
234 279 self.config.add_tween(
235 280 'vcsserver.tweens.RequestWrapperTween',
236 281 )
237 282
238 283 def wsgi_app(self):
239 284 return self.config.make_wsgi_app()
240 285
241 286 def vcs_view(self, request):
242 287 remote = self._remotes[request.matchdict['backend']]
243 288 payload = msgpack.unpackb(request.body, use_list=True)
244 289 method = payload.get('method')
245 290 params = payload.get('params')
246 291 wire = params.get('wire')
247 292 args = params.get('args')
248 293 kwargs = params.get('kwargs')
294 context_uid = None
295
249 296 if wire:
250 297 try:
251 wire['context'] = uuid.UUID(wire['context'])
298 wire['context'] = context_uid = uuid.UUID(wire['context'])
252 299 except KeyError:
253 300 pass
254 301 args.insert(0, wire)
255 302
256 log.debug('method called:%s with kwargs:%s', method, kwargs)
303 log.debug('method called:%s with kwargs:%s context_uid: %s',
304 method, kwargs, context_uid)
257 305 try:
258 306 resp = getattr(remote, method)(*args, **kwargs)
259 307 except Exception as e:
260 tb_info = traceback.format_exc()
308 exc_info = list(sys.exc_info())
309 exc_type, exc_value, exc_traceback = exc_info
310
311 org_exc = getattr(e, '_org_exc', None)
312 org_exc_name = None
313 if org_exc:
314 org_exc_name = org_exc.__class__.__name__
315 # replace our "faked" exception with our org
316 exc_info[0] = org_exc.__class__
317 exc_info[1] = org_exc
318
319 store_exception(id(exc_info), exc_info)
320
321 tb_info = ''.join(
322 traceback.format_exception(exc_type, exc_value, exc_traceback))
261 323
262 324 type_ = e.__class__.__name__
263 325 if type_ not in self.ALLOWED_EXCEPTIONS:
264 326 type_ = None
265 327
266 328 resp = {
267 329 'id': payload.get('id'),
268 330 'error': {
269 331 'message': e.message,
270 332 'traceback': tb_info,
333 'org_exc': org_exc_name,
271 334 'type': type_
272 335 }
273 336 }
274 337 try:
275 resp['error']['_vcs_kind'] = e._vcs_kind
338 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
276 339 except AttributeError:
277 340 pass
278 341 else:
279 342 resp = {
280 343 'id': payload.get('id'),
281 344 'result': resp
282 345 }
283 346
284 347 return resp
285 348
286 349 def status_view(self, request):
287 350 import vcsserver
288 351 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
289 352 'pid': os.getpid()}
290 353
291 354 def service_view(self, request):
292 355 import vcsserver
293 356
294 357 payload = msgpack.unpackb(request.body, use_list=True)
295 358
296 359 try:
297 360 path = self.global_config['__file__']
298 361 config = configparser.ConfigParser()
299 362 config.read(path)
300 363 parsed_ini = config
301 364 if parsed_ini.has_section('server:main'):
302 365 parsed_ini = dict(parsed_ini.items('server:main'))
303 366 except Exception:
304 367 log.exception('Failed to read .ini file for display')
305 368 parsed_ini = {}
306 369
307 370 resp = {
308 371 'id': payload.get('id'),
309 372 'result': dict(
310 373 version=vcsserver.__version__,
311 374 config=parsed_ini,
312 375 payload=payload,
313 376 )
314 377 }
315 378 return resp
316 379
317 380 def _msgpack_renderer_factory(self, info):
318 381 def _render(value, system):
319 382 value = msgpack.packb(value)
320 383 request = system.get('request')
321 384 if request is not None:
322 385 response = request.response
323 386 ct = response.content_type
324 387 if ct == response.default_content_type:
325 388 response.content_type = 'application/x-msgpack'
326 389 return value
327 390 return _render
328 391
329 392 def set_env_from_config(self, environ, config):
330 393 dict_conf = {}
331 394 try:
332 395 for elem in config:
333 396 if elem[0] == 'rhodecode':
334 397 dict_conf = json.loads(elem[2])
335 398 break
336 399 except Exception:
337 400 log.exception('Failed to fetch SCM CONFIG')
338 401 return
339 402
340 403 username = dict_conf.get('username')
341 404 if username:
342 405 environ['REMOTE_USER'] = username
343 406 # mercurial specific, some extension api rely on this
344 407 environ['HGUSER'] = username
345 408
346 409 ip = dict_conf.get('ip')
347 410 if ip:
348 411 environ['REMOTE_HOST'] = ip
349 412
350 413 if _is_request_chunked(environ):
351 414 # set the compatibility flag for webob
352 415 environ['wsgi.input_terminated'] = True
353 416
354 417 def hg_proxy(self):
355 418 @wsgiapp
356 419 def _hg_proxy(environ, start_response):
357 420 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
358 421 return app(environ, start_response)
359 422 return _hg_proxy
360 423
361 424 def git_proxy(self):
362 425 @wsgiapp
363 426 def _git_proxy(environ, start_response):
364 427 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
365 428 return app(environ, start_response)
366 429 return _git_proxy
367 430
368 431 def hg_stream(self):
369 432 if self._use_echo_app:
370 433 @wsgiapp
371 434 def _hg_stream(environ, start_response):
372 435 app = EchoApp('fake_path', 'fake_name', None)
373 436 return app(environ, start_response)
374 437 return _hg_stream
375 438 else:
376 439 @wsgiapp
377 440 def _hg_stream(environ, start_response):
378 441 log.debug('http-app: handling hg stream')
379 442 repo_path = environ['HTTP_X_RC_REPO_PATH']
380 443 repo_name = environ['HTTP_X_RC_REPO_NAME']
381 444 packed_config = base64.b64decode(
382 445 environ['HTTP_X_RC_REPO_CONFIG'])
383 446 config = msgpack.unpackb(packed_config)
384 447 app = scm_app.create_hg_wsgi_app(
385 448 repo_path, repo_name, config)
386 449
387 450 # Consistent path information for hgweb
388 451 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
389 452 environ['REPO_NAME'] = repo_name
390 453 self.set_env_from_config(environ, config)
391 454
392 455 log.debug('http-app: starting app handler '
393 456 'with %s and process request', app)
394 457 return app(environ, ResponseFilter(start_response))
395 458 return _hg_stream
396 459
397 460 def git_stream(self):
398 461 if self._use_echo_app:
399 462 @wsgiapp
400 463 def _git_stream(environ, start_response):
401 464 app = EchoApp('fake_path', 'fake_name', None)
402 465 return app(environ, start_response)
403 466 return _git_stream
404 467 else:
405 468 @wsgiapp
406 469 def _git_stream(environ, start_response):
407 470 log.debug('http-app: handling git stream')
408 471 repo_path = environ['HTTP_X_RC_REPO_PATH']
409 472 repo_name = environ['HTTP_X_RC_REPO_NAME']
410 473 packed_config = base64.b64decode(
411 474 environ['HTTP_X_RC_REPO_CONFIG'])
412 475 config = msgpack.unpackb(packed_config)
413 476
414 477 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
415 478 self.set_env_from_config(environ, config)
416 479
417 480 content_type = environ.get('CONTENT_TYPE', '')
418 481
419 482 path = environ['PATH_INFO']
420 483 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
421 484 log.debug(
422 485 'LFS: Detecting if request `%s` is LFS server path based '
423 486 'on content type:`%s`, is_lfs:%s',
424 487 path, content_type, is_lfs_request)
425 488
426 489 if not is_lfs_request:
427 490 # fallback detection by path
428 491 if GIT_LFS_PROTO_PAT.match(path):
429 492 is_lfs_request = True
430 493 log.debug(
431 494 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
432 495 path, is_lfs_request)
433 496
434 497 if is_lfs_request:
435 498 app = scm_app.create_git_lfs_wsgi_app(
436 499 repo_path, repo_name, config)
437 500 else:
438 501 app = scm_app.create_git_wsgi_app(
439 502 repo_path, repo_name, config)
440 503
441 504 log.debug('http-app: starting app handler '
442 505 'with %s and process request', app)
443 506
444 507 return app(environ, start_response)
445 508
446 509 return _git_stream
447 510
448 511 def is_vcs_view(self, context, request):
449 512 """
450 513 View predicate that returns true if given backend is supported by
451 514 defined remotes.
452 515 """
453 516 backend = request.matchdict.get('backend')
454 517 return backend in self._remotes
455 518
456 519 def handle_vcs_exception(self, exception, request):
457 520 _vcs_kind = getattr(exception, '_vcs_kind', '')
458 521 if _vcs_kind == 'repo_locked':
459 522 # Get custom repo-locked status code if present.
460 523 status_code = request.headers.get('X-RC-Locked-Status-Code')
461 524 return HTTPRepoLocked(
462 525 title=exception.message, status_code=status_code)
463 526
464 # Re-raise exception if we can not handle it.
465 log.exception(
466 'error occurred handling this request for path: %s', request.path)
527 elif _vcs_kind == 'repo_branch_protected':
528 # Get custom repo-branch-protected status code if present.
529 return HTTPRepoBranchProtected(title=exception.message)
530
531 exc_info = request.exc_info
532 store_exception(id(exc_info), exc_info)
533
534 traceback_info = 'unavailable'
535 if request.exc_info:
536 exc_type, exc_value, exc_tb = request.exc_info
537 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
538
539 log.error(
540 'error occurred handling this request for path: %s, \n tb: %s',
541 request.path, traceback_info)
467 542 raise exception
468 543
469 544
470 545 class ResponseFilter(object):
471 546
472 547 def __init__(self, start_response):
473 548 self._start_response = start_response
474 549
475 550 def __call__(self, status, response_headers, exc_info=None):
476 551 headers = tuple(
477 552 (h, v) for h, v in response_headers
478 553 if not wsgiref.util.is_hop_by_hop(h))
479 554 return self._start_response(status, headers, exc_info)
480 555
481 556
482 557 def main(global_config, **settings):
483 558 if MercurialFactory:
484 559 hgpatches.patch_largefiles_capabilities()
485 560 hgpatches.patch_subrepo_type_mapping()
561
486 562 app = HTTPApplication(settings=settings, global_config=global_config)
487 563 return app.wsgi_app()
@@ -1,229 +1,234 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import itertools
21 21
22 22 import mercurial
23 23 import mercurial.error
24 import mercurial.wireprotoserver
24 25 import mercurial.hgweb.common
25 26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.protocol
27 27 import webob.exc
28 28
29 29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 30
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 # propagated from mercurial documentation
36 36 HG_UI_SECTIONS = [
37 37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 40 ]
41 41
42 42
43 43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 44 """Extension of hgweb that simplifies some functions."""
45 45
46 46 def _get_view(self, repo):
47 47 """Views are not supported."""
48 48 return repo
49 49
50 50 def loadsubweb(self):
51 51 """The result is only used in the templater method which is not used."""
52 52 return None
53 53
54 54 def run(self):
55 55 """Unused function so raise an exception if accidentally called."""
56 56 raise NotImplementedError
57 57
58 58 def templater(self, req):
59 59 """Function used in an unreachable code path.
60 60
61 61 This code is unreachable because we guarantee that the HTTP request,
62 62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 63 never going to get a user-visible url.
64 64 """
65 65 raise NotImplementedError
66 66
67 67 def archivelist(self, nodeid):
68 68 """Unused function so raise an exception if accidentally called."""
69 69 raise NotImplementedError
70 70
71 71 def __call__(self, environ, start_response):
72 72 """Run the WSGI application.
73 73
74 74 This may be called by multiple threads.
75 75 """
76 req = mercurial.hgweb.request.wsgirequest(environ, start_response)
77 gen = self.run_wsgi(req)
76 from mercurial.hgweb import request as requestmod
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
78 80
79 81 first_chunk = None
80 82
81 83 try:
82 84 data = gen.next()
83 def first_chunk(): yield data
85
86 def first_chunk():
87 yield data
84 88 except StopIteration:
85 89 pass
86 90
87 91 if first_chunk:
88 92 return itertools.chain(first_chunk(), gen)
89 93 return gen
90 94
91 def _runwsgi(self, req, repo):
92 cmd = req.form.get('cmd', [''])[0]
93 if not mercurial.hgweb.protocol.iscmd(cmd):
94 req.respond(
95 mercurial.hgweb.common.ErrorResponse(
96 mercurial.hgweb.common.HTTP_BAD_REQUEST),
97 mercurial.hgweb.protocol.HGTYPE
98 )
99 return ['']
95 def _runwsgi(self, req, res, repo):
100 96
101 return super(HgWeb, self)._runwsgi(req, repo)
97 cmd = req.qsparams.get('cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
101 from mercurial.hgweb.common import statusmessage
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.setbodybytes('')
104 return res.sendresponse()
105
106 return super(HgWeb, self)._runwsgi(req, res, repo)
102 107
103 108
104 109 def make_hg_ui_from_config(repo_config):
105 110 baseui = mercurial.ui.ui()
106 111
107 112 # clean the baseui object
108 113 baseui._ocfg = mercurial.config.config()
109 114 baseui._ucfg = mercurial.config.config()
110 115 baseui._tcfg = mercurial.config.config()
111 116
112 117 for section, option, value in repo_config:
113 118 baseui.setconfig(section, option, value)
114 119
115 120 # make our hgweb quiet so it doesn't print output
116 121 baseui.setconfig('ui', 'quiet', 'true')
117 122
118 123 return baseui
119 124
120 125
121 126 def update_hg_ui_from_hgrc(baseui, repo_path):
122 127 path = os.path.join(repo_path, '.hg', 'hgrc')
123 128
124 129 if not os.path.isfile(path):
125 130 log.debug('hgrc file is not present at %s, skipping...', path)
126 131 return
127 132 log.debug('reading hgrc from %s', path)
128 133 cfg = mercurial.config.config()
129 134 cfg.read(path)
130 135 for section in HG_UI_SECTIONS:
131 136 for k, v in cfg.items(section):
132 137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
133 138 baseui.setconfig(section, k, v)
134 139
135 140
136 141 def create_hg_wsgi_app(repo_path, repo_name, config):
137 142 """
138 143 Prepares a WSGI application to handle Mercurial requests.
139 144
140 145 :param config: is a list of 3-item tuples representing a ConfigObject
141 146 (it is the serialized version of the config object).
142 147 """
143 148 log.debug("Creating Mercurial WSGI application")
144 149
145 150 baseui = make_hg_ui_from_config(config)
146 151 update_hg_ui_from_hgrc(baseui, repo_path)
147 152
148 153 try:
149 154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
150 except mercurial.error.RequirementError as exc:
151 raise exceptions.RequirementException(exc)
155 except mercurial.error.RequirementError as e:
156 raise exceptions.RequirementException(e)(e)
152 157
153 158
154 159 class GitHandler(object):
155 160 """
156 161 Handler for Git operations like push/pull etc
157 162 """
158 163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
159 164 extras):
160 165 if not os.path.isdir(repo_location):
161 166 raise OSError(repo_location)
162 167 self.content_path = repo_location
163 168 self.repo_name = repo_name
164 169 self.repo_location = repo_location
165 170 self.extras = extras
166 171 self.git_path = git_path
167 172 self.update_server_info = update_server_info
168 173
169 174 def __call__(self, environ, start_response):
170 175 app = webob.exc.HTTPNotFound()
171 176 candidate_paths = (
172 177 self.content_path, os.path.join(self.content_path, '.git'))
173 178
174 179 for content_path in candidate_paths:
175 180 try:
176 181 app = pygrack.GitRepository(
177 182 self.repo_name, content_path, self.git_path,
178 183 self.update_server_info, self.extras)
179 184 break
180 185 except OSError:
181 186 continue
182 187
183 188 return app(environ, start_response)
184 189
185 190
186 191 def create_git_wsgi_app(repo_path, repo_name, config):
187 192 """
188 193 Creates a WSGI application to handle Git requests.
189 194
190 195 :param config: is a dictionary holding the extras.
191 196 """
192 197 git_path = settings.GIT_EXECUTABLE
193 198 update_server_info = config.pop('git_update_server_info')
194 199 app = GitHandler(
195 200 repo_path, repo_name, git_path, update_server_info, config)
196 201
197 202 return app
198 203
199 204
200 205 class GitLFSHandler(object):
201 206 """
202 207 Handler for Git LFS operations
203 208 """
204 209
205 210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
206 211 extras):
207 212 if not os.path.isdir(repo_location):
208 213 raise OSError(repo_location)
209 214 self.content_path = repo_location
210 215 self.repo_name = repo_name
211 216 self.repo_location = repo_location
212 217 self.extras = extras
213 218 self.git_path = git_path
214 219 self.update_server_info = update_server_info
215 220
216 221 def get_app(self, git_lfs_enabled, git_lfs_store_path):
217 222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
218 223 return app
219 224
220 225
221 226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
222 227 git_path = settings.GIT_EXECUTABLE
223 228 update_server_info = config.pop('git_update_server_info')
224 229 git_lfs_enabled = config.pop('git_lfs_enabled')
225 230 git_lfs_store_path = config.pop('git_lfs_store_path')
226 231 app = GitLFSHandler(
227 232 repo_path, repo_name, git_path, update_server_info, config)
228 233
229 234 return app.get_app(git_lfs_enabled, git_lfs_store_path)
@@ -1,689 +1,705 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 from urllib2 import URLError
22 22 import logging
23 23 import posixpath as vcspath
24 24 import StringIO
25 25 import urllib
26 26 import traceback
27 27
28 28 import svn.client
29 29 import svn.core
30 30 import svn.delta
31 31 import svn.diff
32 32 import svn.fs
33 33 import svn.repos
34 34
35 35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 36 from vcsserver.base import RepoFactory, raise_from_original
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
41 41 # Set of svn compatible version flags.
42 42 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = set([
43 svn_compatible_versions = {
44 44 'pre-1.4-compatible',
45 45 'pre-1.5-compatible',
46 46 'pre-1.6-compatible',
47 47 'pre-1.8-compatible',
48 'pre-1.9-compatible',
49 ])
48 'pre-1.9-compatible'
49 }
50 50
51 51 svn_compatible_versions_map = {
52 52 'pre-1.4-compatible': '1.3',
53 53 'pre-1.5-compatible': '1.4',
54 54 'pre-1.6-compatible': '1.5',
55 55 'pre-1.8-compatible': '1.7',
56 56 'pre-1.9-compatible': '1.8',
57 57 }
58 58
59 59
60 60 def reraise_safe_exceptions(func):
61 61 """Decorator for converting svn exceptions to something neutral."""
62 62 def wrapper(*args, **kwargs):
63 63 try:
64 64 return func(*args, **kwargs)
65 65 except Exception as e:
66 66 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in hg remote call")
68 raise_from_original(exceptions.UnhandledException)
67 log.exception("Unhandled exception in svn remote call")
68 raise_from_original(exceptions.UnhandledException(e))
69 69 raise
70 70 return wrapper
71 71
72 72
73 73 class SubversionFactory(RepoFactory):
74 repo_type = 'svn'
74 75
75 76 def _create_repo(self, wire, create, compatible_version):
76 77 path = svn.core.svn_path_canonicalize(wire['path'])
77 78 if create:
78 79 fs_config = {'compatible-version': '1.9'}
79 80 if compatible_version:
80 81 if compatible_version not in svn_compatible_versions:
81 82 raise Exception('Unknown SVN compatible version "{}"'
82 83 .format(compatible_version))
83 84 fs_config['compatible-version'] = \
84 85 svn_compatible_versions_map[compatible_version]
85 86
86 87 log.debug('Create SVN repo with config "%s"', fs_config)
87 88 repo = svn.repos.create(path, "", "", None, fs_config)
88 89 else:
89 90 repo = svn.repos.open(path)
90 91
91 92 log.debug('Got SVN object: %s', repo)
92 93 return repo
93 94
94 95 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
96 """
97 Get a repository instance for the given path.
98
99 Uses internally the low level beaker API since the decorators introduce
100 significant overhead.
101 """
102 region = self._cache_region
103 context = wire.get('context', None)
104 repo_path = wire.get('path', '')
105 context_uid = '{}'.format(context)
106 cache = wire.get('cache', True)
107 cache_on = context and cache
108
109 @region.conditional_cache_on_arguments(condition=cache_on)
110 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
96 111 return self._create_repo(wire, create, compatible_version)
97 112
98 return self._repo(wire, create_new_repo)
113 return create_new_repo(self.repo_type, repo_path, context_uid,
114 compatible_version)
99 115
100 116
101 117 NODE_TYPE_MAPPING = {
102 118 svn.core.svn_node_file: 'file',
103 119 svn.core.svn_node_dir: 'dir',
104 120 }
105 121
106 122
107 123 class SvnRemote(object):
108 124
109 125 def __init__(self, factory, hg_factory=None):
110 126 self._factory = factory
111 127 # TODO: Remove once we do not use internal Mercurial objects anymore
112 128 # for subversion
113 129 self._hg_factory = hg_factory
114 130
115 131 @reraise_safe_exceptions
116 132 def discover_svn_version(self):
117 133 try:
118 134 import svn.core
119 135 svn_ver = svn.core.SVN_VERSION
120 136 except ImportError:
121 137 svn_ver = None
122 138 return svn_ver
123 139
124 140 def check_url(self, url, config_items):
125 141 # this can throw exception if not installed, but we detect this
126 142 from hgsubversion import svnrepo
127 143
128 144 baseui = self._hg_factory._create_config(config_items)
129 145 # uuid function get's only valid UUID from proper repo, else
130 146 # throws exception
131 147 try:
132 148 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 149 except Exception:
134 150 tb = traceback.format_exc()
135 151 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
136 152 raise URLError(
137 153 '"%s" is not a valid Subversion source url.' % (url, ))
138 154 return True
139 155
140 156 def is_path_valid_repository(self, wire, path):
141 157
142 158 # NOTE(marcink): short circuit the check for SVN repo
143 159 # the repos.open might be expensive to check, but we have one cheap
144 160 # pre condition that we can use, to check for 'format' file
145 161
146 162 if not os.path.isfile(os.path.join(path, 'format')):
147 163 return False
148 164
149 165 try:
150 166 svn.repos.open(path)
151 167 except svn.core.SubversionException:
152 168 tb = traceback.format_exc()
153 169 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
154 170 return False
155 171 return True
156 172
157 173 @reraise_safe_exceptions
158 174 def verify(self, wire,):
159 175 repo_path = wire['path']
160 176 if not self.is_path_valid_repository(wire, repo_path):
161 177 raise Exception(
162 178 "Path %s is not a valid Subversion repository." % repo_path)
163 179
164 180 cmd = ['svnadmin', 'info', repo_path]
165 181 stdout, stderr = subprocessio.run_command(cmd)
166 182 return stdout
167 183
168 184 def lookup(self, wire, revision):
169 185 if revision not in [-1, None, 'HEAD']:
170 186 raise NotImplementedError
171 187 repo = self._factory.repo(wire)
172 188 fs_ptr = svn.repos.fs(repo)
173 189 head = svn.fs.youngest_rev(fs_ptr)
174 190 return head
175 191
176 192 def lookup_interval(self, wire, start_ts, end_ts):
177 193 repo = self._factory.repo(wire)
178 194 fsobj = svn.repos.fs(repo)
179 195 start_rev = None
180 196 end_rev = None
181 197 if start_ts:
182 198 start_ts_svn = apr_time_t(start_ts)
183 199 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
184 200 else:
185 201 start_rev = 1
186 202 if end_ts:
187 203 end_ts_svn = apr_time_t(end_ts)
188 204 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
189 205 else:
190 206 end_rev = svn.fs.youngest_rev(fsobj)
191 207 return start_rev, end_rev
192 208
193 209 def revision_properties(self, wire, revision):
194 210 repo = self._factory.repo(wire)
195 211 fs_ptr = svn.repos.fs(repo)
196 212 return svn.fs.revision_proplist(fs_ptr, revision)
197 213
198 214 def revision_changes(self, wire, revision):
199 215
200 216 repo = self._factory.repo(wire)
201 217 fsobj = svn.repos.fs(repo)
202 218 rev_root = svn.fs.revision_root(fsobj, revision)
203 219
204 220 editor = svn.repos.ChangeCollector(fsobj, rev_root)
205 221 editor_ptr, editor_baton = svn.delta.make_editor(editor)
206 222 base_dir = ""
207 223 send_deltas = False
208 224 svn.repos.replay2(
209 225 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
210 226 editor_ptr, editor_baton, None)
211 227
212 228 added = []
213 229 changed = []
214 230 removed = []
215 231
216 232 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
217 233 for path, change in editor.changes.iteritems():
218 234 # TODO: Decide what to do with directory nodes. Subversion can add
219 235 # empty directories.
220 236
221 237 if change.item_kind == svn.core.svn_node_dir:
222 238 continue
223 239 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
224 240 added.append(path)
225 241 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
226 242 svn.repos.CHANGE_ACTION_REPLACE]:
227 243 changed.append(path)
228 244 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
229 245 removed.append(path)
230 246 else:
231 247 raise NotImplementedError(
232 248 "Action %s not supported on path %s" % (
233 249 change.action, path))
234 250
235 251 changes = {
236 252 'added': added,
237 253 'changed': changed,
238 254 'removed': removed,
239 255 }
240 256 return changes
241 257
242 258 def node_history(self, wire, path, revision, limit):
243 259 cross_copies = False
244 260 repo = self._factory.repo(wire)
245 261 fsobj = svn.repos.fs(repo)
246 262 rev_root = svn.fs.revision_root(fsobj, revision)
247 263
248 264 history_revisions = []
249 265 history = svn.fs.node_history(rev_root, path)
250 266 history = svn.fs.history_prev(history, cross_copies)
251 267 while history:
252 268 __, node_revision = svn.fs.history_location(history)
253 269 history_revisions.append(node_revision)
254 270 if limit and len(history_revisions) >= limit:
255 271 break
256 272 history = svn.fs.history_prev(history, cross_copies)
257 273 return history_revisions
258 274
259 275 def node_properties(self, wire, path, revision):
260 276 repo = self._factory.repo(wire)
261 277 fsobj = svn.repos.fs(repo)
262 278 rev_root = svn.fs.revision_root(fsobj, revision)
263 279 return svn.fs.node_proplist(rev_root, path)
264 280
265 281 def file_annotate(self, wire, path, revision):
266 282 abs_path = 'file://' + urllib.pathname2url(
267 283 vcspath.join(wire['path'], path))
268 284 file_uri = svn.core.svn_path_canonicalize(abs_path)
269 285
270 286 start_rev = svn_opt_revision_value_t(0)
271 287 peg_rev = svn_opt_revision_value_t(revision)
272 288 end_rev = peg_rev
273 289
274 290 annotations = []
275 291
276 292 def receiver(line_no, revision, author, date, line, pool):
277 293 annotations.append((line_no, revision, line))
278 294
279 295 # TODO: Cannot use blame5, missing typemap function in the swig code
280 296 try:
281 297 svn.client.blame2(
282 298 file_uri, peg_rev, start_rev, end_rev,
283 299 receiver, svn.client.create_context())
284 300 except svn.core.SubversionException as exc:
285 301 log.exception("Error during blame operation.")
286 302 raise Exception(
287 303 "Blame not supported or file does not exist at path %s. "
288 304 "Error %s." % (path, exc))
289 305
290 306 return annotations
291 307
292 308 def get_node_type(self, wire, path, rev=None):
293 309 repo = self._factory.repo(wire)
294 310 fs_ptr = svn.repos.fs(repo)
295 311 if rev is None:
296 312 rev = svn.fs.youngest_rev(fs_ptr)
297 313 root = svn.fs.revision_root(fs_ptr, rev)
298 314 node = svn.fs.check_path(root, path)
299 315 return NODE_TYPE_MAPPING.get(node, None)
300 316
301 317 def get_nodes(self, wire, path, revision=None):
302 318 repo = self._factory.repo(wire)
303 319 fsobj = svn.repos.fs(repo)
304 320 if revision is None:
305 321 revision = svn.fs.youngest_rev(fsobj)
306 322 root = svn.fs.revision_root(fsobj, revision)
307 323 entries = svn.fs.dir_entries(root, path)
308 324 result = []
309 325 for entry_path, entry_info in entries.iteritems():
310 326 result.append(
311 327 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
312 328 return result
313 329
314 330 def get_file_content(self, wire, path, rev=None):
315 331 repo = self._factory.repo(wire)
316 332 fsobj = svn.repos.fs(repo)
317 333 if rev is None:
318 334 rev = svn.fs.youngest_revision(fsobj)
319 335 root = svn.fs.revision_root(fsobj, rev)
320 336 content = svn.core.Stream(svn.fs.file_contents(root, path))
321 337 return content.read()
322 338
323 339 def get_file_size(self, wire, path, revision=None):
324 340 repo = self._factory.repo(wire)
325 341 fsobj = svn.repos.fs(repo)
326 342 if revision is None:
327 343 revision = svn.fs.youngest_revision(fsobj)
328 344 root = svn.fs.revision_root(fsobj, revision)
329 345 size = svn.fs.file_length(root, path)
330 346 return size
331 347
332 348 def create_repository(self, wire, compatible_version=None):
333 349 log.info('Creating Subversion repository in path "%s"', wire['path'])
334 350 self._factory.repo(wire, create=True,
335 351 compatible_version=compatible_version)
336 352
337 353 def import_remote_repository(self, wire, src_url):
338 354 repo_path = wire['path']
339 355 if not self.is_path_valid_repository(wire, repo_path):
340 356 raise Exception(
341 357 "Path %s is not a valid Subversion repository." % repo_path)
342 358
343 359 # TODO: johbo: URL checks ?
344 360 import subprocess
345 361 rdump = subprocess.Popen(
346 362 ['svnrdump', 'dump', '--non-interactive', src_url],
347 363 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
348 364 load = subprocess.Popen(
349 365 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
350 366
351 367 # TODO: johbo: This can be a very long operation, might be better
352 368 # to track some kind of status and provide an api to check if the
353 369 # import is done.
354 370 rdump.wait()
355 371 load.wait()
356 372
357 373 if rdump.returncode != 0:
358 374 errors = rdump.stderr.read()
359 375 log.error('svnrdump dump failed: statuscode %s: message: %s',
360 376 rdump.returncode, errors)
361 377 reason = 'UNKNOWN'
362 378 if 'svnrdump: E230001:' in errors:
363 379 reason = 'INVALID_CERTIFICATE'
364 380 raise Exception(
365 381 'Failed to dump the remote repository from %s.' % src_url,
366 382 reason)
367 383 if load.returncode != 0:
368 384 raise Exception(
369 385 'Failed to load the dump of remote repository from %s.' %
370 386 (src_url, ))
371 387
372 388 def commit(self, wire, message, author, timestamp, updated, removed):
373 389 assert isinstance(message, str)
374 390 assert isinstance(author, str)
375 391
376 392 repo = self._factory.repo(wire)
377 393 fsobj = svn.repos.fs(repo)
378 394
379 395 rev = svn.fs.youngest_rev(fsobj)
380 396 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
381 397 txn_root = svn.fs.txn_root(txn)
382 398
383 399 for node in updated:
384 400 TxnNodeProcessor(node, txn_root).update()
385 401 for node in removed:
386 402 TxnNodeProcessor(node, txn_root).remove()
387 403
388 404 commit_id = svn.repos.fs_commit_txn(repo, txn)
389 405
390 406 if timestamp:
391 407 apr_time = apr_time_t(timestamp)
392 408 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
393 409 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
394 410
395 411 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
396 412 return commit_id
397 413
398 414 def diff(self, wire, rev1, rev2, path1=None, path2=None,
399 415 ignore_whitespace=False, context=3):
400 416
401 417 wire.update(cache=False)
402 418 repo = self._factory.repo(wire)
403 419 diff_creator = SvnDiffer(
404 420 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
405 421 try:
406 422 return diff_creator.generate_diff()
407 423 except svn.core.SubversionException as e:
408 424 log.exception(
409 425 "Error during diff operation operation. "
410 426 "Path might not exist %s, %s" % (path1, path2))
411 427 return ""
412 428
413 429 @reraise_safe_exceptions
414 430 def is_large_file(self, wire, path):
415 431 return False
416 432
417 433 @reraise_safe_exceptions
418 434 def install_hooks(self, wire, force=False):
419 435 from vcsserver.hook_utils import install_svn_hooks
420 436 repo_path = wire['path']
421 437 binary_dir = settings.BINARY_DIR
422 438 executable = None
423 439 if binary_dir:
424 440 executable = os.path.join(binary_dir, 'python')
425 441 return install_svn_hooks(
426 442 repo_path, executable=executable, force_create=force)
427 443
428 444
429 445 class SvnDiffer(object):
430 446 """
431 447 Utility to create diffs based on difflib and the Subversion api
432 448 """
433 449
434 450 binary_content = False
435 451
436 452 def __init__(
437 453 self, repo, src_rev, src_path, tgt_rev, tgt_path,
438 454 ignore_whitespace, context):
439 455 self.repo = repo
440 456 self.ignore_whitespace = ignore_whitespace
441 457 self.context = context
442 458
443 459 fsobj = svn.repos.fs(repo)
444 460
445 461 self.tgt_rev = tgt_rev
446 462 self.tgt_path = tgt_path or ''
447 463 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
448 464 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
449 465
450 466 self.src_rev = src_rev
451 467 self.src_path = src_path or self.tgt_path
452 468 self.src_root = svn.fs.revision_root(fsobj, src_rev)
453 469 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
454 470
455 471 self._validate()
456 472
457 473 def _validate(self):
458 474 if (self.tgt_kind != svn.core.svn_node_none and
459 475 self.src_kind != svn.core.svn_node_none and
460 476 self.src_kind != self.tgt_kind):
461 477 # TODO: johbo: proper error handling
462 478 raise Exception(
463 479 "Source and target are not compatible for diff generation. "
464 480 "Source type: %s, target type: %s" %
465 481 (self.src_kind, self.tgt_kind))
466 482
467 483 def generate_diff(self):
468 484 buf = StringIO.StringIO()
469 485 if self.tgt_kind == svn.core.svn_node_dir:
470 486 self._generate_dir_diff(buf)
471 487 else:
472 488 self._generate_file_diff(buf)
473 489 return buf.getvalue()
474 490
475 491 def _generate_dir_diff(self, buf):
476 492 editor = DiffChangeEditor()
477 493 editor_ptr, editor_baton = svn.delta.make_editor(editor)
478 494 svn.repos.dir_delta2(
479 495 self.src_root,
480 496 self.src_path,
481 497 '', # src_entry
482 498 self.tgt_root,
483 499 self.tgt_path,
484 500 editor_ptr, editor_baton,
485 501 authorization_callback_allow_all,
486 502 False, # text_deltas
487 503 svn.core.svn_depth_infinity, # depth
488 504 False, # entry_props
489 505 False, # ignore_ancestry
490 506 )
491 507
492 508 for path, __, change in sorted(editor.changes):
493 509 self._generate_node_diff(
494 510 buf, change, path, self.tgt_path, path, self.src_path)
495 511
496 512 def _generate_file_diff(self, buf):
497 513 change = None
498 514 if self.src_kind == svn.core.svn_node_none:
499 515 change = "add"
500 516 elif self.tgt_kind == svn.core.svn_node_none:
501 517 change = "delete"
502 518 tgt_base, tgt_path = vcspath.split(self.tgt_path)
503 519 src_base, src_path = vcspath.split(self.src_path)
504 520 self._generate_node_diff(
505 521 buf, change, tgt_path, tgt_base, src_path, src_base)
506 522
507 523 def _generate_node_diff(
508 524 self, buf, change, tgt_path, tgt_base, src_path, src_base):
509 525
510 526 if self.src_rev == self.tgt_rev and tgt_base == src_base:
511 527 # makes consistent behaviour with git/hg to return empty diff if
512 528 # we compare same revisions
513 529 return
514 530
515 531 tgt_full_path = vcspath.join(tgt_base, tgt_path)
516 532 src_full_path = vcspath.join(src_base, src_path)
517 533
518 534 self.binary_content = False
519 535 mime_type = self._get_mime_type(tgt_full_path)
520 536
521 537 if mime_type and not mime_type.startswith('text'):
522 538 self.binary_content = True
523 539 buf.write("=" * 67 + '\n')
524 540 buf.write("Cannot display: file marked as a binary type.\n")
525 541 buf.write("svn:mime-type = %s\n" % mime_type)
526 542 buf.write("Index: %s\n" % (tgt_path, ))
527 543 buf.write("=" * 67 + '\n')
528 544 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
529 545 'tgt_path': tgt_path})
530 546
531 547 if change == 'add':
532 548 # TODO: johbo: SVN is missing a zero here compared to git
533 549 buf.write("new file mode 10644\n")
534 550
535 551 #TODO(marcink): intro to binary detection of svn patches
536 552 # if self.binary_content:
537 553 # buf.write('GIT binary patch\n')
538 554
539 555 buf.write("--- /dev/null\t(revision 0)\n")
540 556 src_lines = []
541 557 else:
542 558 if change == 'delete':
543 559 buf.write("deleted file mode 10644\n")
544 560
545 561 #TODO(marcink): intro to binary detection of svn patches
546 562 # if self.binary_content:
547 563 # buf.write('GIT binary patch\n')
548 564
549 565 buf.write("--- a/%s\t(revision %s)\n" % (
550 566 src_path, self.src_rev))
551 567 src_lines = self._svn_readlines(self.src_root, src_full_path)
552 568
553 569 if change == 'delete':
554 570 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
555 571 tgt_lines = []
556 572 else:
557 573 buf.write("+++ b/%s\t(revision %s)\n" % (
558 574 tgt_path, self.tgt_rev))
559 575 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
560 576
561 577 if not self.binary_content:
562 578 udiff = svn_diff.unified_diff(
563 579 src_lines, tgt_lines, context=self.context,
564 580 ignore_blank_lines=self.ignore_whitespace,
565 581 ignore_case=False,
566 582 ignore_space_changes=self.ignore_whitespace)
567 583 buf.writelines(udiff)
568 584
569 585 def _get_mime_type(self, path):
570 586 try:
571 587 mime_type = svn.fs.node_prop(
572 588 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
573 589 except svn.core.SubversionException:
574 590 mime_type = svn.fs.node_prop(
575 591 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
576 592 return mime_type
577 593
578 594 def _svn_readlines(self, fs_root, node_path):
579 595 if self.binary_content:
580 596 return []
581 597 node_kind = svn.fs.check_path(fs_root, node_path)
582 598 if node_kind not in (
583 599 svn.core.svn_node_file, svn.core.svn_node_symlink):
584 600 return []
585 601 content = svn.core.Stream(
586 602 svn.fs.file_contents(fs_root, node_path)).read()
587 603 return content.splitlines(True)
588 604
589 605
590 606
591 607 class DiffChangeEditor(svn.delta.Editor):
592 608 """
593 609 Records changes between two given revisions
594 610 """
595 611
596 612 def __init__(self):
597 613 self.changes = []
598 614
599 615 def delete_entry(self, path, revision, parent_baton, pool=None):
600 616 self.changes.append((path, None, 'delete'))
601 617
602 618 def add_file(
603 619 self, path, parent_baton, copyfrom_path, copyfrom_revision,
604 620 file_pool=None):
605 621 self.changes.append((path, 'file', 'add'))
606 622
607 623 def open_file(self, path, parent_baton, base_revision, file_pool=None):
608 624 self.changes.append((path, 'file', 'change'))
609 625
610 626
611 627 def authorization_callback_allow_all(root, path, pool):
612 628 return True
613 629
614 630
615 631 class TxnNodeProcessor(object):
616 632 """
617 633 Utility to process the change of one node within a transaction root.
618 634
619 635 It encapsulates the knowledge of how to add, update or remove
620 636 a node for a given transaction root. The purpose is to support the method
621 637 `SvnRemote.commit`.
622 638 """
623 639
624 640 def __init__(self, node, txn_root):
625 641 assert isinstance(node['path'], str)
626 642
627 643 self.node = node
628 644 self.txn_root = txn_root
629 645
630 646 def update(self):
631 647 self._ensure_parent_dirs()
632 648 self._add_file_if_node_does_not_exist()
633 649 self._update_file_content()
634 650 self._update_file_properties()
635 651
636 652 def remove(self):
637 653 svn.fs.delete(self.txn_root, self.node['path'])
638 654 # TODO: Clean up directory if empty
639 655
640 656 def _ensure_parent_dirs(self):
641 657 curdir = vcspath.dirname(self.node['path'])
642 658 dirs_to_create = []
643 659 while not self._svn_path_exists(curdir):
644 660 dirs_to_create.append(curdir)
645 661 curdir = vcspath.dirname(curdir)
646 662
647 663 for curdir in reversed(dirs_to_create):
648 664 log.debug('Creating missing directory "%s"', curdir)
649 665 svn.fs.make_dir(self.txn_root, curdir)
650 666
651 667 def _svn_path_exists(self, path):
652 668 path_status = svn.fs.check_path(self.txn_root, path)
653 669 return path_status != svn.core.svn_node_none
654 670
655 671 def _add_file_if_node_does_not_exist(self):
656 672 kind = svn.fs.check_path(self.txn_root, self.node['path'])
657 673 if kind == svn.core.svn_node_none:
658 674 svn.fs.make_file(self.txn_root, self.node['path'])
659 675
660 676 def _update_file_content(self):
661 677 assert isinstance(self.node['content'], str)
662 678 handler, baton = svn.fs.apply_textdelta(
663 679 self.txn_root, self.node['path'], None, None)
664 680 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
665 681
666 682 def _update_file_properties(self):
667 683 properties = self.node.get('properties', {})
668 684 for key, value in properties.iteritems():
669 685 svn.fs.change_node_prop(
670 686 self.txn_root, self.node['path'], key, value)
671 687
672 688
673 689 def apr_time_t(timestamp):
674 690 """
675 691 Convert a Python timestamp into APR timestamp type apr_time_t
676 692 """
677 693 return timestamp * 1E6
678 694
679 695
680 696 def svn_opt_revision_value_t(num):
681 697 """
682 698 Put `num` into a `svn_opt_revision_value_t` structure.
683 699 """
684 700 value = svn.core.svn_opt_revision_value_t()
685 701 value.number = num
686 702 revision = svn.core.svn_opt_revision_t()
687 703 revision.kind = svn.core.svn_opt_revision_number
688 704 revision.value = value
689 705 return revision
@@ -1,58 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import socket
19 19
20 20 import pytest
21 21
22 22
23 23 def pytest_addoption(parser):
24 24 parser.addoption(
25 25 '--repeat', type=int, default=100,
26 26 help="Number of repetitions in performance tests.")
27 27
28 28
29 29 @pytest.fixture(scope='session')
30 30 def repeat(request):
31 31 """
32 32 The number of repetitions is based on this fixture.
33 33
34 34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
35 35 tests are not too slow in our default test suite.
36 36 """
37 37 return request.config.getoption('--repeat')
38 38
39 39
40 40 @pytest.fixture(scope='session')
41 41 def vcsserver_port(request):
42 42 port = get_available_port()
43 print 'Using vcsserver port %s' % (port, )
43 print('Using vcsserver port %s' % (port, ))
44 44 return port
45 45
46 46
47 47 def get_available_port():
48 48 family = socket.AF_INET
49 49 socktype = socket.SOCK_STREAM
50 50 host = '127.0.0.1'
51 51
52 52 mysocket = socket.socket(family, socktype)
53 53 mysocket.bind((host, 0))
54 54 port = mysocket.getsockname()[1]
55 55 mysocket.close()
56 56 del mysocket
57 57 return port
58
@@ -1,162 +1,165 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.fetch(
82 82 wire=None, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
102 102 def test_remove_ref(self):
103 103 ref_to_remove = 'refs/tags/v0.1.9'
104 104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 105 self.remote_git.remove_ref(None, ref_to_remove)
106 106 assert ref_to_remove not in self.mock_repo.refs
107 107
108 108
109 109 class TestReraiseSafeExceptions(object):
110 110 def test_method_decorated_with_reraise_safe_exceptions(self):
111 111 factory = Mock()
112 112 git_remote = git.GitRemote(factory)
113 113
114 114 def fake_function():
115 115 return None
116 116
117 117 decorator = git.reraise_safe_exceptions(fake_function)
118 118
119 119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 120 for method_name, method in methods:
121 121 if not method_name.startswith('_'):
122 122 assert method.im_func.__code__ == decorator.__code__
123 123
124 124 @pytest.mark.parametrize('side_effect, expected_type', [
125 125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 129 (dulwich.errors.HangupException(), 'error'),
130 130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 131 ])
132 132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 133 @git.reraise_safe_exceptions
134 134 def fake_method():
135 135 raise side_effect
136 136
137 137 with pytest.raises(Exception) as exc_info:
138 138 fake_method()
139 139 assert type(exc_info.value) == Exception
140 140 assert exc_info.value._vcs_kind == expected_type
141 141
142 142
143 143 class TestDulwichRepoWrapper(object):
144 144 def test_calls_close_on_delete(self):
145 145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 146 with isdir_patcher:
147 147 repo = git.Repo('/tmp/abcde')
148 148 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 149 del repo
150 150 close_mock.assert_called_once_with()
151 151
152 152
153 153 class TestGitFactory(object):
154 154 def test_create_repo_returns_dulwich_wrapper(self):
155 factory = git.GitFactory(repo_cache=Mock())
156 wire = {
157 'path': '/tmp/abcde'
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
155
156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 mock.side_effect = {'repo_objects': ''}
158 factory = git.GitFactory()
159 wire = {
160 'path': '/tmp/abcde'
161 }
162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 with isdir_patcher:
164 result = factory._create_repo(wire, True)
165 assert isinstance(result, git.Repo)
@@ -1,127 +1,127 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
29 29 class TestHGLookup(object):
30 30 def setup(self):
31 31 self.mock_repo = MagicMock()
32 32 self.mock_repo.__getitem__.side_effect = LookupError(
33 33 'revision_or_commit_id', 'index', 'message')
34 34 factory = Mock()
35 35 factory.repo = Mock(return_value=self.mock_repo)
36 36 self.remote_hg = hg.HgRemote(factory)
37 37
38 38 def test_fail_lookup_hg(self):
39 39 with pytest.raises(Exception) as exc_info:
40 40 self.remote_hg.lookup(
41 41 wire=None, revision='revision_or_commit_id', both=True)
42 42
43 43 assert exc_info.value._vcs_kind == 'lookup'
44 44 assert 'revision_or_commit_id' in exc_info.value.args
45 45
46 46
47 47 class TestDiff(object):
48 48 def test_raising_safe_exception_when_lookup_failed(self):
49 49 repo = Mock()
50 50 factory = Mock()
51 51 factory.repo = Mock(return_value=repo)
52 52 hg_remote = hg.HgRemote(factory)
53 53 with patch('mercurial.patch.diff') as diff_mock:
54 54 diff_mock.side_effect = LookupError(
55 55 'deadbeef', 'index', 'message')
56 56 with pytest.raises(Exception) as exc_info:
57 57 hg_remote.diff(
58 58 wire=None, rev1='deadbeef', rev2='deadbee1',
59 59 file_filter=None, opt_git=True, opt_ignorews=True,
60 60 context=3)
61 61 assert type(exc_info.value) == Exception
62 62 assert exc_info.value._vcs_kind == 'lookup'
63 63
64 64
65 65 class TestReraiseSafeExceptions(object):
66 66 def test_method_decorated_with_reraise_safe_exceptions(self):
67 67 factory = Mock()
68 68 hg_remote = hg.HgRemote(factory)
69 69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
70 70 decorator = hg.reraise_safe_exceptions(None)
71 71 for method_name, method in methods:
72 72 if not method_name.startswith('_'):
73 73 assert method.im_func.__code__ == decorator.__code__
74 74
75 75 @pytest.mark.parametrize('side_effect, expected_type', [
76 76 (hgcompat.Abort(), 'abort'),
77 77 (hgcompat.InterventionRequired(), 'abort'),
78 78 (hgcompat.RepoLookupError(), 'lookup'),
79 79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
80 80 (hgcompat.RepoError(), 'error'),
81 81 (hgcompat.RequirementError(), 'requirement'),
82 82 ])
83 83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
84 84 @hg.reraise_safe_exceptions
85 85 def fake_method():
86 86 raise side_effect
87 87
88 88 with pytest.raises(Exception) as exc_info:
89 89 fake_method()
90 90 assert type(exc_info.value) == Exception
91 91 assert exc_info.value._vcs_kind == expected_type
92 92
93 93 def test_keeps_original_traceback(self):
94 94 @hg.reraise_safe_exceptions
95 95 def fake_method():
96 96 try:
97 97 raise hgcompat.Abort()
98 98 except:
99 99 self.original_traceback = traceback.format_tb(
100 100 sys.exc_info()[2])
101 101 raise
102 102
103 103 try:
104 104 fake_method()
105 105 except Exception:
106 106 new_traceback = traceback.format_tb(sys.exc_info()[2])
107 107
108 108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
109 109 assert new_traceback_tail == self.original_traceback
110 110
111 111 def test_maps_unknow_exceptions_to_unhandled(self):
112 112 @hg.reraise_safe_exceptions
113 113 def stub_method():
114 114 raise ValueError('stub')
115 115
116 116 with pytest.raises(Exception) as exc_info:
117 117 stub_method()
118 118 assert exc_info.value._vcs_kind == 'unhandled'
119 119
120 120 def test_does_not_map_known_exceptions(self):
121 121 @hg.reraise_safe_exceptions
122 122 def stub_method():
123 raise exceptions.LookupException('stub')
123 raise exceptions.LookupException()('stub')
124 124
125 125 with pytest.raises(Exception) as exc_info:
126 126 stub_method()
127 127 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,130 +1,124 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 19 import pytest
20 20
21 21 from vcsserver import hgcompat, hgpatches
22 22
23 23
24 24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25 25
26 26
27 27 def test_patch_largefiles_capabilities_applies_patch(
28 28 patched_capabilities):
29 29 lfproto = hgcompat.largefiles.proto
30 30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
31 assert lfproto._capabilities.func_name == '_dynamic_capabilities'
32 32
33 33
34 34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
36 orig_capabilities):
36 37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 38 hgcompat.largefiles.proto, stub_extensions)
38 39
39 caps = dynamic_capabilities(stub_repo, stub_proto)
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
40 41
41 42 stub_extensions.assert_called_once_with(stub_ui)
42 43 assert LARGEFILES_CAPABILITY not in caps
43 44
44 45
45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
48 hgcompat.largefiles.proto, stub_extensions)
49
50 # This happens when the extension is loaded for the first time, important
51 # to ensure that an updated function is correctly picked up.
52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
53 return_value='REPLACED')
54
55 caps = dynamic_capabilities(stub_repo, stub_proto)
56 assert 'REPLACED' == caps
57
58
59 46 def test_dynamic_capabilities_ignores_updated_capabilities(
60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
48 orig_capabilities):
61 49 stub_extensions.return_value = [('largefiles', mock.Mock())]
62 50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
63 51 hgcompat.largefiles.proto, stub_extensions)
64 52
65 53 # This happens when the extension is loaded for the first time, important
66 54 # to ensure that an updated function is correctly picked up.
67 hgcompat.largefiles.proto.capabilities = mock.Mock(
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
68 56 side_effect=Exception('Must not be called'))
69 57
70 dynamic_capabilities(stub_repo, stub_proto)
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
71 59
72 60
73 61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
63 orig_capabilities):
75 64 stub_extensions.return_value = [('largefiles', mock.Mock())]
76 65
77 66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
78 67 hgcompat.largefiles.proto, stub_extensions)
79 68
80 caps = dynamic_capabilities(stub_repo, stub_proto)
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
81 70
82 71 stub_extensions.assert_called_once_with(stub_ui)
83 72 assert LARGEFILES_CAPABILITY in caps
84 73
85 74
86 75 def test_hgsubversion_import():
87 76 from hgsubversion import svnrepo
88 77 assert svnrepo
89 78
90 79
91 80 @pytest.fixture
92 81 def patched_capabilities(request):
93 82 """
94 83 Patch in `capabilitiesorig` and restore both capability functions.
95 84 """
96 85 lfproto = hgcompat.largefiles.proto
97 orig_capabilities = lfproto.capabilities
98 orig_capabilitiesorig = lfproto.capabilitiesorig
99
100 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
86 orig_capabilities = lfproto._capabilities
101 87
102 88 @request.addfinalizer
103 89 def restore():
104 lfproto.capabilities = orig_capabilities
105 lfproto.capabilitiesorig = orig_capabilitiesorig
90 lfproto._capabilities = orig_capabilities
106 91
107 92
108 93 @pytest.fixture
109 94 def stub_repo(stub_ui):
110 95 repo = mock.Mock()
111 96 repo.ui = stub_ui
112 97 return repo
113 98
114 99
115 100 @pytest.fixture
116 101 def stub_proto(stub_ui):
117 102 proto = mock.Mock()
118 103 proto.ui = stub_ui
119 104 return proto
120 105
121 106
122 107 @pytest.fixture
108 def orig_capabilities():
109 from mercurial.wireprotov1server import wireprotocaps
110
111 def _capabilities(repo, proto):
112 return wireprotocaps
113 return _capabilities
114
115
116 @pytest.fixture
123 117 def stub_ui():
124 118 return hgcompat.ui.ui()
125 119
126 120
127 121 @pytest.fixture
128 122 def stub_extensions():
129 123 extensions = mock.Mock(return_value=tuple())
130 124 return extensions
@@ -1,44 +1,39 b''
1 1 """
2 2 Tests used to profile the HTTP based implementation.
3 3 """
4 4
5 5 import pytest
6 6 import webtest
7 7
8 8 from vcsserver.http_main import main
9 9
10 10
11 11 @pytest.fixture
12 12 def vcs_app():
13 13 stub_settings = {
14 14 'dev.use_echo_app': 'true',
15 'beaker.cache.regions': 'repo_object',
16 'beaker.cache.repo_object.type': 'memorylru',
17 'beaker.cache.repo_object.max_items': '100',
18 'beaker.cache.repo_object.expire': '300',
19 'beaker.cache.repo_object.enabled': 'true',
20 15 'locale': 'en_US.UTF-8',
21 16 }
22 17 vcs_app = main({}, **stub_settings)
23 18 app = webtest.TestApp(vcs_app)
24 19 return app
25 20
26 21
27 22 @pytest.fixture(scope='module')
28 23 def data():
29 24 one_kb = 'x' * 1024
30 25 return one_kb * 1024 * 10
31 26
32 27
33 28 def test_http_app_streaming_with_data(data, repeat, vcs_app):
34 29 app = vcs_app
35 30 for x in xrange(repeat / 10):
36 31 response = app.post('/stream/git/', params=data)
37 32 assert response.status_code == 200
38 33
39 34
40 35 def test_http_app_streaming_no_data(repeat, vcs_app):
41 36 app = vcs_app
42 37 for x in xrange(repeat / 10):
43 38 response = app.post('/stream/git/')
44 39 assert response.status_code == 200
@@ -1,82 +1,89 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import logging
18 import hashlib
18 19
19 20 log = logging.getLogger(__name__)
20 21
21 22
22 23 def safe_int(val, default=None):
23 24 """
24 25 Returns int() of val if val is not convertable to int use default
25 26 instead
26 27
27 28 :param val:
28 29 :param default:
29 30 """
30 31
31 32 try:
32 33 val = int(val)
33 34 except (ValueError, TypeError):
34 35 val = default
35 36
36 37 return val
37 38
38 39
39 40 def safe_str(unicode_, to_encoding=['utf8']):
40 41 """
41 42 safe str function. Does few trick to turn unicode_ into string
42 43
43 44 In case of UnicodeEncodeError, we try to return it with encoding detected
44 45 by chardet library if it fails fallback to string with errors replaced
45 46
46 47 :param unicode_: unicode to encode
47 48 :rtype: str
48 49 :returns: str object
49 50 """
50 51
51 52 # if it's not basestr cast to str
52 53 if not isinstance(unicode_, basestring):
53 54 return str(unicode_)
54 55
55 56 if isinstance(unicode_, str):
56 57 return unicode_
57 58
58 59 if not isinstance(to_encoding, (list, tuple)):
59 60 to_encoding = [to_encoding]
60 61
61 62 for enc in to_encoding:
62 63 try:
63 64 return unicode_.encode(enc)
64 65 except UnicodeEncodeError:
65 66 pass
66 67
67 68 try:
68 69 import chardet
69 70 encoding = chardet.detect(unicode_)['encoding']
70 71 if encoding is None:
71 72 raise UnicodeEncodeError()
72 73
73 74 return unicode_.encode(encoding)
74 75 except (ImportError, UnicodeEncodeError):
75 76 return unicode_.encode(to_encoding[0], 'replace')
76 77
77 78
78 79 class AttributeDict(dict):
79 80 def __getattr__(self, attr):
80 81 return self.get(attr, None)
81 82 __setattr__ = dict.__setitem__
82 83 __delattr__ = dict.__delitem__
84
85
86 def sha1(val):
87 return hashlib.sha1(val).hexdigest()
88
89
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now