release: Merge default into stable for release preparation
marcink -
r511:00fadf7b merge stable
@@ -0,0 +1,152 b''
1 """
2 gunicorn config extension and hooks. Sets additional configuration that is
3 applied on top of the .ini config.
4
5 - workers = ${cpu_number}
6 - threads = 1
7 - proc_name = ${gunicorn_proc_name}
8 - worker_class = sync
9 - worker_connections = 10
10 - max_requests = 1000
11 - max_requests_jitter = 30
12 - timeout = 21600
13
14 """
15
16 import multiprocessing
17 import sys
18 import time
19 import datetime
20 import threading
21 import traceback
22 from gunicorn.glogging import Logger
23
24
25 # GLOBAL
26 errorlog = '-'
27 accesslog = '-'
28 loglevel = 'debug'
29
30 # SECURITY
31
32 # The maximum size of HTTP request line in bytes.
33 limit_request_line = 4094
34
35 # Limit the number of HTTP header fields in a request.
36 limit_request_fields = 1024
37
38 # Limit the allowed size of an HTTP request header field.
39 # Value is a positive number or 0.
40 # Setting it to 0 will allow unlimited header field sizes.
41 limit_request_field_size = 0
42
43
44 # Timeout for graceful workers restart.
45 # After receiving a restart signal, workers have this much time to finish
46 # serving requests. Workers still alive after the timeout (starting from the
47 # receipt of the restart signal) are force killed.
48 graceful_timeout = 30
49
50
51 # The number of seconds to wait for requests on a Keep-Alive connection.
52 # Generally set in the 1-5 seconds range.
53 keepalive = 2
54
55
56 # SERVER MECHANICS
57 # None == system temp dir
58 # worker_tmp_dir is recommended to be set to some tmpfs
59 worker_tmp_dir = None
60 tmp_upload_dir = None
61
62 # Custom log format
63 access_log_format = (
64 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
65
66 # self-adjust workers based on CPU count
67 # workers = multiprocessing.cpu_count() * 2 + 1
68
69
70 def post_fork(server, worker):
71 server.log.info("[<%-10s>] WORKER spawned", worker.pid)
72
73
74 def pre_fork(server, worker):
75 pass
76
77
78 def pre_exec(server):
79 server.log.info("Forked child, re-executing.")
80
81
82 def on_starting(server):
83 server.log.info("Server is starting.")
84
85
86 def when_ready(server):
87 server.log.info("Server is ready. Spawning workers")
88
89
90 def on_reload(server):
91 pass
92
93
94 def worker_int(worker):
95 worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)
96
97 # get traceback info, on worker crash
98 id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
99 code = []
100 for thread_id, stack in sys._current_frames().items():
101 code.append(
102 "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
103 for fname, lineno, name, line in traceback.extract_stack(stack):
104 code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
105 if line:
106 code.append(" %s" % (line.strip()))
107 worker.log.debug("\n".join(code))
108
109
110 def worker_abort(worker):
111 worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid)
112
113
114 def worker_exit(server, worker):
115 worker.log.info("[<%-10s>] worker exit", worker.pid)
116
117
118 def child_exit(server, worker):
119 worker.log.info("[<%-10s>] worker child exit", worker.pid)
120
121
122 def pre_request(worker, req):
123 worker.start_time = time.time()
124 worker.log.debug(
125 "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
126
127
128 def post_request(worker, req, environ, resp):
129 total_time = time.time() - worker.start_time
130 worker.log.debug(
131 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
132 worker.nr, req.method, req.path, resp.status_code, total_time)
133
134
135 class RhodeCodeLogger(Logger):
136 """
137 Custom Logger providing customizations that stock gunicorn does not allow
138 """
139
140 datefmt = r"%Y-%m-%d %H:%M:%S"
141
142 def __init__(self, cfg):
143 Logger.__init__(self, cfg)
144
145 def now(self):
146 """ return date in RhodeCode Log format """
147 now = time.time()
148 msecs = int((now - long(now)) * 1000)
149 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
150
151
152 logger_class = RhodeCodeLogger
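
The hooks above are picked up when gunicorn is started with this file as its config module (via `-c`); the file name itself is not shown in this diff. Below is a minimal sketch, not part of the commit, of how the pre_request/post_request pair measures request time. The `gunicorn_config` module name and the `_Stub` objects are assumptions for the demo; under gunicorn the server supplies real worker/request/response objects.

import logging
import time

from gunicorn_config import pre_request, post_request  # hypothetical module name

logging.basicConfig(level=logging.DEBUG)


class _Stub(object):
    """Stand-in for the worker/request/response objects gunicorn provides."""
    def __init__(self, **kw):
        self.__dict__.update(kw)


worker = _Stub(nr=1, log=logging.getLogger('gunicorn.demo'))
req = _Stub(method='GET', path='/_ping')
resp = _Stub(status_code=200)

pre_request(worker, req)             # stamps worker.start_time and logs the request
time.sleep(0.05)                     # pretend the app handled the request
post_request(worker, req, {}, resp)  # logs status code and total load time
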
@@ -0,0 +1,28 b''
1
2 ==============================
3 Generate the Nix expressions
4 ==============================
5
6 Details can be found in the `RhodeCode Enterprise CE`_ repository, in the file
7 `docs/contributing/dependencies.rst`.
8
9 Start the environment as follows:
10
11 .. code:: shell
12
13 nix-shell pkgs/shell-generate.nix
14
15
16 Python dependencies
17 ===================
18
19 .. code:: shell
20
21 pip2nix generate --licenses
22 # or faster
23 nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
24
25
26 .. Links
27
28 .. _RhodeCode Enterprise CE: https://code.rhodecode.com/rhodecode-enterprise-ce
@@ -0,0 +1,17 b''
1 { pkgs
2 , pythonPackages
3 }:
4
5 rec {
6 pip2nix-src = pkgs.fetchzip {
7 url = https://github.com/johbo/pip2nix/archive/51e6fdae34d0e8ded9efeef7a8601730249687a6.tar.gz;
8 sha256 = "02a4jjgi7lsvf8mhrxsd56s9a3yg20081rl9bgc2m84w60v2gbz2";
9 };
10
11 pip2nix = import pip2nix-src {
12 inherit
13 pkgs
14 pythonPackages;
15 };
16
17 }
@@ -0,0 +1,45 b''
1 self: super: {
2 # bump GIT version
3 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 name = "git-2.17.1";
5 src = self.fetchurl {
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.17.1.tar.xz";
7 sha256 = "0pm6bdnrrm165k3krnazxcxadifk2gqi30awlbcf9fism1x6w4vr";
8 };
9
10 patches = [
11 ./git_patches/docbook2texi.patch
12 ./git_patches/symlinks-in-bin.patch
13 ./git_patches/git-sh-i18n.patch
14 ./git_patches/ssh-path.patch
15 ];
16
17 });
18
19 # Override subversion derivation to
20 # - activate python bindings
21 subversion =
22 let
23 subversionWithPython = super.subversion.override {
24 httpSupport = true;
25 pythonBindings = true;
26 python = self.python27Packages.python;
27 };
28 in
29 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
30 name = "subversion-1.10.2";
31 src = self.fetchurl {
32 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
33 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
34 };
35
36 ## use internal lz4/utf8proc because it is stable and shipped with SVN
37 configureFlags = oldAttrs.configureFlags ++ [
38 " --with-lz4=internal"
39 " --with-utf8proc=internal"
40 ];
41
42
43 });
44
45 }
@@ -0,0 +1,41 b''
1 { pkgs ? (import <nixpkgs> {})
2 , pythonPackages ? "python27Packages"
3 }:
4
5 with pkgs.lib;
6
7 let _pythonPackages = pythonPackages; in
8 let
9 pythonPackages = getAttr _pythonPackages pkgs;
10
11 pip2nix = import ./nix-common/pip2nix.nix {
12 inherit
13 pkgs
14 pythonPackages;
15 };
16
17 in
18
19 pkgs.stdenv.mkDerivation {
20 name = "pip2nix-generated";
21 buildInputs = [
22 pip2nix.pip2nix
23 pythonPackages.pip-tools
24 pkgs.apr
25 pkgs.aprutil
26 ];
27
28 shellHook = ''
29 runHook preShellHook
30 echo "Setting SVN_* variables"
31 export SVN_LIBRARY_PATH=${pkgs.subversion}/lib
32 export SVN_HEADER_PATH=${pkgs.subversion.dev}/include
33 runHook postShellHook
34 '';
35
36 preShellHook = ''
37 echo "Starting Generate Shell"
38 # Custom prompt to distinguish from other dev envs.
39 export PS1="\n\[\033[1;32m\][Generate-shell:\w]$\[\033[0m\] "
40 '';
41 }
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,146 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 import os
22 import time
23 import datetime
24 import msgpack
25 import logging
26 import traceback
27 import tempfile
28
29
30 log = logging.getLogger(__name__)
31
32 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
33 global_prefix = 'vcsserver'
34
35
36 def exc_serialize(exc_id, tb, exc_type):
37
38 data = {
39 'version': 'v1',
40 'exc_id': exc_id,
41 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
42 'exc_timestamp': repr(time.time()),
43 'exc_message': tb,
44 'exc_type': exc_type,
45 }
46 return msgpack.packb(data), data
47
48
49 def exc_unserialize(tb):
50 return msgpack.unpackb(tb)
51
52
53 def get_exc_store():
54 """
55 Get the exception store path, creating the directory if it does not exist
56 """
57 exc_store_dir = 'rc_exception_store_v1'
58 # fallback
59 _exc_store_path = os.path.join(tempfile.gettempdir(), exc_store_dir)
60
61 exc_store_dir = '' # TODO: need a persistent cross instance store here
62 if exc_store_dir:
63 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir)
64
65 _exc_store_path = os.path.abspath(_exc_store_path)
66 if not os.path.isdir(_exc_store_path):
67 os.makedirs(_exc_store_path)
68 log.debug('Initializing exceptions store at %s', _exc_store_path)
69 return _exc_store_path
70
71
72 def _store_exception(exc_id, exc_info, prefix):
73 exc_type, exc_value, exc_traceback = exc_info
74 tb = ''.join(traceback.format_exception(
75 exc_type, exc_value, exc_traceback, None))
76
77 exc_type_name = exc_type.__name__
78 exc_store_path = get_exc_store()
79 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
80 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
81 if not os.path.isdir(exc_store_path):
82 os.makedirs(exc_store_path)
83 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
84 with open(stored_exc_path, 'wb') as f:
85 f.write(exc_data)
86 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
87
88
89 def store_exception(exc_id, exc_info, prefix=global_prefix):
90 try:
91 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
92 except Exception:
93 log.exception('Failed to store exception `%s` information', exc_id)
94 # this must never raise; an unhandled error here would crash the server badly.
95 pass
96
97
98 def _find_exc_file(exc_id, prefix=global_prefix):
99 exc_store_path = get_exc_store()
100 if prefix:
101 exc_id = '{}_{}'.format(exc_id, prefix)
102 else:
103 # search without a prefix
104 exc_id = '{}'.format(exc_id)
105
106 # search the store for a file name starting with the pattern built above
107 for fname in os.listdir(exc_store_path):
108 if fname.startswith(exc_id):
109 exc_id = os.path.join(exc_store_path, fname)
110 break
111 continue
112 else:
113 exc_id = None
114
115 return exc_id
116
117
118 def _read_exception(exc_id, prefix):
119 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
120 if exc_id_file_path:
121 with open(exc_id_file_path, 'rb') as f:
122 return exc_unserialize(f.read())
123 else:
124 log.debug('Exception File `%s` not found', exc_id_file_path)
125 return None
126
127
128 def read_exception(exc_id, prefix=global_prefix):
129 try:
130 return _read_exception(exc_id=exc_id, prefix=prefix)
131 except Exception:
132 log.exception('Failed to read exception `%s` information', exc_id)
132 # this must never raise; an unhandled error here would crash the server badly.
134 return None
135
136
137 def delete_exception(exc_id, prefix=global_prefix):
138 try:
139 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
140 if exc_id_file_path:
141 os.remove(exc_id_file_path)
142
143 except Exception:
144 log.exception('Failed to remove exception `%s` information', exc_id)
145 # this must never raise; an unhandled error here would crash the server badly.
146 pass
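
A short usage sketch of the store above, assuming the file is importable as `vcsserver.lib.exc_tracking` (the module path is not visible in this diff): exceptions are serialized with msgpack into per-exception files under the temp-dir store and can later be read back or deleted by id.

import sys
import uuid

from vcsserver.lib import exc_tracking  # assumed import path


def fail():
    raise ValueError('boom')


try:
    fail()
except Exception:
    exc_id = uuid.uuid4().hex  # any unique id works; uuid is only for the demo
    exc_tracking.store_exception(exc_id, sys.exc_info())

data = exc_tracking.read_exception(exc_id)
if data:
    print(data['exc_type'])     # 'ValueError'
    print(data['exc_message'])  # the full formatted traceback text

exc_tracking.delete_exception(exc_id)
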
@@ -0,0 +1,65 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 import logging
22
23 from repoze.lru import LRUCache
24
25 from vcsserver.utils import safe_str
26
27 log = logging.getLogger(__name__)
28
29
30 class LRUDict(LRUCache):
31 """
32 Wrapper to provide partial dict access
33 """
34
35 def __setitem__(self, key, value):
36 return self.put(key, value)
37
38 def __getitem__(self, key):
39 return self.get(key)
40
41 def __contains__(self, key):
42 return bool(self.get(key))
43
44 def __delitem__(self, key):
45 del self.data[key]
46
47 def keys(self):
48 return self.data.keys()
49
50
51 class LRUDictDebug(LRUDict):
52 """
53 Wrapper to provide some debug options
54 """
55 def _report_keys(self):
56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
57 # build a multi-line listing so the keys print more nicely
58 fmt = '\n'
59 for cnt, elem in enumerate(self.keys()):
60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
61 log.debug('current LRU keys (%s):%s' % (elems_cnt, fmt))
62
63 def __getitem__(self, key):
64 self._report_keys()
65 return self.get(key)
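
The two classes above behave like a size-bounded dict on top of repoze.lru. A quick sketch, using the `vcsserver.lib.memory_lru_dict` path that the cache backend further below imports:

from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug

cache = LRUDict(2)           # keep at most 2 entries
cache['a'] = 1
cache['b'] = 2
cache['c'] = 3               # evicts the least recently used key ('a' here)

print('a' in cache)          # False - it was evicted
print(cache['b'])            # 2
print(list(cache.keys()))    # the two surviving keys
del cache['c']

debug_cache = LRUDictDebug(2)   # same behaviour, but logs current keys on item access
debug_cache['x'] = 1
print(debug_cache['x'])

Note that __contains__ is truthiness-based (bool(self.get(key))), so entries holding falsy values read as absent.
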
@@ -0,0 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19 from dogpile.cache import register_backend
20
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
24
25 log = logging.getLogger(__name__)
26
27 from . import region_meta
28 from .util import key_generator, get_default_cache_settings, make_region
29
30
31 def configure_dogpile_cache(settings):
32 cache_dir = settings.get('cache_dir')
33 if cache_dir:
34 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
35
36 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
37
38 # inspect available namespaces
39 avail_regions = set()
40 for key in rc_cache_data.keys():
41 namespace_name = key.split('.', 1)[0]
42 avail_regions.add(namespace_name)
43 log.debug('dogpile: found following cache regions: %s', avail_regions)
44
45 # register each namespace as a cache region
46 for region_name in avail_regions:
47 new_region = make_region(
48 name=region_name,
49 function_key_generator=key_generator
50 )
51
52 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
53
54 log.debug('dogpile: registering a new region %s[%s]',
55 region_name, new_region.__dict__)
56 region_meta.dogpile_cache_regions[region_name] = new_region
57
58
59 def includeme(config):
60 configure_dogpile_cache(config.registry.settings)
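
Outside of Pyramid's includeme, the same wiring can be driven directly from a settings dict shaped like the `rc_cache.*` keys in the .ini files below. A sketch; the `vcsserver.lib.rc_cache` import path is taken from the register_backend call above, and the cache_dir value is a placeholder:

from vcsserver.lib import rc_cache
from vcsserver.lib.rc_cache import region_meta

settings = {
    'cache_dir': '/tmp/rc_cache_demo',  # placeholder path
    'rc_cache.repo_object.backend': 'dogpile.cache.rc.memory_lru',
    'rc_cache.repo_object.expiration_time': 300,
    'rc_cache.repo_object.max_size': 100,
}

rc_cache.configure_dogpile_cache(settings)

# each rc_cache.<name>.* group becomes a configured dogpile region
region = region_meta.dogpile_cache_regions['repo_object']
region.set('some_key', 'some_value')
print(region.get('some_key'))  # 'some_value'
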
@@ -0,0 +1,51 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19
20 from dogpile.cache.backends import memory as memory_backend
21 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
22
23
24 _default_max_size = 1024
25
26 log = logging.getLogger(__name__)
27
28
29 class LRUMemoryBackend(memory_backend.MemoryBackend):
30 pickle_values = False
31
32 def __init__(self, arguments):
33 max_size = arguments.pop('max_size', _default_max_size)
34
35 LRUDictClass = LRUDict
36 if arguments.pop('log_key_count', None):
37 LRUDictClass = LRUDictDebug
38
39 arguments['cache_dict'] = LRUDictClass(max_size)
40 super(LRUMemoryBackend, self).__init__(arguments)
41
42 def delete(self, key):
43 try:
44 del self._cache[key]
45 except KeyError:
46 # we don't care if key isn't there at deletion
47 pass
48
49 def delete_multi(self, keys):
50 for key in keys:
51 self.delete(key)
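
The backend can also be exercised on its own, without a region, which makes its two deviations from the stock memory backend visible: values are kept by reference (pickle_values = False) and deletes of missing keys are ignored. A small sketch; the argument names are the ones popped in __init__ above:

from dogpile.cache.api import NO_VALUE

from vcsserver.lib.rc_cache.backends import LRUMemoryBackend

backend = LRUMemoryBackend({'max_size': 2, 'log_key_count': True})

backend.set('k1', 'v1')            # stored by reference, no pickling
print(backend.get('k1'))           # 'v1'

backend.delete('missing-key')      # silently ignored, no KeyError
backend.delete_multi(['k1', 'k1']) # idempotent deletes
print(backend.get('k1') is NO_VALUE)  # True
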
@@ -0,0 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import tempfile
20
21 dogpile_config_defaults = {
22 'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
23 }
24
25 # GLOBAL TO STORE ALL REGISTERED REGIONS
26 dogpile_cache_regions = {}
@@ -0,0 +1,136 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import logging
20 import functools
21
22 from vcsserver.utils import safe_str, sha1
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
25
26 log = logging.getLogger(__name__)
27
28
29 class RhodeCodeCacheRegion(CacheRegion):
30
31 def conditional_cache_on_arguments(
32 self, namespace=None,
33 expiration_time=None,
34 should_cache_fn=None,
35 to_str=compat.string_type,
36 function_key_generator=None,
37 condition=True):
38 """
39 Custom conditional decorator that will not touch any dogpile internals if
40 the condition isn't met. This works a bit differently than should_cache_fn,
41 and it's faster in cases where we never want to compute cached values.
42 """
43 expiration_time_is_callable = compat.callable(expiration_time)
44
45 if function_key_generator is None:
46 function_key_generator = self.function_key_generator
47
48 def decorator(fn):
49 if to_str is compat.string_type:
50 # backwards compatible
51 key_generator = function_key_generator(namespace, fn)
52 else:
53 key_generator = function_key_generator(namespace, fn, to_str=to_str)
54
55 @functools.wraps(fn)
56 def decorate(*arg, **kw):
57 key = key_generator(*arg, **kw)
58
59 @functools.wraps(fn)
60 def creator():
61 return fn(*arg, **kw)
62
63 if not condition:
64 return creator()
65
66 timeout = expiration_time() if expiration_time_is_callable \
67 else expiration_time
68
69 return self.get_or_create(key, creator, timeout, should_cache_fn)
70
71 def invalidate(*arg, **kw):
72 key = key_generator(*arg, **kw)
73 self.delete(key)
74
75 def set_(value, *arg, **kw):
76 key = key_generator(*arg, **kw)
77 self.set(key, value)
78
79 def get(*arg, **kw):
80 key = key_generator(*arg, **kw)
81 return self.get(key)
82
83 def refresh(*arg, **kw):
84 key = key_generator(*arg, **kw)
85 value = fn(*arg, **kw)
86 self.set(key, value)
87 return value
88
89 decorate.set = set_
90 decorate.invalidate = invalidate
91 decorate.refresh = refresh
92 decorate.get = get
93 decorate.original = fn
94 decorate.key_generator = key_generator
95
96 return decorate
97
98 return decorator
99
100
101 def make_region(*arg, **kw):
102 return RhodeCodeCacheRegion(*arg, **kw)
103
104
105 def get_default_cache_settings(settings, prefixes=None):
106 prefixes = prefixes or []
107 cache_settings = {}
108 for key in settings.keys():
109 for prefix in prefixes:
110 if key.startswith(prefix):
111 name = key.split(prefix)[1].strip()
112 val = settings[key]
113 if isinstance(val, basestring):
114 val = val.strip()
115 cache_settings[name] = val
116 return cache_settings
117
118
119 def compute_key_from_params(*args):
120 """
121 Helper to compute key from given params to be used in cache manager
122 """
123 return sha1("_".join(map(safe_str, args)))
124
125
126 def key_generator(namespace, fn):
127 fname = fn.__name__
128
129 def generate_key(*args):
130 namespace_pref = namespace or 'default'
131 arg_key = compute_key_from_params(*args)
132 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
133
134 return final_key
135
136 return generate_key
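
A usage sketch of the decorator above. The module is imported as `.util` in the package __init__, so `vcsserver.lib.rc_cache.util` is assumed here; the plain `dogpile.cache.memory` backend and the `heavy` function are demo assumptions, not part of the commit.

from vcsserver.lib.rc_cache.util import make_region, key_generator

region = make_region(name='demo', function_key_generator=key_generator)
region.configure('dogpile.cache.memory', expiration_time=300)


@region.conditional_cache_on_arguments(namespace='demo_ns', condition=True)
def heavy(repo_id):
    print('computing %s' % repo_id)
    return repo_id * 2


heavy(21)             # computes and caches
heavy(21)             # served from cache, no "computing" line
heavy.invalidate(21)  # drops the cached entry
heavy.refresh(21)     # recomputes and re-caches


# with condition=False the decorator short-circuits and never touches dogpile
@region.conditional_cache_on_arguments(condition=False)
def always_fresh(x):
    return x + 1
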
@@ -1,5 +1,5 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.12.4
2 current_version = 4.13.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
@@ -5,12 +5,10 b' done = false'
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.12.4
13 version = 4.13.0
16
14
@@ -15,4 +15,4 b' test-clean:'
15 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
15 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
16
16
17 test-only:
17 test-only:
18 PYTHONHASHSEED=random py.test -vv -r xw --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
18 PYTHONHASHSEED=random py.test -vv -r xw -p no:sugar --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
@@ -1,1 +1,79 b''
1 development_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 0.0.0.0
10 port = 9900
11
12 use = egg:waitress#main
13
14
15 [app:main]
16 use = egg:rhodecode-vcsserver
17
18 pyramid.default_locale_name = en
19 pyramid.includes =
20
21 ## default locale used by VCS systems
22 locale = en_US.UTF-8
23
24
25 ## path to binaries for vcsserver; it should be set by the installer
26 ## at installation time, e.g. /home/user/vcsserver-1/profile/bin
27 core.binary_dir = ""
28
29 ## cache region for storing repo_objects cache
30 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
31 ## cache auto-expires after N seconds
32 rc_cache.repo_object.expiration_time = 300
33 ## max size of the LRU cache; old values are discarded once the cache size reaches max_size
34 rc_cache.repo_object.max_size = 100
35
36
37 ################################
38 ### LOGGING CONFIGURATION ####
39 ################################
40 [loggers]
41 keys = root, vcsserver
42
43 [handlers]
44 keys = console
45
46 [formatters]
47 keys = generic
48
49 #############
50 ## LOGGERS ##
51 #############
52 [logger_root]
53 level = NOTSET
54 handlers = console
55
56 [logger_vcsserver]
57 level = DEBUG
58 handlers =
59 qualname = vcsserver
60 propagate = 1
61
62
63 ##############
64 ## HANDLERS ##
65 ##############
66
67 [handler_console]
68 class = StreamHandler
69 args = (sys.stderr,)
70 level = DEBUG
71 formatter = generic
72
73 ################
74 ## FORMATTERS ##
75 ################
76
77 [formatter_generic]
78 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
79 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1 +1,100 b''
1 production_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 127.0.0.1
10 port = 9900
11
12
13 ##########################
14 ## GUNICORN WSGI SERVER ##
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers
20 workers = 2
21 ## process name
22 proc_name = rhodecode_vcsserver
23 ## type of worker class; currently `sync` is the only allowed option.
24 worker_class = sync
25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 #worker_connections = 10
27 ## max number of requests a worker will handle before being gracefully
28 ## restarted; this can help prevent memory leaks
29 max_requests = 1000
30 max_requests_jitter = 30
31 ## amount of time a worker can spend handling a request before it
32 ## gets killed and restarted. Set to 6 hours.
33 timeout = 21600
34
35
36 [app:main]
37 use = egg:rhodecode-vcsserver
38
39 pyramid.default_locale_name = en
40 pyramid.includes =
41
42 ## default locale used by VCS systems
43 locale = en_US.UTF-8
44
45
46 ## path to binaries for vcsserver; it should be set by the installer
47 ## at installation time, e.g. /home/user/vcsserver-1/profile/bin
48 core.binary_dir = ""
49
50 ## cache region for storing repo_objects cache
51 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
52 ## cache auto-expires after N seconds
53 rc_cache.repo_object.expiration_time = 300
54 ## max size of the LRU cache; old values are discarded once the cache size reaches max_size
55 rc_cache.repo_object.max_size = 100
56
57
58 ################################
59 ### LOGGING CONFIGURATION ####
60 ################################
61 [loggers]
62 keys = root, vcsserver
63
64 [handlers]
65 keys = console
66
67 [formatters]
68 keys = generic
69
70 #############
71 ## LOGGERS ##
72 #############
73 [logger_root]
74 level = NOTSET
75 handlers = console
76
77 [logger_vcsserver]
78 level = DEBUG
79 handlers =
80 qualname = vcsserver
81 propagate = 1
82
83
84 ##############
85 ## HANDLERS ##
86 ##############
87
88 [handler_console]
89 class = StreamHandler
90 args = (sys.stderr,)
91 level = DEBUG
92 formatter = generic
93
94 ################
95 ## FORMATTERS ##
96 ################
97
98 [formatter_generic]
99 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
100 datefmt = %Y-%m-%d %H:%M:%S
@@ -4,163 +4,175 b''
4 # derivation. For advanced tweaks to pimp up the development environment we use
4 # derivation. For advanced tweaks to pimp up the development environment we use
5 # "shell.nix" so that it does not have to clutter this file.
5 # "shell.nix" so that it does not have to clutter this file.
6
6
7 { pkgs ? (import <nixpkgs> {})
7 args@
8 , pythonPackages ? "python27Packages"
8 { pythonPackages ? "python27Packages"
9 , pythonExternalOverrides ? self: super: {}
9 , pythonExternalOverrides ? self: super: {}
10 , doCheck ? true
10 , doCheck ? false
11 , ...
11 }:
12 }:
12
13
13 let pkgs_ = pkgs; in
14 let pkgs_ = (import <nixpkgs> {}); in
14
15
15 let
16 let
16 pkgs = pkgs_.overridePackages (self: super: {
17 # bump GIT version
18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 name = "git-2.16.4";
20 src = pkgs.fetchurl {
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.16.4.tar.xz";
22 sha256 = "0cnmidjvbdf81mybcvxvl0c2r2x2nvq2jj2dl59dmrc7qklv0sbf";
23 };
24
25 patches = [
26 ./pkgs/git_patches/docbook2texi.patch
27 ./pkgs/git_patches/symlinks-in-bin.patch
28 ./pkgs/git_patches/git-sh-i18n.patch
29 ./pkgs/git_patches/ssh-path.patch
30 ];
31
32 });
33
17
34 # Override subversion derivation to
18 # TODO: Currently we ignore the passed in pkgs, instead we should use it
35 # - activate python bindings
19 # somehow as a base and apply overlays to it.
36 subversion = let
20 pkgs = import <nixpkgs> {
37 subversionWithPython = super.subversion.override {
21 overlays = [
38 httpSupport = true;
22 (import ./pkgs/overlays.nix)
39 pythonBindings = true;
23 ];
40 python = self.python27Packages.python;
24 inherit (pkgs_)
41 };
25 system;
42
26 };
43 in
44
27
45 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
28 # Works with the new python-packages, still can fallback to the old
46 name = "subversion-1.9.7";
29 # variant.
47 src = pkgs.fetchurl {
30 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
48 url = "https://www.apache.org/dist/subversion/subversion-1.9.7.tar.gz";
31 self: basePythonPackages.override (a: { inherit self; }));
49 sha256 = "0g3cs2h008z8ymgkhbk54jp87bjh7y049rn42igj881yi2f20an7";
50 };
51
52 });
53
54 });
55
32
56 inherit (pkgs.lib) fix extends;
33 # Evaluates to the last segment of a file system path.
57 basePythonPackages = with builtins; if isAttrs pythonPackages
34 basename = path: with pkgs.lib; last (splitString "/" path);
58 then pythonPackages
59 else getAttr pythonPackages pkgs;
60
35
61 elem = builtins.elem;
36 # source code filter used as arugment to builtins.filterSource.
62 basename = path: with pkgs.lib; last (splitString "/" path);
63 startsWith = prefix: full: let
64 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
65 in actualPrefix == prefix;
66
67 src-filter = path: type: with pkgs.lib;
37 src-filter = path: type: with pkgs.lib;
68 let
38 let
69 ext = last (splitString "." path);
39 ext = last (splitString "." path);
70 in
40 in
71 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
41 !builtins.elem (basename path) [
72 "node_modules" "build" "data" "tmp"] &&
42 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
73 !elem ext ["egg-info" "pyc"] &&
43 "bower_components" "node_modules"
74 !startsWith "result" path;
44 "build" "data" "result" "tmp"] &&
45 !builtins.elem ext ["egg-info" "pyc"] &&
46 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
47 # it would still be good to restore it since we want to ignore "result-*".
48 !hasPrefix "result" path;
75
49
50 sources =
51 let
52 inherit (pkgs.lib) all isString attrValues;
53 sourcesConfig = pkgs.config.rc.sources or {};
54 in
55 # Ensure that sources are configured as strings. Using a path
56 # would result in a copy into the nix store.
57 assert all isString (attrValues sourcesConfig);
58 sourcesConfig;
59
60 version = builtins.readFile "${rhodecode-vcsserver-src}/vcsserver/VERSION";
76 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
61 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
77
62
78 pythonGeneratedPackages = self: basePythonPackages.override (a: {
79 inherit self;
80 }) // (scopedImport {
81 self = self;
82 super = basePythonPackages;
83 inherit pkgs;
84 inherit (pkgs) fetchurl fetchgit;
85 } ./pkgs/python-packages.nix);
86
87 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
88 inherit basePythonPackages pkgs;
89 };
90
91 version = builtins.readFile ./vcsserver/VERSION;
92
93 pythonLocalOverrides = self: super: {
63 pythonLocalOverrides = self: super: {
94 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
64 rhodecode-vcsserver =
95 inherit doCheck version;
65 let
66 releaseName = "RhodeCodeVCSServer-${version}";
67 in super.rhodecode-vcsserver.override (attrs: {
68 inherit
69 doCheck
70 version;
96
71
97 name = "rhodecode-vcsserver-${version}";
72 name = "rhodecode-vcsserver-${version}";
98 releaseName = "RhodeCodeVCSServer-${version}";
73 releaseName = releaseName;
99 src = rhodecode-vcsserver-src;
74 src = rhodecode-vcsserver-src;
100 dontStrip = true; # prevent strip, we don't need it.
75 dontStrip = true; # prevent strip, we don't need it.
101
76
102 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
77 # expose following attributed outside
103 pkgs.git
104 pkgs.subversion
105 ]);
106
107 # TODO: johbo: Make a nicer way to expose the parts. Maybe
108 # pkgs/default.nix?
109 passthru = {
78 passthru = {
110 pythonPackages = self;
79 pythonPackages = self;
111 };
80 };
112
81
113 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
82 propagatedBuildInputs =
83 attrs.propagatedBuildInputs or [] ++ [
84 pkgs.git
85 pkgs.subversion
86 ];
87
88 # set some default locale env variables
89 LC_ALL = "en_US.UTF-8";
90 LOCALE_ARCHIVE =
91 if pkgs.stdenv.isLinux
92 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
93 else "";
94
95 # Add bin directory to path so that tests can find 'vcsserver'.
114 preCheck = ''
96 preCheck = ''
115 export PATH="$out/bin:$PATH"
97 export PATH="$out/bin:$PATH"
116 '';
98 '';
117
99
118 # put custom attrs here
100 # custom check phase for testing
119 checkPhase = ''
101 checkPhase = ''
120 runHook preCheck
102 runHook preCheck
121 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
103 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
122 runHook postCheck
104 runHook postCheck
123 '';
105 '';
124
106
107 postCheck = ''
108 echo "Cleanup of vcsserver/tests"
109 rm -rf $out/lib/${self.python.libPrefix}/site-packages/vcsserver/tests
110 '';
111
125 postInstall = ''
112 postInstall = ''
126 echo "Writing meta information for rccontrol to nix-support/rccontrol"
113 echo "Writing vcsserver meta information for rccontrol to nix-support/rccontrol"
127 mkdir -p $out/nix-support/rccontrol
114 mkdir -p $out/nix-support/rccontrol
128 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
115 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
129 echo "DONE: Meta information for rccontrol written"
116 echo "DONE: vcsserver meta information for rccontrol written"
117
118 mkdir -p $out/etc
119 cp configs/production.ini $out/etc
120 echo "DONE: saved vcsserver production.ini into $out/etc"
130
121
131 # python based programs need to be wrapped
122 # python based programs need to be wrapped
123 mkdir -p $out/bin
124 ln -s ${self.python}/bin/python $out/bin
132 ln -s ${self.pyramid}/bin/* $out/bin/
125 ln -s ${self.pyramid}/bin/* $out/bin/
133 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
126 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
134
127
135 # Symlink version control utilities
128 # Symlink version control utilities
136 #
137 # We ensure that always the correct version is available as a symlink.
129 # We ensure that always the correct version is available as a symlink.
138 # So that users calling them via the profile path will always use the
130 # So that users calling them via the profile path will always use the
139 # correct version.
131 # correct version.
140 ln -s ${self.python}/bin/python $out/bin
132
141 ln -s ${pkgs.git}/bin/git $out/bin
133 ln -s ${pkgs.git}/bin/git $out/bin
142 ln -s ${self.mercurial}/bin/hg $out/bin
134 ln -s ${self.mercurial}/bin/hg $out/bin
143 ln -s ${pkgs.subversion}/bin/svn* $out/bin
135 ln -s ${pkgs.subversion}/bin/svn* $out/bin
136 echo "DONE: created symlinks into $out/bin"
144
137
145 for file in $out/bin/*;
138 for file in $out/bin/*;
146 do
139 do
147 wrapProgram $file \
140 wrapProgram $file \
148 --set PATH $PATH \
141 --prefix PATH : $PATH \
149 --set PYTHONPATH $PYTHONPATH \
142 --prefix PYTHONPATH : $PYTHONPATH \
150 --set PYTHONHASHSEED random
143 --set PYTHONHASHSEED random
151 done
144 done
145 echo "DONE: vcsserver binary wrapping"
152
146
153 '';
147 '';
154
148
155 });
149 });
156 };
150 };
157
151
152 basePythonPackages = with builtins;
153 if isAttrs pythonPackages then
154 pythonPackages
155 else
156 getAttr pythonPackages pkgs;
157
158 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
159 inherit pkgs;
160 inherit (pkgs) fetchurl fetchgit fetchhg;
161 };
162
163 pythonVCSServerOverrides = import ./pkgs/python-packages-overrides.nix {
164 inherit pkgs basePythonPackages;
165 };
166
167
158 # Apply all overrides and fix the final package set
168 # Apply all overrides and fix the final package set
159 myPythonPackages =
169 myPythonPackagesUnfix = with pkgs.lib;
160 (fix
161 (extends pythonExternalOverrides
170 (extends pythonExternalOverrides
162 (extends pythonLocalOverrides
171 (extends pythonLocalOverrides
163 (extends pythonOverrides
172 (extends pythonVCSServerOverrides
164 pythonGeneratedPackages))));
173 (extends pythonGeneratedPackages
174 basePythonPackagesUnfix))));
175
176 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
165
177
166 in myPythonPackages.rhodecode-vcsserver
178 in myPythonPackages.rhodecode-vcsserver
@@ -4,57 +4,50 b''
4 # python-packages.nix. The main objective is to add needed dependencies of C
4 # python-packages.nix. The main objective is to add needed dependencies of C
5 # libraries and tweak the build instructions where needed.
5 # libraries and tweak the build instructions where needed.
6
6
7 { pkgs, basePythonPackages }:
7 { pkgs
8 , basePythonPackages
9 }:
8
10
9 let
11 let
10 sed = "sed -i";
12 sed = "sed -i";
13
11 in
14 in
12
15
13 self: super: {
16 self: super: {
14
17
15 Beaker = super.Beaker.override (attrs: {
18 "gevent" = super."gevent".override (attrs: {
16 patches = [
19 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
17 ./patch-beaker-lock-func-debug.diff
20 # NOTE: (marcink) odd requirements from gevent aren't set properly,
21 # thus we need to inject psutil manually
22 self."psutil"
18 ];
23 ];
19 });
24 });
20
25
21 subvertpy = super.subvertpy.override (attrs: {
26 "hgsubversion" = super."hgsubversion".override (attrs: {
22 # TODO: johbo: Remove the "or" once we drop 16.03 support
23 SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
24 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
27 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
28 pkgs.sqlite
29 #basePythonPackages.sqlite3
30 self.mercurial
31 ];
32 });
33
34 "subvertpy" = super."subvertpy".override (attrs: {
35 SVN_PREFIX = "${pkgs.subversion.dev}";
36 propagatedBuildInputs = [
37 pkgs.apr.dev
25 pkgs.aprutil
38 pkgs.aprutil
26 pkgs.subversion
39 pkgs.subversion
27 ];
40 ];
28 preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
29 ${sed} -e "s/'gcc'/'clang'/" setup.py
30 '';
31 });
41 });
32
42
33 hgsubversion = super.hgsubversion.override (attrs: {
43 "mercurial" = super."mercurial".override (attrs: {
34 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
44 propagatedBuildInputs = [
35 pkgs.sqlite
45 # self.python.modules.curses
36 basePythonPackages.sqlite3
37 ];
46 ];
38 });
47 });
39
48
40 mercurial = super.mercurial.override (attrs: {
49 # Avoid that base packages screw up the build process
41 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
50 inherit (basePythonPackages)
42 self.python.modules.curses
51 setuptools;
43 ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
44 pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
45 });
46
47 pyramid = super.pyramid.override (attrs: {
48 postFixup = ''
49 wrapPythonPrograms
50 # TODO: johbo: "wrapPython" adds this magic line which
51 # confuses pserve.
52 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
53 '';
54 });
55
56 # Avoid that setuptools is replaced, this leads to trouble
57 # with buildPythonPackage.
58 setuptools = basePythonPackages.setuptools;
59
52
60 }
53 }
This diff has been collapsed as it changes many lines (988 lines changed).
@@ -1,873 +1,943 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 Beaker = super.buildPythonPackage {
5
6 name = "Beaker-1.9.1";
6 self: super: {
7 buildInputs = with self; [];
7 "atomicwrites" = super.buildPythonPackage {
8 doCheck = false;
8 name = "atomicwrites-1.1.5";
9 propagatedBuildInputs = with self; [funcsigs];
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
12 md5 = "46fda0a164e2b0d24ccbda51a2310301";
13 };
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
17 };
18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.9.6";
20 buildInputs = with self; [];
21 doCheck = false;
9 doCheck = false;
22 propagatedBuildInputs = with self; [MarkupSafe];
23 src = fetchurl {
10 src = fetchurl {
24 url = "https://pypi.python.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
25 md5 = "6411537324b4dba0956aaa8109f3c77b";
12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
26 };
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
30 };
31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.7";
33 buildInputs = with self; [];
34 doCheck = false;
35 propagatedBuildInputs = with self; [MarkupSafe];
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
38 md5 = "5836cc997b1b773ef389bf6629c30e65";
39 };
13 };
40 meta = {
14 meta = {
41 license = [ pkgs.lib.licenses.mit ];
15 license = [ pkgs.lib.licenses.mit ];
42 };
16 };
43 };
17 };
44 MarkupSafe = super.buildPythonPackage {
18 "attrs" = super.buildPythonPackage {
45 name = "MarkupSafe-1.0";
19 name = "attrs-18.1.0";
46 buildInputs = with self; [];
47 doCheck = false;
20 doCheck = false;
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
21 src = fetchurl {
50 url = "https://pypi.python.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
51 md5 = "2fcedc9284d50e577b5192e8e3578355";
23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
52 };
53 meta = {
54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 };
56 };
57 PasteDeploy = super.buildPythonPackage {
58 name = "PasteDeploy-1.5.2";
59 buildInputs = with self; [];
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 md5 = "352b7205c78c8de4987578d19431af3b";
65 };
24 };
66 meta = {
25 meta = {
67 license = [ pkgs.lib.licenses.mit ];
26 license = [ pkgs.lib.licenses.mit ];
68 };
27 };
69 };
28 };
70 WebOb = super.buildPythonPackage {
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
71 name = "WebOb-1.7.4";
30 name = "backports.shutil-get-terminal-size-1.0.0";
72 buildInputs = with self; [];
73 doCheck = false;
31 doCheck = false;
74 propagatedBuildInputs = with self; [];
75 src = fetchurl {
32 src = fetchurl {
76 url = "https://pypi.python.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
77 md5 = "397e46892d7f199b1a07eb20a2d3d9bd";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
78 };
35 };
79 meta = {
36 meta = {
80 license = [ pkgs.lib.licenses.mit ];
37 license = [ pkgs.lib.licenses.mit ];
81 };
38 };
82 };
39 };
83 WebTest = super.buildPythonPackage {
40 "beautifulsoup4" = super.buildPythonPackage {
84 name = "WebTest-2.0.29";
41 name = "beautifulsoup4-4.6.3";
85 buildInputs = with self; [];
86 doCheck = false;
42 doCheck = false;
87 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
88 src = fetchurl {
43 src = fetchurl {
89 url = "https://pypi.python.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
90 md5 = "30b4cf0d340b9a5335fac4389e6f84fc";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
91 };
46 };
92 meta = {
47 meta = {
93 license = [ pkgs.lib.licenses.mit ];
48 license = [ pkgs.lib.licenses.mit ];
94 };
49 };
95 };
50 };
96 backports.shutil-get-terminal-size = super.buildPythonPackage {
51 "configobj" = super.buildPythonPackage {
97 name = "backports.shutil-get-terminal-size-1.0.0";
52 name = "configobj-5.0.6";
98 buildInputs = with self; [];
99 doCheck = false;
100 propagatedBuildInputs = with self; [];
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
103 md5 = "03267762480bd86b50580dc19dff3c66";
104 };
105 meta = {
106 license = [ pkgs.lib.licenses.mit ];
107 };
108 };
109 beautifulsoup4 = super.buildPythonPackage {
110 name = "beautifulsoup4-4.6.0";
111 buildInputs = with self; [];
112 doCheck = false;
53 doCheck = false;
113 propagatedBuildInputs = with self; [];
54 propagatedBuildInputs = [
55 self."six"
56 ];
114 src = fetchurl {
57 src = fetchurl {
115 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
116 md5 = "c17714d0f91a23b708a592cb3c697728";
59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
117 };
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
120 };
121 };
122 configobj = super.buildPythonPackage {
123 name = "configobj-5.0.6";
124 buildInputs = with self; [];
125 doCheck = false;
126 propagatedBuildInputs = with self; [six];
127 src = fetchurl {
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 };
60 };
131 meta = {
61 meta = {
132 license = [ pkgs.lib.licenses.bsdOriginal ];
62 license = [ pkgs.lib.licenses.bsdOriginal ];
133 };
63 };
134 };
64 };
135 cov-core = super.buildPythonPackage {
65 "cov-core" = super.buildPythonPackage {
136 name = "cov-core-1.15.0";
66 name = "cov-core-1.15.0";
137 buildInputs = with self; [];
138 doCheck = false;
67 doCheck = false;
139 propagatedBuildInputs = with self; [coverage];
68 propagatedBuildInputs = [
69 self."coverage"
70 ];
140 src = fetchurl {
71 src = fetchurl {
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
143 };
74 };
144 meta = {
75 meta = {
145 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
146 };
77 };
147 };
78 };
148 coverage = super.buildPythonPackage {
79 "coverage" = super.buildPythonPackage {
149 name = "coverage-3.7.1";
80 name = "coverage-3.7.1";
150 buildInputs = with self; [];
151 doCheck = false;
81 doCheck = false;
152 propagatedBuildInputs = with self; [];
153 src = fetchurl {
82 src = fetchurl {
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
83 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
84 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
156 };
85 };
157 meta = {
86 meta = {
158 license = [ pkgs.lib.licenses.bsdOriginal ];
87 license = [ pkgs.lib.licenses.bsdOriginal ];
159 };
88 };
160 };
89 };
161 decorator = super.buildPythonPackage {
90 "decorator" = super.buildPythonPackage {
162 name = "decorator-4.1.2";
91 name = "decorator-4.1.2";
163 buildInputs = with self; [];
164 doCheck = false;
92 doCheck = false;
165 propagatedBuildInputs = with self; [];
166 src = fetchurl {
93 src = fetchurl {
167 url = "https://pypi.python.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
168 md5 = "a0f7f4fe00ae2dde93494d90c192cf8c";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
169 };
96 };
170 meta = {
97 meta = {
171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 };
99 };
173 };
100 };
174 dulwich = super.buildPythonPackage {
101 "dogpile.cache" = super.buildPythonPackage {
175 name = "dulwich-0.13.0";
102 name = "dogpile.cache-0.6.6";
176 buildInputs = with self; [];
103 doCheck = false;
104 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
106 sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
107 };
108 meta = {
109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 };
111 };
112 "dogpile.core" = super.buildPythonPackage {
113 name = "dogpile.core-0.4.1";
177 doCheck = false;
114 doCheck = false;
178 propagatedBuildInputs = with self; [];
179 src = fetchurl {
115 src = fetchurl {
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 };
119 meta = {
120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 };
122 };
123 "dulwich" = super.buildPythonPackage {
124 name = "dulwich-0.13.0";
125 doCheck = false;
126 src = fetchurl {
127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
182 };
129 };
183 meta = {
130 meta = {
184 license = [ pkgs.lib.licenses.gpl2Plus ];
131 license = [ pkgs.lib.licenses.gpl2Plus ];
185 };
132 };
186 };
133 };
187 enum34 = super.buildPythonPackage {
134 "enum34" = super.buildPythonPackage {
188 name = "enum34-1.1.6";
135 name = "enum34-1.1.6";
189 buildInputs = with self; [];
190 doCheck = false;
136 doCheck = false;
191 propagatedBuildInputs = with self; [];
192 src = fetchurl {
137 src = fetchurl {
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
195 };
140 };
196 meta = {
141 meta = {
197 license = [ pkgs.lib.licenses.bsdOriginal ];
142 license = [ pkgs.lib.licenses.bsdOriginal ];
198 };
143 };
199 };
144 };
200 funcsigs = super.buildPythonPackage {
145 "funcsigs" = super.buildPythonPackage {
201 name = "funcsigs-1.0.2";
146 name = "funcsigs-1.0.2";
202 buildInputs = with self; [];
203 doCheck = false;
147 doCheck = false;
204 propagatedBuildInputs = with self; [];
205 src = fetchurl {
148 src = fetchurl {
206 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
207 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
208 };
151 };
209 meta = {
152 meta = {
210 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
211 };
154 };
212 };
155 };
213 gevent = super.buildPythonPackage {
156 "gevent" = super.buildPythonPackage {
214 name = "gevent-1.2.2";
157 name = "gevent-1.3.5";
215 buildInputs = with self; [];
216 doCheck = false;
158 doCheck = false;
217 propagatedBuildInputs = with self; [greenlet];
159 propagatedBuildInputs = [
160 self."greenlet"
161 ];
218 src = fetchurl {
162 src = fetchurl {
219 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
163 url = "https://files.pythonhosted.org/packages/e6/0a/fc345c6e6161f84484870dbcaa58e427c10bd9bdcd08a69bed3d6b398bf1/gevent-1.3.5.tar.gz";
220 md5 = "7f0baf355384fe5ff2ecf66853422554";
164 sha256 = "1w3gydxirgd2f60c5yv579w4903ds9s4g3587ik4jby97hgqc5bz";
221 };
165 };
222 meta = {
166 meta = {
223 license = [ pkgs.lib.licenses.mit ];
167 license = [ pkgs.lib.licenses.mit ];
224 };
168 };
225 };
169 };
226 gprof2dot = super.buildPythonPackage {
170 "gprof2dot" = super.buildPythonPackage {
227 name = "gprof2dot-2017.9.19";
171 name = "gprof2dot-2017.9.19";
228 buildInputs = with self; [];
229 doCheck = false;
172 doCheck = false;
230 propagatedBuildInputs = with self; [];
231 src = fetchurl {
173 src = fetchurl {
232 url = "https://pypi.python.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
233 md5 = "cda2d552bb0d0b9f16e6824a9aabd225";
175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
234 };
176 };
235 meta = {
177 meta = {
236 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
237 };
179 };
238 };
180 };
239 greenlet = super.buildPythonPackage {
181 "greenlet" = super.buildPythonPackage {
240 name = "greenlet-0.4.13";
182 name = "greenlet-0.4.13";
241 buildInputs = with self; [];
242 doCheck = false;
183 doCheck = false;
243 propagatedBuildInputs = with self; [];
244 src = fetchurl {
184 src = fetchurl {
245 url = "https://pypi.python.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
185 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
246 md5 = "6e0b9dd5385f81d478451ec8ed1d62b3";
186 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
187 };
188 meta = {
189 license = [ pkgs.lib.licenses.mit ];
190 };
191 };
192 "gunicorn" = super.buildPythonPackage {
193 name = "gunicorn-19.9.0";
194 doCheck = false;
195 src = fetchurl {
196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
247 };
198 };
248 meta = {
199 meta = {
249 license = [ pkgs.lib.licenses.mit ];
200 license = [ pkgs.lib.licenses.mit ];
250 };
201 };
251 };
202 };
252 gunicorn = super.buildPythonPackage {
203 "hg-evolve" = super.buildPythonPackage {
253 name = "gunicorn-19.7.1";
204 name = "hg-evolve-8.0.1";
254 buildInputs = with self; [];
205 doCheck = false;
206 src = fetchurl {
207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
209 };
210 meta = {
211 license = [ { fullName = "GPLv2+"; } ];
212 };
213 };
214 "hgsubversion" = super.buildPythonPackage {
215 name = "hgsubversion-1.9.2";
255 doCheck = false;
216 doCheck = false;
256 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = [
218 self."mercurial"
219 self."subvertpy"
220 ];
257 src = fetchurl {
221 src = fetchurl {
258 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
259 md5 = "174d3c3cd670a5be0404d84c484e590c";
223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
224 };
225 meta = {
226 license = [ pkgs.lib.licenses.gpl1 ];
227 };
228 };
229 "hupper" = super.buildPythonPackage {
230 name = "hupper-1.3";
231 doCheck = false;
232 src = fetchurl {
233 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
234 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
260 };
235 };
261 meta = {
236 meta = {
262 license = [ pkgs.lib.licenses.mit ];
237 license = [ pkgs.lib.licenses.mit ];
263 };
238 };
264 };
239 };
265 hg-evolve = super.buildPythonPackage {
240 "ipdb" = super.buildPythonPackage {
266 name = "hg-evolve-7.0.1";
241 name = "ipdb-0.11";
267 buildInputs = with self; [];
268 doCheck = false;
242 doCheck = false;
269 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = [
244 self."setuptools"
245 self."ipython"
246 ];
270 src = fetchurl {
247 src = fetchurl {
271 url = "https://pypi.python.org/packages/92/5c/4c216be1a08f326a12076b645f4892a2b0865810db1f4a0c9648f1f4c113/hg-evolve-7.0.1.tar.gz";
248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
272 md5 = "2dfa926846ea873a8406bababb06b277";
249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
273 };
250 };
274 meta = {
251 meta = {
275 license = [ { fullName = "GPLv2+"; } ];
252 license = [ pkgs.lib.licenses.bsdOriginal ];
276 };
253 };
277 };
254 };
278 hgsubversion = super.buildPythonPackage {
255 "ipython" = super.buildPythonPackage {
279 name = "hgsubversion-1.9";
256 name = "ipython-5.1.0";
280 buildInputs = with self; [];
281 doCheck = false;
257 doCheck = false;
282 propagatedBuildInputs = with self; [mercurial subvertpy];
258 propagatedBuildInputs = [
259 self."setuptools"
260 self."decorator"
261 self."pickleshare"
262 self."simplegeneric"
263 self."traitlets"
264 self."prompt-toolkit"
265 self."pygments"
266 self."pexpect"
267 self."backports.shutil-get-terminal-size"
268 self."pathlib2"
269 self."pexpect"
270 ];
283 src = fetchurl {
271 src = fetchurl {
284 url = "https://pypi.python.org/packages/db/26/7293a6c6b85e2a74ab452e9ba7f00b04ff0e440e6cd4f84131ac5d5e6b22/hgsubversion-1.9.tar.gz";
272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
285 md5 = "0c6f93ef12cc2e7fe67286f16bcc7211";
273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
286 };
274 };
287 meta = {
275 meta = {
288 license = [ pkgs.lib.licenses.gpl1 ];
276 license = [ pkgs.lib.licenses.bsdOriginal ];
289 };
277 };
290 };
278 };
291 hupper = super.buildPythonPackage {
279 "ipython-genutils" = super.buildPythonPackage {
292 name = "hupper-1.0";
280 name = "ipython-genutils-0.2.0";
293 buildInputs = with self; [];
294 doCheck = false;
281 doCheck = false;
295 propagatedBuildInputs = with self; [];
296 src = fetchurl {
282 src = fetchurl {
297 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
298 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
285 };
286 meta = {
287 license = [ pkgs.lib.licenses.bsdOriginal ];
288 };
289 };
290 "mako" = super.buildPythonPackage {
291 name = "mako-1.0.7";
292 doCheck = false;
293 propagatedBuildInputs = [
294 self."markupsafe"
295 ];
296 src = fetchurl {
297 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
298 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
299 };
299 };
300 meta = {
300 meta = {
301 license = [ pkgs.lib.licenses.mit ];
301 license = [ pkgs.lib.licenses.mit ];
302 };
302 };
303 };
303 };
304 infrae.cache = super.buildPythonPackage {
304 "markupsafe" = super.buildPythonPackage {
305 name = "infrae.cache-1.0.1";
305 name = "markupsafe-1.0";
306 buildInputs = with self; [];
307 doCheck = false;
306 doCheck = false;
308 propagatedBuildInputs = with self; [Beaker repoze.lru];
309 src = fetchurl {
307 src = fetchurl {
310 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
308 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
311 md5 = "b09076a766747e6ed2a755cc62088e32";
309 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
312 };
313 meta = {
314 license = [ pkgs.lib.licenses.zpt21 ];
315 };
316 };
317 ipdb = super.buildPythonPackage {
318 name = "ipdb-0.10.3";
319 buildInputs = with self; [];
320 doCheck = false;
321 propagatedBuildInputs = with self; [setuptools ipython];
322 src = fetchurl {
323 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
324 md5 = "def1f6ac075d54bdee07e6501263d4fa";
325 };
310 };
326 meta = {
311 meta = {
327 license = [ pkgs.lib.licenses.bsdOriginal ];
312 license = [ pkgs.lib.licenses.bsdOriginal ];
328 };
313 };
329 };
314 };
330 ipython = super.buildPythonPackage {
315 "mercurial" = super.buildPythonPackage {
331 name = "ipython-5.1.0";
316 name = "mercurial-4.6.2";
332 buildInputs = with self; [];
333 doCheck = false;
317 doCheck = false;
334 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
335 src = fetchurl {
318 src = fetchurl {
336 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
319 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
337 md5 = "47c8122420f65b58784cb4b9b4af35e3";
320 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
338 };
321 };
339 meta = {
322 meta = {
340 license = [ pkgs.lib.licenses.bsdOriginal ];
323 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
341 };
324 };
342 };
325 };
343 ipython-genutils = super.buildPythonPackage {
326 "mock" = super.buildPythonPackage {
344 name = "ipython-genutils-0.2.0";
327 name = "mock-1.0.1";
345 buildInputs = with self; [];
346 doCheck = false;
328 doCheck = false;
347 propagatedBuildInputs = with self; [];
348 src = fetchurl {
329 src = fetchurl {
349 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
330 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
350 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
331 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
351 };
332 };
352 meta = {
333 meta = {
353 license = [ pkgs.lib.licenses.bsdOriginal ];
334 license = [ pkgs.lib.licenses.bsdOriginal ];
354 };
335 };
355 };
336 };
356 mercurial = super.buildPythonPackage {
337 "more-itertools" = super.buildPythonPackage {
357 name = "mercurial-4.4.2";
338 name = "more-itertools-4.3.0";
358 buildInputs = with self; [];
359 doCheck = false;
339 doCheck = false;
360 propagatedBuildInputs = with self; [];
340 propagatedBuildInputs = [
341 self."six"
342 ];
361 src = fetchurl {
343 src = fetchurl {
362 url = "https://pypi.python.org/packages/d0/83/92a5fa662ba277128db305e39e7ea5a638f2f1cbbc6dc5fbf4c14aefae22/mercurial-4.4.2.tar.gz";
344 url = "https://files.pythonhosted.org/packages/88/ff/6d485d7362f39880810278bdc906c13300db05485d9c65971dec1142da6a/more-itertools-4.3.0.tar.gz";
363 md5 = "95769125cf7e9dbc341a983253acefcd";
345 sha256 = "17h3na0rdh8xq30w4b9pizgkdxmm51896bxw600x84jflg9vaxn4";
364 };
346 };
365 meta = {
347 meta = {
366 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
348 license = [ pkgs.lib.licenses.mit ];
367 };
349 };
368 };
350 };
369 mock = super.buildPythonPackage {
351 "msgpack-python" = super.buildPythonPackage {
370 name = "mock-1.0.1";
352 name = "msgpack-python-0.5.6";
371 buildInputs = with self; [];
372 doCheck = false;
353 doCheck = false;
373 propagatedBuildInputs = with self; [];
374 src = fetchurl {
354 src = fetchurl {
375 url = "https://pypi.python.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
355 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
376 md5 = "c3971991738caa55ec7c356bbc154ee2";
356 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
377 };
378 meta = {
379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 };
381 };
382 msgpack-python = super.buildPythonPackage {
383 name = "msgpack-python-0.4.8";
384 buildInputs = with self; [];
385 doCheck = false;
386 propagatedBuildInputs = with self; [];
387 src = fetchurl {
388 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
389 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
390 };
357 };
391 meta = {
358 meta = {
392 license = [ pkgs.lib.licenses.asl20 ];
359 license = [ pkgs.lib.licenses.asl20 ];
393 };
360 };
394 };
361 };
395 pathlib2 = super.buildPythonPackage {
362 "pastedeploy" = super.buildPythonPackage {
396 name = "pathlib2-2.3.0";
363 name = "pastedeploy-1.5.2";
397 buildInputs = with self; [];
398 doCheck = false;
364 doCheck = false;
399 propagatedBuildInputs = with self; [six scandir];
400 src = fetchurl {
365 src = fetchurl {
401 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
366 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
402 md5 = "89c90409d11fd5947966b6a30a47d18c";
367 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
368 };
369 meta = {
370 license = [ pkgs.lib.licenses.mit ];
371 };
372 };
373 "pathlib2" = super.buildPythonPackage {
374 name = "pathlib2-2.3.0";
375 doCheck = false;
376 propagatedBuildInputs = [
377 self."six"
378 self."scandir"
379 ];
380 src = fetchurl {
381 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
382 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
403 };
383 };
404 meta = {
384 meta = {
405 license = [ pkgs.lib.licenses.mit ];
385 license = [ pkgs.lib.licenses.mit ];
406 };
386 };
407 };
387 };
408 pexpect = super.buildPythonPackage {
388 "pexpect" = super.buildPythonPackage {
409 name = "pexpect-4.4.0";
389 name = "pexpect-4.6.0";
410 buildInputs = with self; [];
411 doCheck = false;
390 doCheck = false;
412 propagatedBuildInputs = with self; [ptyprocess];
391 propagatedBuildInputs = [
392 self."ptyprocess"
393 ];
413 src = fetchurl {
394 src = fetchurl {
414 url = "https://pypi.python.org/packages/fa/c3/60c0cbf96f242d0b47a82e9ca634dcd6dcb043832cf05e17540812e1c707/pexpect-4.4.0.tar.gz";
395 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
415 md5 = "e9b07f0765df8245ac72201d757baaef";
396 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
416 };
397 };
417 meta = {
398 meta = {
418 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
399 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
419 };
400 };
420 };
401 };
421 pickleshare = super.buildPythonPackage {
402 "pickleshare" = super.buildPythonPackage {
422 name = "pickleshare-0.7.4";
403 name = "pickleshare-0.7.4";
423 buildInputs = with self; [];
404 doCheck = false;
405 propagatedBuildInputs = [
406 self."pathlib2"
407 ];
408 src = fetchurl {
409 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
410 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
411 };
412 meta = {
413 license = [ pkgs.lib.licenses.mit ];
414 };
415 };
416 "plaster" = super.buildPythonPackage {
417 name = "plaster-1.0";
424 doCheck = false;
418 doCheck = false;
425 propagatedBuildInputs = with self; [pathlib2];
419 propagatedBuildInputs = [
420 self."setuptools"
421 ];
426 src = fetchurl {
422 src = fetchurl {
427 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
423 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
428 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
424 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
425 };
426 meta = {
427 license = [ pkgs.lib.licenses.mit ];
428 };
429 };
430 "plaster-pastedeploy" = super.buildPythonPackage {
431 name = "plaster-pastedeploy-0.6";
432 doCheck = false;
433 propagatedBuildInputs = [
434 self."pastedeploy"
435 self."plaster"
436 ];
437 src = fetchurl {
438 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
439 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
440 };
441 meta = {
442 license = [ pkgs.lib.licenses.mit ];
443 };
444 };
445 "pluggy" = super.buildPythonPackage {
446 name = "pluggy-0.6.0";
447 doCheck = false;
448 src = fetchurl {
449 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
450 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
429 };
451 };
430 meta = {
452 meta = {
431 license = [ pkgs.lib.licenses.mit ];
453 license = [ pkgs.lib.licenses.mit ];
432 };
454 };
433 };
455 };
434 plaster = super.buildPythonPackage {
456 "prompt-toolkit" = super.buildPythonPackage {
435 name = "plaster-1.0";
457 name = "prompt-toolkit-1.0.15";
436 buildInputs = with self; [];
437 doCheck = false;
458 doCheck = false;
438 propagatedBuildInputs = with self; [setuptools];
459 propagatedBuildInputs = [
460 self."six"
461 self."wcwidth"
462 ];
439 src = fetchurl {
463 src = fetchurl {
440 url = "https://pypi.python.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
464 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
441 md5 = "80e6beb4760c16fea31754babcc0576e";
465 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
442 };
466 };
443 meta = {
467 meta = {
444 license = [ pkgs.lib.licenses.mit ];
468 license = [ pkgs.lib.licenses.bsdOriginal ];
445 };
469 };
446 };
470 };
447 plaster-pastedeploy = super.buildPythonPackage {
471 "psutil" = super.buildPythonPackage {
448 name = "plaster-pastedeploy-0.4.2";
472 name = "psutil-5.4.6";
449 buildInputs = with self; [];
450 doCheck = false;
473 doCheck = false;
451 propagatedBuildInputs = with self; [PasteDeploy plaster];
452 src = fetchurl {
474 src = fetchurl {
453 url = "https://pypi.python.org/packages/2c/62/0daf9c0be958e785023e583e51baac15863699e956bfb3d448898d80edd8/plaster_pastedeploy-0.4.2.tar.gz";
475 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
454 md5 = "58fd7852002909378e818c9d5b71e90a";
476 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
455 };
456 meta = {
457 license = [ pkgs.lib.licenses.mit ];
458 };
459 };
460 prompt-toolkit = super.buildPythonPackage {
461 name = "prompt-toolkit-1.0.15";
462 buildInputs = with self; [];
463 doCheck = false;
464 propagatedBuildInputs = with self; [six wcwidth];
465 src = fetchurl {
466 url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
467 md5 = "8fe70295006dbc8afedd43e5eba99032";
468 };
477 };
469 meta = {
478 meta = {
470 license = [ pkgs.lib.licenses.bsdOriginal ];
479 license = [ pkgs.lib.licenses.bsdOriginal ];
471 };
480 };
472 };
481 };
473 ptyprocess = super.buildPythonPackage {
482 "ptyprocess" = super.buildPythonPackage {
474 name = "ptyprocess-0.5.2";
483 name = "ptyprocess-0.6.0";
475 buildInputs = with self; [];
476 doCheck = false;
484 doCheck = false;
477 propagatedBuildInputs = with self; [];
478 src = fetchurl {
485 src = fetchurl {
479 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
486 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
480 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
487 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
481 };
488 };
482 meta = {
489 meta = {
483 license = [ ];
490 license = [ ];
484 };
491 };
485 };
492 };
486 py = super.buildPythonPackage {
493 "py" = super.buildPythonPackage {
487 name = "py-1.5.2";
494 name = "py-1.5.3";
488 buildInputs = with self; [];
489 doCheck = false;
495 doCheck = false;
490 propagatedBuildInputs = with self; [];
491 src = fetchurl {
496 src = fetchurl {
492 url = "https://pypi.python.org/packages/90/e3/e075127d39d35f09a500ebb4a90afd10f9ef0a1d28a6d09abeec0e444fdd/py-1.5.2.tar.gz";
497 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
493 md5 = "279ca69c632069e1b71e11b14641ca28";
498 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
494 };
499 };
495 meta = {
500 meta = {
496 license = [ pkgs.lib.licenses.mit ];
501 license = [ pkgs.lib.licenses.mit ];
497 };
502 };
498 };
503 };
499 pygments = super.buildPythonPackage {
504 "pygments" = super.buildPythonPackage {
500 name = "pygments-2.2.0";
505 name = "pygments-2.2.0";
501 buildInputs = with self; [];
502 doCheck = false;
506 doCheck = false;
503 propagatedBuildInputs = with self; [];
504 src = fetchurl {
507 src = fetchurl {
505 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
508 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
506 md5 = "13037baca42f16917cbd5ad2fab50844";
509 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
507 };
510 };
508 meta = {
511 meta = {
509 license = [ pkgs.lib.licenses.bsdOriginal ];
512 license = [ pkgs.lib.licenses.bsdOriginal ];
510 };
513 };
511 };
514 };
512 pyramid = super.buildPythonPackage {
515 "pyramid" = super.buildPythonPackage {
513 name = "pyramid-1.9.1";
516 name = "pyramid-1.9.2";
514 buildInputs = with self; [];
515 doCheck = false;
517 doCheck = false;
516 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
518 propagatedBuildInputs = [
519 self."setuptools"
520 self."webob"
521 self."repoze.lru"
522 self."zope.interface"
523 self."zope.deprecation"
524 self."venusian"
525 self."translationstring"
526 self."pastedeploy"
527 self."plaster"
528 self."plaster-pastedeploy"
529 self."hupper"
530 ];
517 src = fetchurl {
531 src = fetchurl {
518 url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
532 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
519 md5 = "0163e19c58c2d12976a3b6fdb57e052d";
533 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
520 };
534 };
521 meta = {
535 meta = {
522 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 };
537 };
524 };
538 };
525 pyramid-jinja2 = super.buildPythonPackage {
539 "pyramid-mako" = super.buildPythonPackage {
526 name = "pyramid-jinja2-2.7";
540 name = "pyramid-mako-1.0.2";
527 buildInputs = with self; [];
528 doCheck = false;
541 doCheck = false;
529 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
542 propagatedBuildInputs = [
543 self."pyramid"
544 self."mako"
545 ];
530 src = fetchurl {
546 src = fetchurl {
531 url = "https://pypi.python.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
547 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
532 md5 = "c2f8b2cd7b73a6f1d9a311fcfaf4fb92";
548 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
533 };
549 };
534 meta = {
550 meta = {
535 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
551 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 };
552 };
537 };
553 };
538 pyramid-mako = super.buildPythonPackage {
554 "pytest" = super.buildPythonPackage {
539 name = "pyramid-mako-1.0.2";
555 name = "pytest-3.6.0";
540 buildInputs = with self; [];
541 doCheck = false;
542 propagatedBuildInputs = with self; [pyramid Mako];
543 src = fetchurl {
544 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
545 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
546 };
547 meta = {
548 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
549 };
550 };
551 pytest = super.buildPythonPackage {
552 name = "pytest-3.2.5";
553 buildInputs = with self; [];
554 doCheck = false;
556 doCheck = false;
555 propagatedBuildInputs = with self; [py setuptools];
557 propagatedBuildInputs = [
558 self."py"
559 self."six"
560 self."setuptools"
561 self."attrs"
562 self."more-itertools"
563 self."atomicwrites"
564 self."pluggy"
565 self."funcsigs"
566 ];
556 src = fetchurl {
567 src = fetchurl {
557 url = "https://pypi.python.org/packages/1f/f8/8cd74c16952163ce0db0bd95fdd8810cbf093c08be00e6e665ebf0dc3138/pytest-3.2.5.tar.gz";
568 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
558 md5 = "6dbe9bb093883f75394a689a1426ac6f";
569 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
559 };
560 meta = {
561 license = [ pkgs.lib.licenses.mit ];
562 };
563 };
564 pytest-catchlog = super.buildPythonPackage {
565 name = "pytest-catchlog-1.2.2";
566 buildInputs = with self; [];
567 doCheck = false;
568 propagatedBuildInputs = with self; [py pytest];
569 src = fetchurl {
570 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
571 md5 = "09d890c54c7456c818102b7ff8c182c8";
572 };
570 };
573 meta = {
571 meta = {
574 license = [ pkgs.lib.licenses.mit ];
572 license = [ pkgs.lib.licenses.mit ];
575 };
573 };
576 };
574 };
577 pytest-cov = super.buildPythonPackage {
575 "pytest-cov" = super.buildPythonPackage {
578 name = "pytest-cov-2.5.1";
576 name = "pytest-cov-2.5.1";
579 buildInputs = with self; [];
580 doCheck = false;
577 doCheck = false;
581 propagatedBuildInputs = with self; [pytest coverage];
578 propagatedBuildInputs = [
579 self."pytest"
580 self."coverage"
581 ];
582 src = fetchurl {
582 src = fetchurl {
583 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
583 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
584 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
585 };
585 };
586 meta = {
586 meta = {
587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 };
588 };
589 };
589 };
590 pytest-profiling = super.buildPythonPackage {
590 "pytest-profiling" = super.buildPythonPackage {
591 name = "pytest-profiling-1.2.11";
591 name = "pytest-profiling-1.3.0";
592 buildInputs = with self; [];
593 doCheck = false;
592 doCheck = false;
594 propagatedBuildInputs = with self; [six pytest gprof2dot];
593 propagatedBuildInputs = [
594 self."six"
595 self."pytest"
596 self."gprof2dot"
597 ];
595 src = fetchurl {
598 src = fetchurl {
596 url = "https://pypi.python.org/packages/c0/4a/b4aa786e93c07a86f1f87c581a36bf355a9e06a9da7e00dbd05047626bd2/pytest-profiling-1.2.11.tar.gz";
599 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
597 md5 = "9ef6b60248731be5d44477980408e8f7";
600 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
598 };
601 };
599 meta = {
602 meta = {
600 license = [ pkgs.lib.licenses.mit ];
603 license = [ pkgs.lib.licenses.mit ];
601 };
604 };
602 };
605 };
603 pytest-runner = super.buildPythonPackage {
606 "pytest-runner" = super.buildPythonPackage {
604 name = "pytest-runner-3.0";
607 name = "pytest-runner-4.2";
605 buildInputs = with self; [];
606 doCheck = false;
608 doCheck = false;
607 propagatedBuildInputs = with self; [];
608 src = fetchurl {
609 src = fetchurl {
609 url = "https://pypi.python.org/packages/65/b4/ae89338cd2d81e2cc54bd6db2e962bfe948f612303610d68ab24539ac2d1/pytest-runner-3.0.tar.gz";
610 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
610 md5 = "8f8363a52bbabc4cedd5e239beb2ba11";
611 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
611 };
612 };
612 meta = {
613 meta = {
613 license = [ pkgs.lib.licenses.mit ];
614 license = [ pkgs.lib.licenses.mit ];
614 };
615 };
615 };
616 };
616 pytest-sugar = super.buildPythonPackage {
617 "pytest-sugar" = super.buildPythonPackage {
617 name = "pytest-sugar-0.9.0";
618 name = "pytest-sugar-0.9.1";
618 buildInputs = with self; [];
619 doCheck = false;
619 doCheck = false;
620 propagatedBuildInputs = with self; [pytest termcolor];
620 propagatedBuildInputs = [
621 self."pytest"
622 self."termcolor"
623 ];
621 src = fetchurl {
624 src = fetchurl {
622 url = "https://pypi.python.org/packages/49/d8/c5ff6cca3ce2ebd8b73eec89779bf6b4a7737456a70e8ea4d44c1ff90f71/pytest-sugar-0.9.0.tar.gz";
625 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
623 md5 = "89fbff17277fa6a95a560a04b68cb9f9";
626 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
624 };
627 };
625 meta = {
628 meta = {
626 license = [ pkgs.lib.licenses.bsdOriginal ];
629 license = [ pkgs.lib.licenses.bsdOriginal ];
627 };
630 };
628 };
631 };
629 pytest-timeout = super.buildPythonPackage {
632 "pytest-timeout" = super.buildPythonPackage {
630 name = "pytest-timeout-1.2.0";
633 name = "pytest-timeout-1.2.1";
631 buildInputs = with self; [];
632 doCheck = false;
634 doCheck = false;
633 propagatedBuildInputs = with self; [pytest];
635 propagatedBuildInputs = [
636 self."pytest"
637 ];
634 src = fetchurl {
638 src = fetchurl {
635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
639 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
636 md5 = "83607d91aa163562c7ee835da57d061d";
640 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
637 };
641 };
638 meta = {
642 meta = {
639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
643 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
640 };
644 };
641 };
645 };
642 repoze.lru = super.buildPythonPackage {
646 "repoze.lru" = super.buildPythonPackage {
643 name = "repoze.lru-0.7";
647 name = "repoze.lru-0.7";
644 buildInputs = with self; [];
645 doCheck = false;
648 doCheck = false;
646 propagatedBuildInputs = with self; [];
647 src = fetchurl {
649 src = fetchurl {
648 url = "https://pypi.python.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
650 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
649 md5 = "c08cc030387e0b1fc53c5c7d964b35e2";
651 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
650 };
652 };
651 meta = {
653 meta = {
652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
654 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
653 };
655 };
654 };
656 };
655 rhodecode-vcsserver = super.buildPythonPackage {
657 "rhodecode-vcsserver" = super.buildPythonPackage {
656 name = "rhodecode-vcsserver-4.12.4";
658 name = "rhodecode-vcsserver-4.13.0";
657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
659 buildInputs = [
660 self."pytest"
661 self."py"
662 self."pytest-cov"
663 self."pytest-sugar"
664 self."pytest-runner"
665 self."pytest-profiling"
666 self."gprof2dot"
667 self."pytest-timeout"
668 self."mock"
669 self."webtest"
670 self."cov-core"
671 self."coverage"
672 self."configobj"
673 ];
658 doCheck = true;
674 doCheck = true;
659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
675 propagatedBuildInputs = [
676 self."configobj"
677 self."dogpile.cache"
678 self."dogpile.core"
679 self."decorator"
680 self."dulwich"
681 self."hgsubversion"
682 self."hg-evolve"
683 self."mako"
684 self."markupsafe"
685 self."mercurial"
686 self."msgpack-python"
687 self."pastedeploy"
688 self."psutil"
689 self."pyramid"
690 self."pyramid-mako"
691 self."pygments"
692 self."pathlib2"
693 self."repoze.lru"
694 self."simplejson"
695 self."subprocess32"
696 self."setproctitle"
697 self."subvertpy"
698 self."six"
699 self."translationstring"
700 self."webob"
701 self."zope.deprecation"
702 self."zope.interface"
703 self."gevent"
704 self."greenlet"
705 self."gunicorn"
706 self."waitress"
707 self."ipdb"
708 self."ipython"
709 self."pytest"
710 self."py"
711 self."pytest-cov"
712 self."pytest-sugar"
713 self."pytest-runner"
714 self."pytest-profiling"
715 self."gprof2dot"
716 self."pytest-timeout"
717 self."mock"
718 self."webtest"
719 self."cov-core"
720 self."coverage"
721 ];
660 src = ./.;
722 src = ./.;
661 meta = {
723 meta = {
662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
724 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
663 };
725 };
664 };
726 };
665 scandir = super.buildPythonPackage {
727 "scandir" = super.buildPythonPackage {
666 name = "scandir-1.7";
728 name = "scandir-1.9.0";
667 buildInputs = with self; [];
668 doCheck = false;
729 doCheck = false;
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
730 src = fetchurl {
671 url = "https://pypi.python.org/packages/13/bb/e541b74230bbf7a20a3949a2ee6631be299378a784f5445aa5d0047c192b/scandir-1.7.tar.gz";
731 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
672 md5 = "037e5f24d1a0e78b17faca72dea9555f";
732 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
673 };
733 };
674 meta = {
734 meta = {
675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
735 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 };
736 };
677 };
737 };
678 setuptools = super.buildPythonPackage {
738 "setproctitle" = super.buildPythonPackage {
679 name = "setuptools-30.1.0";
739 name = "setproctitle-1.1.10";
680 buildInputs = with self; [];
681 doCheck = false;
740 doCheck = false;
682 propagatedBuildInputs = with self; [];
683 src = fetchurl {
741 src = fetchurl {
684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
742 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
743 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
744 };
745 meta = {
746 license = [ pkgs.lib.licenses.bsdOriginal ];
747 };
748 };
749 "setuptools" = super.buildPythonPackage {
750 name = "setuptools-40.1.0";
751 doCheck = false;
752 src = fetchurl {
753 url = "https://files.pythonhosted.org/packages/5a/df/b2e3d9693bb0dcbeac516a73dd7a9eb82b126ae52e4a74605a9b01beddd5/setuptools-40.1.0.zip";
754 sha256 = "0w1blx5ajga5y15dci0mddk49cf2xpq0mp7rp7jrqr2diqk00ib6";
686 };
755 };
687 meta = {
756 meta = {
688 license = [ pkgs.lib.licenses.mit ];
757 license = [ pkgs.lib.licenses.mit ];
689 };
758 };
690 };
759 };
691 simplegeneric = super.buildPythonPackage {
760 "simplegeneric" = super.buildPythonPackage {
692 name = "simplegeneric-0.8.1";
761 name = "simplegeneric-0.8.1";
693 buildInputs = with self; [];
694 doCheck = false;
762 doCheck = false;
695 propagatedBuildInputs = with self; [];
696 src = fetchurl {
763 src = fetchurl {
697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
764 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
698 md5 = "f9c1fab00fd981be588fc32759f474e3";
765 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
699 };
766 };
700 meta = {
767 meta = {
701 license = [ pkgs.lib.licenses.zpt21 ];
768 license = [ pkgs.lib.licenses.zpl21 ];
702 };
769 };
703 };
770 };
704 simplejson = super.buildPythonPackage {
771 "simplejson" = super.buildPythonPackage {
705 name = "simplejson-3.11.1";
772 name = "simplejson-3.11.1";
706 buildInputs = with self; [];
707 doCheck = false;
773 doCheck = false;
708 propagatedBuildInputs = with self; [];
709 src = fetchurl {
774 src = fetchurl {
710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
775 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
776 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
712 };
777 };
713 meta = {
778 meta = {
714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
779 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
715 };
780 };
716 };
781 };
717 six = super.buildPythonPackage {
782 "six" = super.buildPythonPackage {
718 name = "six-1.11.0";
783 name = "six-1.11.0";
719 buildInputs = with self; [];
720 doCheck = false;
784 doCheck = false;
721 propagatedBuildInputs = with self; [];
722 src = fetchurl {
785 src = fetchurl {
723 url = "https://pypi.python.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
786 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
724 md5 = "d12789f9baf7e9fb2524c0c64f1773f8";
787 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
725 };
788 };
726 meta = {
789 meta = {
727 license = [ pkgs.lib.licenses.mit ];
790 license = [ pkgs.lib.licenses.mit ];
728 };
791 };
729 };
792 };
730 subprocess32 = super.buildPythonPackage {
793 "subprocess32" = super.buildPythonPackage {
731 name = "subprocess32-3.2.7";
794 name = "subprocess32-3.5.1";
732 buildInputs = with self; [];
733 doCheck = false;
795 doCheck = false;
734 propagatedBuildInputs = with self; [];
735 src = fetchurl {
796 src = fetchurl {
736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
797 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
737 md5 = "824c801e479d3e916879aae3e9c15e16";
798 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
738 };
799 };
739 meta = {
800 meta = {
740 license = [ pkgs.lib.licenses.psfl ];
801 license = [ pkgs.lib.licenses.psfl ];
741 };
802 };
742 };
803 };
743 subvertpy = super.buildPythonPackage {
804 "subvertpy" = super.buildPythonPackage {
744 name = "subvertpy-0.10.1";
805 name = "subvertpy-0.10.1";
745 buildInputs = with self; [];
746 doCheck = false;
806 doCheck = false;
747 propagatedBuildInputs = with self; [];
748 src = fetchurl {
807 src = fetchurl {
749 url = "https://pypi.python.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
808 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
750 md5 = "a70e03579902d480f5e9f8c570f6536b";
809 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
751 };
810 };
752 meta = {
811 meta = {
753 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
812 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
754 };
813 };
755 };
814 };
756 termcolor = super.buildPythonPackage {
815 "termcolor" = super.buildPythonPackage {
757 name = "termcolor-1.1.0";
816 name = "termcolor-1.1.0";
758 buildInputs = with self; [];
759 doCheck = false;
817 doCheck = false;
760 propagatedBuildInputs = with self; [];
761 src = fetchurl {
818 src = fetchurl {
762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
819 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
763 md5 = "043e89644f8909d462fbbfa511c768df";
820 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
764 };
821 };
765 meta = {
822 meta = {
766 license = [ pkgs.lib.licenses.mit ];
823 license = [ pkgs.lib.licenses.mit ];
767 };
824 };
768 };
825 };
769 traitlets = super.buildPythonPackage {
826 "traitlets" = super.buildPythonPackage {
770 name = "traitlets-4.3.2";
827 name = "traitlets-4.3.2";
771 buildInputs = with self; [];
772 doCheck = false;
828 doCheck = false;
773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
829 propagatedBuildInputs = [
830 self."ipython-genutils"
831 self."six"
832 self."decorator"
833 self."enum34"
834 ];
774 src = fetchurl {
835 src = fetchurl {
775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
836 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
837 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
777 };
838 };
778 meta = {
839 meta = {
779 license = [ pkgs.lib.licenses.bsdOriginal ];
840 license = [ pkgs.lib.licenses.bsdOriginal ];
780 };
841 };
781 };
842 };
782 translationstring = super.buildPythonPackage {
843 "translationstring" = super.buildPythonPackage {
783 name = "translationstring-1.3";
844 name = "translationstring-1.3";
784 buildInputs = with self; [];
785 doCheck = false;
845 doCheck = false;
786 propagatedBuildInputs = with self; [];
787 src = fetchurl {
846 src = fetchurl {
788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
847 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
848 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
790 };
849 };
791 meta = {
850 meta = {
792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
851 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
793 };
852 };
794 };
853 };
795 venusian = super.buildPythonPackage {
854 "venusian" = super.buildPythonPackage {
796 name = "venusian-1.1.0";
855 name = "venusian-1.1.0";
797 buildInputs = with self; [];
798 doCheck = false;
856 doCheck = false;
799 propagatedBuildInputs = with self; [];
800 src = fetchurl {
857 src = fetchurl {
801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
858 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
859 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
803 };
860 };
804 meta = {
861 meta = {
805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
862 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
806 };
863 };
807 };
864 };
808 waitress = super.buildPythonPackage {
865 "waitress" = super.buildPythonPackage {
809 name = "waitress-1.1.0";
866 name = "waitress-1.1.0";
810 buildInputs = with self; [];
811 doCheck = false;
867 doCheck = false;
812 propagatedBuildInputs = with self; [];
813 src = fetchurl {
868 src = fetchurl {
814 url = "https://pypi.python.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
869 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
815 md5 = "0f1eb7fdfdbf2e6d18decbda1733045c";
870 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
816 };
871 };
817 meta = {
872 meta = {
818 license = [ pkgs.lib.licenses.zpt21 ];
873 license = [ pkgs.lib.licenses.zpl21 ];
819 };
874 };
820 };
875 };
821 wcwidth = super.buildPythonPackage {
876 "wcwidth" = super.buildPythonPackage {
822 name = "wcwidth-0.1.7";
877 name = "wcwidth-0.1.7";
823 buildInputs = with self; [];
824 doCheck = false;
878 doCheck = false;
825 propagatedBuildInputs = with self; [];
826 src = fetchurl {
879 src = fetchurl {
827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
880 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
881 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
882 };
883 meta = {
884 license = [ pkgs.lib.licenses.mit ];
885 };
886 };
887 "webob" = super.buildPythonPackage {
888 name = "webob-1.7.4";
889 doCheck = false;
890 src = fetchurl {
891 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
892 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
829 };
893 };
830 meta = {
894 meta = {
831 license = [ pkgs.lib.licenses.mit ];
895 license = [ pkgs.lib.licenses.mit ];
832 };
896 };
833 };
897 };
834 wheel = super.buildPythonPackage {
898 "webtest" = super.buildPythonPackage {
835 name = "wheel-0.29.0";
899 name = "webtest-2.0.29";
836 buildInputs = with self; [];
837 doCheck = false;
900 doCheck = false;
838 propagatedBuildInputs = with self; [];
901 propagatedBuildInputs = [
902 self."six"
903 self."webob"
904 self."waitress"
905 self."beautifulsoup4"
906 ];
839 src = fetchurl {
907 src = fetchurl {
840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
908 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
841 md5 = "555a67e4507cedee23a0deb9651e452f";
909 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
842 };
910 };
843 meta = {
911 meta = {
844 license = [ pkgs.lib.licenses.mit ];
912 license = [ pkgs.lib.licenses.mit ];
845 };
913 };
846 };
914 };
847 zope.deprecation = super.buildPythonPackage {
915 "zope.deprecation" = super.buildPythonPackage {
848 name = "zope.deprecation-4.3.0";
916 name = "zope.deprecation-4.3.0";
849 buildInputs = with self; [];
850 doCheck = false;
917 doCheck = false;
851 propagatedBuildInputs = with self; [setuptools];
918 propagatedBuildInputs = [
919 self."setuptools"
920 ];
852 src = fetchurl {
921 src = fetchurl {
853 url = "https://pypi.python.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
922 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
854 md5 = "2166b2cb7e0e96a21104e6f8f9b696bb";
923 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
855 };
924 };
856 meta = {
925 meta = {
857 license = [ pkgs.lib.licenses.zpt21 ];
926 license = [ pkgs.lib.licenses.zpl21 ];
858 };
927 };
859 };
928 };
860 zope.interface = super.buildPythonPackage {
929 "zope.interface" = super.buildPythonPackage {
861 name = "zope.interface-4.4.3";
930 name = "zope.interface-4.5.0";
862 buildInputs = with self; [];
863 doCheck = false;
931 doCheck = false;
864 propagatedBuildInputs = with self; [setuptools];
932 propagatedBuildInputs = [
933 self."setuptools"
934 ];
865 src = fetchurl {
935 src = fetchurl {
866 url = "https://pypi.python.org/packages/bd/d2/25349ed41f9dcff7b3baf87bd88a4c82396cf6e02f1f42bb68657a3132af/zope.interface-4.4.3.tar.gz";
936 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
867 md5 = "8700a4f527c1203b34b10c2b4e7a6912";
937 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
868 };
938 };
869 meta = {
939 meta = {
870 license = [ pkgs.lib.licenses.zpt21 ];
940 license = [ pkgs.lib.licenses.zpl21 ];
871 };
941 };
872 };
942 };
873
943
@@ -1,3 +1,8 @@
1 [pytest]
1 [pytest]
2 testpaths = ./vcsserver
2 testpaths = vcsserver
3 addopts = -v
3 norecursedirs = vcsserver/hook_utils/hook_templates
4 cache_dir = /tmp/.pytest_cache
5
6
7 addopts =
8 --pdbcls=IPython.terminal.debugger:TerminalPdb
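The new addopts above swaps pytest's default debugger for IPython's TerminalPdb using the "module:attribute" notation of --pdbcls. A minimal sanity check that the target resolves (assuming the IPython pinned in the requirements is installed; the variable name is illustrative):

from IPython.terminal.debugger import TerminalPdb

# pytest instantiates this class instead of pdb.Pdb whenever a test run
# drops into the debugger (for example with --pdb on a failure).
debugger = TerminalPdb()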
@@ -1,9 +1,10 @@
1 # This file defines how to "build" for packaging.
2
1 { pkgs ? import <nixpkgs> {}
3 { pkgs ? import <nixpkgs> {}
2 , doCheck ? true
4 , doCheck ? true
3 }:
5 }:
4
6
5 let
7 let
6
7 vcsserver = import ./default.nix {
8 vcsserver = import ./default.nix {
8 inherit
9 inherit
9 doCheck
10 doCheck
@@ -1,40 +1,45 @@
1 ## core
1 ## dependencies
2 setuptools==30.1.0
3
2
4 Beaker==1.9.1
3 # our custom configobj
5 configobj==5.0.6
4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
5 dogpile.cache==0.6.6
6 dogpile.core==0.4.1
6 decorator==4.1.2
7 decorator==4.1.2
7 dulwich==0.13.0
8 dulwich==0.13.0
8 hgsubversion==1.9.0
9 hgsubversion==1.9.2
9 hg-evolve==7.0.1
10 hg-evolve==8.0.1
10 infrae.cache==1.0.1
11 mako==1.0.7
11 mercurial==4.4.2
12 markupsafe==1.0.0
12 msgpack-python==0.4.8
13 mercurial==4.6.2
13 pyramid-jinja2==2.7
14 msgpack-python==0.5.6
14 Jinja2==2.9.6
15
15 pyramid==1.9.1
16 pastedeploy==1.5.2
17 psutil==5.4.6
18 pyramid==1.9.2
16 pyramid-mako==1.0.2
19 pyramid-mako==1.0.2
20
21 pygments==2.2.0
22 pathlib2==2.3.0
17 repoze.lru==0.7
23 repoze.lru==0.7
18 simplejson==3.11.1
24 simplejson==3.11.1
19 subprocess32==3.2.7
25 subprocess32==3.5.1
20
26 setproctitle==1.1.10
21 subvertpy==0.10.1
27 subvertpy==0.10.1
22
28
23 six==1.11.0
29 six==1.11.0
24 translationstring==1.3
30 translationstring==1.3
25 WebOb==1.7.4
31 webob==1.7.4
26 wheel==0.29.0
27 zope.deprecation==4.3.0
32 zope.deprecation==4.3.0
28 zope.interface==4.4.3
33 zope.interface==4.5.0
29
34
30 ## http servers
35 ## http servers
31 gevent==1.2.2
36 gevent==1.3.5
32 greenlet==0.4.13
37 greenlet==0.4.13
33 gunicorn==19.7.1
38 gunicorn==19.9.0
34 waitress==1.1.0
39 waitress==1.1.0
35
40
36 ## debug
41 ## debug
37 ipdb==0.10.3
42 ipdb==0.11.0
38 ipython==5.1.0
43 ipython==5.1.0
39
44
40 ## test related requirements
45 ## test related requirements
@@ -1,15 +1,14 @@
1 # test related requirements
1 # test related requirements
2 pytest==3.2.5
2 pytest==3.6.0
3 py==1.5.2
3 py==1.5.3
4 pytest-cov==2.5.1
4 pytest-cov==2.5.1
5 pytest-sugar==0.9.0
5 pytest-sugar==0.9.1
6 pytest-runner==3.0.0
6 pytest-runner==4.2.0
7 pytest-catchlog==1.2.2
7 pytest-profiling==1.3.0
8 pytest-profiling==1.2.11
9 gprof2dot==2017.9.19
8 gprof2dot==2017.9.19
10 pytest-timeout==1.2.0
9 pytest-timeout==1.2.1
11
10
12 mock==1.0.1
11 mock==1.0.1
13 WebTest==2.0.29
12 webtest==2.0.29
14 cov-core==1.15.0
13 cov-core==1.15.0
15 coverage==3.7.1
14 coverage==3.7.1
@@ -23,11 +23,18 @@ import os
23 import sys
23 import sys
24 import pkgutil
24 import pkgutil
25 import platform
25 import platform
26 import codecs
26
27
27 from pip.download import PipSession
28 try: # for pip >= 10
28 from pip.req import parse_requirements
29 from pip._internal.req import parse_requirements
30 except ImportError: # for pip <= 9.0.3
31 from pip.req import parse_requirements
29
32
30 from codecs import open
33 try: # for pip >= 10
34 from pip._internal.download import PipSession
35 except ImportError: # for pip <= 9.0.3
36 from pip.download import PipSession
37
31
38
32
39
33 if sys.version_info < (2, 7):
40 if sys.version_info < (2, 7):
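The try/except imports above keep setup.py working across the pip 10 module reshuffle. A self-contained sketch of how such imports are typically used to turn a pinned requirements file into install_requires entries; load_requirements is an illustrative helper, not part of this repository, and pip's internal module layout is an assumption since it is not a stable API:

try:  # pip >= 10
    from pip._internal.req import parse_requirements
except ImportError:  # pip <= 9.0.3
    from pip.req import parse_requirements

try:  # pip >= 10
    from pip._internal.download import PipSession
except ImportError:  # pip <= 9.0.3
    from pip.download import PipSession


def load_requirements(path):
    # parse_requirements yields InstallRequirement objects; str(item.req)
    # recovers the plain "name==version" pin for install_requires.
    return [str(item.req)
            for item in parse_requirements(path, session=PipSession())]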
@@ -89,8 +96,8 @@ keywords = ' '.join([
89 readme_file = 'README.rst'
96 readme_file = 'README.rst'
90 changelog_file = 'CHANGES.rst'
97 changelog_file = 'CHANGES.rst'
91 try:
98 try:
92 long_description = open(readme_file).read() + '\n\n' + \
99 long_description = codecs.open(readme_file).read() + '\n\n' + \
93 open(changelog_file).read()
100 codecs.open(changelog_file).read()
94 except IOError as err:
101 except IOError as err:
95 sys.stderr.write(
102 sys.stderr.write(
96 "[WARNING] Cannot find file specified as long_description (%s)\n "
103 "[WARNING] Cannot find file specified as long_description (%s)\n "
@@ -106,7 +113,7 @@ setup(
106 keywords=keywords,
113 keywords=keywords,
107 license=__license__,
114 license=__license__,
108 author=__author__,
115 author=__author__,
109 author_email='marcin@rhodecode.com',
116 author_email='admin@rhodecode.com',
110 url=__url__,
117 url=__url__,
111 setup_requires=setup_requirements,
118 setup_requires=setup_requirements,
112 install_requires=install_requirements,
119 install_requires=install_requirements,
@@ -1,41 +1,67 @@
1 { pkgs ? import <nixpkgs> {}
1 # This file contains the adjustments which are desired for a development
2 # environment.
3
4 { pkgs ? (import <nixpkgs> {})
5 , pythonPackages ? "python27Packages"
2 , doCheck ? false
6 , doCheck ? false
3 }:
7 }:
4
8
5 let
9 let
6
10
7 vcsserver = import ./default.nix {
11 vcsserver = import ./default.nix {
8 inherit pkgs doCheck;
12 inherit
13 pkgs
14 doCheck;
9 };
15 };
10
16
11 vcs-pythonPackages = vcsserver.pythonPackages;
17 vcs-pythonPackages = vcsserver.pythonPackages;
12
18
13 in vcsserver.override (attrs: {
19 in vcsserver.override (attrs: {
14
15 # Avoid dumping any sources into the store when entering the shell, and
20 # Avoid dumping any sources into the store when entering the shell, and
16 # make development a little bit more convenient.
21 # make development a little bit more convenient.
17 src = null;
22 src = null;
18
23
24 # Add dependencies which are useful for the development environment.
19 buildInputs =
25 buildInputs =
20 attrs.buildInputs ++
26 attrs.buildInputs ++
21 (with vcs-pythonPackages; [
27 (with vcs-pythonPackages; [
22 ipdb
28 ipdb
23 ]);
29 ]);
24
30
25 # Somewhat snappier setup of the development environment
31 # place to inject some required libs from develop installs
26 # TODO: think of supporting a stable path again, so that multiple shells
32 propagatedBuildInputs =
27 # can share it.
33 attrs.propagatedBuildInputs ++
28 postShellHook = ''
34 [];
29 # Set locale
35
30 export LC_ALL="en_US.UTF-8"
36
37 # Make sure we execute both hooks
38 shellHook = ''
39 runHook preShellHook
40 runHook postShellHook
41 '';
42
43 preShellHook = ''
44 echo "Entering VCS-Shell"
31
45
32 # Custom prompt to distinguish from other dev envs.
46 # Custom prompt to distinguish from other dev envs.
33 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
47 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
34
48
49 # Set locale
50 export LC_ALL="en_US.UTF-8"
51
52 # Setup a temporary directory.
35 tmp_path=$(mktemp -d)
53 tmp_path=$(mktemp -d)
36 export PATH="$tmp_path/bin:$PATH"
54 export PATH="$tmp_path/bin:$PATH"
37 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
55 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
38 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
56 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
57
58 # Develop installation
59 echo "[BEGIN]: develop install of rhodecode-vcsserver"
39 python setup.py develop --prefix $tmp_path --allow-hosts ""
60 python setup.py develop --prefix $tmp_path --allow-hosts ""
40 '';
61 '';
62
63 postShellHook = ''
64
65 '';
66
41 })
67 })
@@ -1,1 +1,1 b''
1 4.12.4 No newline at end of file
1 4.13.0 No newline at end of file
@@ -20,6 +20,7 b' import traceback'
20 import logging
20 import logging
21 import urlparse
21 import urlparse
22
22
23 from vcsserver.lib.rc_cache import region_meta
23 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
24
25
25
26
@@ -30,9 +31,10 b' class RepoFactory(object):'
30 It provides internal caching of the `repo` object based on
31 It provides internal caching of the `repo` object based on
31 the :term:`call context`.
32 the :term:`call context`.
32 """
33 """
34 repo_type = None
33
35
34 def __init__(self, repo_cache):
36 def __init__(self):
35 self._cache = repo_cache
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
36
38
37 def _create_config(self, path, config):
39 def _create_config(self, path, config):
38 config = {}
40 config = {}
@@ -48,26 +50,19 b' class RepoFactory(object):'
48 Uses internally the low level beaker API since the decorators introduce
50 Uses internally the low level beaker API since the decorators introduce
49 significant overhead.
51 significant overhead.
50 """
52 """
51 def create_new_repo():
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
52 return self._create_repo(wire, create)
62 return self._create_repo(wire, create)
53
63
54 return self._repo(wire, create_new_repo)
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
55
65 return repo
56 def _repo(self, wire, createfunc):
57 context = wire.get('context', None)
58 cache = wire.get('cache', True)
59
60 if context and cache:
61 cache_key = (context, wire['path'])
62 log.debug(
63 'FETCH %s@%s repo object from cache. Context: %s',
64 self.__class__.__name__, wire['path'], context)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
66 else:
67 log.debug(
68 'INIT %s@%s repo object based on wire %s. Context: %s',
69 self.__class__.__name__, wire['path'], wire, context)
70 return createfunc()
71
66
72
67
73 def obfuscate_qs(query_string):
68 def obfuscate_qs(query_string):
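A note on the hunk above: `RepoFactory` no longer receives a beaker `repo_cache`; it now pulls the shared `repo_object` region from `vcsserver.lib.rc_cache.region_meta` and caches repo objects through a decorated factory keyed on repo type, path and context uid. Below is a minimal, hedged sketch of that caching pattern built on stock dogpile.cache with an in-memory backend; `make_repo` and the region configuration are illustrative stand-ins, and the `conditional_cache_on_arguments(condition=...)` helper used in the hunk (apparently provided by the new rc_cache layer, not plain dogpile) is approximated here with `cache_on_arguments` plus an explicit branch.

    from dogpile.cache import make_region

    # illustrative in-memory region; vcsserver configures its 'repo_object'
    # region through vcsserver.lib.rc_cache instead
    region = make_region().configure('dogpile.cache.memory', expiration_time=300)

    @region.cache_on_arguments()
    def _cached_repo(repo_type, repo_path, context_uid):
        # only these three arguments form the cache key, mirroring the hunk
        return make_repo(repo_path)

    def get_repo(wire, repo_type='git'):
        context = wire.get('context', None)
        cache_on = bool(context) and wire.get('cache', True)
        repo_path = wire.get('path', '')
        if cache_on:
            return _cached_repo(repo_type, repo_path, '{}'.format(context))
        # no call context, or caching disabled: always build a fresh object
        return make_repo(repo_path)

    def make_repo(path):
        return ('repo-object-for', path)   # stand-in for a real repo object

    print(get_repo({'path': '/tmp/repo', 'context': 'abc', 'cache': True}))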
@@ -90,8 +85,6 b' def raise_from_original(new_type):'
90 """
85 """
91 exc_type, exc_value, exc_traceback = sys.exc_info()
86 exc_type, exc_value, exc_traceback = sys.exc_info()
92
87
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
95 try:
88 try:
96 raise new_type(*exc_value.args), None, exc_traceback
89 raise new_type(*exc_value.args), None, exc_traceback
97 finally:
90 finally:
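Worth spelling out, since it affects every call site later in this diff: after the exceptions rework (next file), the `new_type` passed to `raise_from_original` is no longer an exception class but the wrapper returned by a factory such as `exceptions.AbortException(org_exc)`. A small sketch of how the two pieces fit together, using Python 2 semantics as in the codebase; the body of the `finally` clause is assumed to simply drop the traceback reference.

    import sys

    def raise_from_original(new_type):
        exc_type, exc_value, exc_traceback = sys.exc_info()
        try:
            # Python 2 three-expression raise: new exception, original traceback
            raise new_type(*exc_value.args), None, exc_traceback
        finally:
            del exc_traceback  # assumed cleanup, avoids a frame reference cycle

    def AbortException(org_exc=None):          # shape of the reworked factories
        def _wrapper(*args):
            exc = Exception(*args)
            exc._vcs_kind, exc._org_exc = 'abort', org_exc
            return exc
        return _wrapper

    try:
        raise IOError('disk gone')
    except IOError as e:
        raise_from_original(AbortException(e))  # as hg.py does further down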
@@ -24,11 +24,10 b' which contain an extra attribute `_vcs_k'
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 import functools
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28 from pyramid.httpexceptions import HTTPLocked
29
28
30
29
31 def _make_exception(kind, *args):
30 def _make_exception(kind, org_exc, *args):
32 """
31 """
33 Prepares a base `Exception` instance to be sent over the wire.
32 Prepares a base `Exception` instance to be sent over the wire.
34
33
@@ -37,26 +36,68 b' def _make_exception(kind, *args):'
37 """
36 """
38 exc = Exception(*args)
37 exc = Exception(*args)
39 exc._vcs_kind = kind
38 exc._vcs_kind = kind
39 exc._org_exc = org_exc
40 return exc
40 return exc
41
41
42
42
43 AbortException = functools.partial(_make_exception, 'abort')
43 def AbortException(org_exc=None):
44 def _make_exception_wrapper(*args):
45 return _make_exception('abort', org_exc, *args)
46 return _make_exception_wrapper
47
44
48
45 ArchiveException = functools.partial(_make_exception, 'archive')
49 def ArchiveException(org_exc=None):
50 def _make_exception_wrapper(*args):
51 return _make_exception('archive', org_exc, *args)
52 return _make_exception_wrapper
53
46
54
47 LookupException = functools.partial(_make_exception, 'lookup')
55 def LookupException(org_exc=None):
56 def _make_exception_wrapper(*args):
57 return _make_exception('lookup', org_exc, *args)
58 return _make_exception_wrapper
59
48
60
49 VcsException = functools.partial(_make_exception, 'error')
61 def VcsException(org_exc=None):
62 def _make_exception_wrapper(*args):
63 return _make_exception('error', org_exc, *args)
64 return _make_exception_wrapper
65
66
67 def RepositoryLockedException(org_exc=None):
68 def _make_exception_wrapper(*args):
69 return _make_exception('repo_locked', org_exc, *args)
70 return _make_exception_wrapper
50
71
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
72
73 def RepositoryBranchProtectedException(org_exc=None):
74 def _make_exception_wrapper(*args):
75 return _make_exception('repo_branch_protected', org_exc, *args)
76 return _make_exception_wrapper
52
77
53 RequirementException = functools.partial(_make_exception, 'requirement')
78
79 def RequirementException(org_exc=None):
80 def _make_exception_wrapper(*args):
81 return _make_exception('requirement', org_exc, *args)
82 return _make_exception_wrapper
83
54
84
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
85 def UnhandledException(org_exc=None):
86 def _make_exception_wrapper(*args):
87 return _make_exception('unhandled', org_exc, *args)
88 return _make_exception_wrapper
89
56
90
57 URLError = functools.partial(_make_exception, 'url_error')
91 def URLError(org_exc=None):
92 def _make_exception_wrapper(*args):
93 return _make_exception('url_error', org_exc, *args)
94 return _make_exception_wrapper
58
95
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
96
97 def SubrepoMergeException(org_exc=None):
98 def _make_exception_wrapper(*args):
99 return _make_exception('subrepo_merge_error', org_exc, *args)
100 return _make_exception_wrapper
60
101
61
102
62 class HTTPRepoLocked(HTTPLocked):
103 class HTTPRepoLocked(HTTPLocked):
@@ -68,3 +109,8 b' class HTTPRepoLocked(HTTPLocked):'
68 self.code = status_code or HTTPLocked.code
109 self.code = status_code or HTTPLocked.code
69 self.title = title
110 self.title = title
70 super(HTTPRepoLocked, self).__init__(**kwargs)
111 super(HTTPRepoLocked, self).__init__(**kwargs)
112
113
114 class HTTPRepoBranchProtected(HTTPForbidden):
115 def __init__(self, *args, **kwargs):
116 super(HTTPForbidden, self).__init__(*args, **kwargs)
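The factories above moved from `functools.partial(_make_exception, kind)` to a two-step call: the first call binds the original exception, the second builds the wire-level `Exception` carrying `_vcs_kind` and `_org_exc`. That is why call sites throughout the rest of this diff read `LookupException(e)(e.message)` rather than `LookupException(e.message)`. A short sketch of producing and inspecting such an exception (values are illustrative only):

    # mirrors the factory shape above; not the vcsserver module itself
    def _make_exception(kind, org_exc, *args):
        exc = Exception(*args)
        exc._vcs_kind = kind
        exc._org_exc = org_exc
        return exc

    def LookupException(org_exc=None):
        def _make_exception_wrapper(*args):
            return _make_exception('lookup', org_exc, *args)
        return _make_exception_wrapper

    try:
        raise KeyError('deadbeef')
    except KeyError as e:
        wire_exc = LookupException(e)('unknown revision deadbeef')

    print(wire_exc._vcs_kind)                 # 'lookup'
    print(type(wire_exc._org_exc).__name__)   # 'KeyError'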
@@ -56,9 +56,9 b' def reraise_safe_exceptions(func):'
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 ObjectMissing) as e:
58 ObjectMissing) as e:
59 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e)(e.message)
60 except (HangupException, UnexpectedCommandError) as e:
60 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e)(e.message)
62 except Exception as e:
62 except Exception as e:
63 # NOTE(marcink): because of how dulwich handles some exceptions
63 # NOTE(marcink): because of how dulwich handles some exceptions
64 # (KeyError on empty repos), we cannot track this and catch all
64 # (KeyError on empty repos), we cannot track this and catch all
@@ -87,6 +87,7 b' class Repo(DulwichRepo):'
87
87
88
88
89 class GitFactory(RepoFactory):
89 class GitFactory(RepoFactory):
90 repo_type = 'git'
90
91
91 def _create_repo(self, wire, create):
92 def _create_repo(self, wire, create):
92 repo_path = str_to_dulwich(wire['path'])
93 repo_path = str_to_dulwich(wire['path'])
@@ -213,8 +214,8 b' class GitRemote(object):'
213 elif attr in ["author", "message", "parents"]:
214 elif attr in ["author", "message", "parents"]:
214 args.append(attr)
215 args.append(attr)
215 result[attr] = method(*args)
216 result[attr] = method(*args)
216 except KeyError:
217 except KeyError as e:
217 raise exceptions.VcsException(
218 raise exceptions.VcsException(e)(
218 "Unknown bulk attribute: %s" % attr)
219 "Unknown bulk attribute: %s" % attr)
219 return result
220 return result
220
221
@@ -257,11 +258,11 b' class GitRemote(object):'
257 log.debug("Trying to open URL %s", cleaned_uri)
258 log.debug("Trying to open URL %s", cleaned_uri)
258 resp = o.open(req)
259 resp = o.open(req)
259 if resp.code != 200:
260 if resp.code != 200:
260 raise exceptions.URLError('Return Code is not 200')
261 raise exceptions.URLError()('Return Code is not 200')
261 except Exception as e:
262 except Exception as e:
262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 # means it cannot be cloned
264 # means it cannot be cloned
264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
265
266
266 # now detect if it's proper git repo
267 # now detect if it's proper git repo
267 gitdata = resp.read()
268 gitdata = resp.read()
@@ -271,7 +272,7 b' class GitRemote(object):'
271 # old style git can return some other format !
272 # old style git can return some other format !
272 pass
273 pass
273 else:
274 else:
274 raise exceptions.URLError(
275 raise exceptions.URLError()(
275 "url [%s] does not look like an git" % (cleaned_uri,))
276 "url [%s] does not look like an git" % (cleaned_uri,))
276
277
277 return True
278 return True
@@ -418,7 +419,7 b' class GitRemote(object):'
418 log.warning(
419 log.warning(
419 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 # Exception can contain unicode which we convert
421 # Exception can contain unicode which we convert
421 raise exceptions.AbortException(repr(e))
422 raise exceptions.AbortException(e)(repr(e))
422
423
423 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 # refs filtered by `determine_wants` function. We need to filter result
425 # refs filtered by `determine_wants` function. We need to filter result
@@ -525,9 +526,13 b' class GitRemote(object):'
525 return repo.refs.path
526 return repo.refs.path
526
527
527 @reraise_safe_exceptions
528 @reraise_safe_exceptions
528 def head(self, wire):
529 def head(self, wire, show_exc=True):
529 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
530 return repo.head()
531 try:
532 return repo.head()
533 except Exception:
534 if show_exc:
535 raise
531
536
532 @reraise_safe_exceptions
537 @reraise_safe_exceptions
533 def init(self, wire):
538 def init(self, wire):
@@ -654,7 +659,7 b' class GitRemote(object):'
654 if safe_call:
659 if safe_call:
655 return '', err
660 return '', err
656 else:
661 else:
657 raise exceptions.VcsException(tb_err)
662 raise exceptions.VcsException()(tb_err)
658
663
659 @reraise_safe_exceptions
664 @reraise_safe_exceptions
660 def install_hooks(self, wire, force=False):
665 def install_hooks(self, wire, force=False):
@@ -32,7 +32,7 b' from vcsserver.base import RepoFactory, '
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
@@ -74,25 +74,27 b' def reraise_safe_exceptions(func):'
74 def wrapper(*args, **kwargs):
74 def wrapper(*args, **kwargs):
75 try:
75 try:
76 return func(*args, **kwargs)
76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired):
77 except (Abort, InterventionRequired) as e:
78 raise_from_original(exceptions.AbortException)
78 raise_from_original(exceptions.AbortException(e))
79 except RepoLookupError:
79 except RepoLookupError as e:
80 raise_from_original(exceptions.LookupException)
80 raise_from_original(exceptions.LookupException(e))
81 except RequirementError:
81 except RequirementError as e:
82 raise_from_original(exceptions.RequirementException)
82 raise_from_original(exceptions.RequirementException(e))
83 except RepoError:
83 except RepoError as e:
84 raise_from_original(exceptions.VcsException)
84 raise_from_original(exceptions.VcsException(e))
85 except LookupError:
85 except LookupError as e:
86 raise_from_original(exceptions.LookupException)
86 raise_from_original(exceptions.LookupException(e))
87 except Exception as e:
87 except Exception as e:
88 if not hasattr(e, '_vcs_kind'):
88 if not hasattr(e, '_vcs_kind'):
89 log.exception("Unhandled exception in hg remote call")
89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException(e))
91
91 raise
92 raise
92 return wrapper
93 return wrapper
93
94
94
95
95 class MercurialFactory(RepoFactory):
96 class MercurialFactory(RepoFactory):
97 repo_type = 'hg'
96
98
97 def _create_config(self, config, hooks=True):
99 def _create_config(self, config, hooks=True):
98 if not hooks:
100 if not hooks:
@@ -148,7 +150,7 b' class HgRemote(object):'
148 elif kind == 'zip':
150 elif kind == 'zip':
149 archiver = archival.zipit(archive_path, mtime)
151 archiver = archival.zipit(archive_path, mtime)
150 else:
152 else:
151 raise exceptions.ArchiveException(
153 raise exceptions.ArchiveException()(
152 'Remote does not support: "%s".' % kind)
154 'Remote does not support: "%s".' % kind)
153
155
154 for f_path, f_mode, f_is_link, f_content in file_info:
156 for f_path, f_mode, f_is_link, f_content in file_info:
@@ -180,8 +182,8 b' class HgRemote(object):'
180 try:
182 try:
181 method = self._bulk_methods[attr]
183 method = self._bulk_methods[attr]
182 result[attr] = method(wire, rev)
184 result[attr] = method(wire, rev)
183 except KeyError:
185 except KeyError as e:
184 raise exceptions.VcsException(
186 raise exceptions.VcsException(e)(
185 'Unknown bulk attribute: "%s"' % attr)
187 'Unknown bulk attribute: "%s"' % attr)
186 return result
188 return result
187
189
@@ -211,14 +213,14 b' class HgRemote(object):'
211 if node['path'] == path:
213 if node['path'] == path:
212 return memfilectx(
214 return memfilectx(
213 _repo,
215 _repo,
216 changectx=memctx,
214 path=node['path'],
217 path=node['path'],
215 data=node['content'],
218 data=node['content'],
216 islink=False,
219 islink=False,
217 isexec=bool(node['mode'] & stat.S_IXUSR),
220 isexec=bool(node['mode'] & stat.S_IXUSR),
218 copied=False,
221 copied=False)
219 memctx=memctx)
220
222
221 raise exceptions.AbortException(
223 raise exceptions.AbortException()(
222 "Given path haven't been marked as added, "
224 "Given path haven't been marked as added, "
223 "changed or removed (%s)" % path)
225 "changed or removed (%s)" % path)
224
226
@@ -368,11 +370,11 b' class HgRemote(object):'
368 log.debug("Trying to open URL %s", cleaned_uri)
370 log.debug("Trying to open URL %s", cleaned_uri)
369 resp = o.open(req)
371 resp = o.open(req)
370 if resp.code != 200:
372 if resp.code != 200:
371 raise exceptions.URLError('Return Code is not 200')
373 raise exceptions.URLError()('Return Code is not 200')
372 except Exception as e:
374 except Exception as e:
373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 # means it cannot be cloned
376 # means it cannot be cloned
375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
376
378
377 # now check if it's a proper hg repo, but don't do it for svn
379 # now check if it's a proper hg repo, but don't do it for svn
378 try:
380 try:
@@ -383,11 +385,13 b' class HgRemote(object):'
383 log.debug(
385 log.debug(
384 "Verifying if URL is a Mercurial repository: %s",
386 "Verifying if URL is a Mercurial repository: %s",
385 cleaned_uri)
387 cleaned_uri)
386 httppeer(make_ui_from_config(config), url).lookup('tip')
388 ui = make_ui_from_config(config)
389 peer_checker = makepeer(ui, url)
390 peer_checker.lookup('tip')
387 except Exception as e:
391 except Exception as e:
388 log.warning("URL is not a valid Mercurial repository: %s",
392 log.warning("URL is not a valid Mercurial repository: %s",
389 cleaned_uri)
393 cleaned_uri)
390 raise exceptions.URLError(
394 raise exceptions.URLError(e)(
391 "url [%s] does not look like an hg repo org_exc: %s"
395 "url [%s] does not look like an hg repo org_exc: %s"
392 % (cleaned_uri, e))
396 % (cleaned_uri, e))
393
397
@@ -409,8 +413,8 b' class HgRemote(object):'
409 try:
413 try:
410 return "".join(patch.diff(
414 return "".join(patch.diff(
411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
412 except RepoLookupError:
416 except RepoLookupError as e:
413 raise exceptions.LookupException()
417 raise exceptions.LookupException(e)()
414
418
415 @reraise_safe_exceptions
419 @reraise_safe_exceptions
416 def file_history(self, wire, revision, path, limit):
420 def file_history(self, wire, revision, path, limit):
@@ -454,9 +458,10 b' class HgRemote(object):'
454 fctx = ctx.filectx(path)
458 fctx = ctx.filectx(path)
455
459
456 result = []
460 result = []
457 for i, (a_line, content) in enumerate(fctx.annotate()):
461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
458 ln_no = i + 1
462 ln_no = i
459 sha = hex(a_line.fctx.node())
463 sha = hex(annotate_obj.fctx.node())
464 content = annotate_obj.text
460 result.append((ln_no, sha, content))
465 result.append((ln_no, sha, content))
461 return result
466 return result
462
467
@@ -533,16 +538,28 b' class HgRemote(object):'
533
538
534 @reraise_safe_exceptions
539 @reraise_safe_exceptions
535 def lookup(self, wire, revision, both):
540 def lookup(self, wire, revision, both):
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
541
537 if isinstance(revision, float):
538 revision = long(revision)
539 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543
544 if isinstance(revision, int):
545 # NOTE(marcink):
546 # since Mercurial doesn't support indexes properly
547 # we need to shift accordingly by one to get proper index, e.g
548 # repo[-1] => repo[-2]
549 # repo[0] => repo[-1]
550 # repo[1] => repo[2]; we also never call repo[0], because
551 # it is actually the second commit
552 if revision <= 0:
553 revision = revision + -1
554 else:
555 revision = revision + 1
556
540 try:
557 try:
541 ctx = repo[revision]
558 ctx = repo[revision]
542 except RepoLookupError:
559 except RepoLookupError as e:
543 raise exceptions.LookupException(revision)
560 raise exceptions.LookupException(e)(revision)
544 except LookupError as e:
561 except LookupError as e:
545 raise exceptions.LookupException(e.name)
562 raise exceptions.LookupException(e)(e.name)
546
563
547 if not both:
564 if not both:
548 return ctx.hex()
565 return ctx.hex()
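The integer branch added to `lookup()` above shifts the caller's index before it is used as `repo[revision]`; per the NOTE in the hunk, the indexes sent by the client are off by one relative to Mercurial's own revision numbers, in both directions. A tiny worked example of the mapping the two branches implement (pure function, no repository needed):

    def shift_revision(revision):
        # mirrors the branch added in lookup(); integer revisions only
        if revision <= 0:
            return revision - 1   # -1 -> -2, 0 -> -1 (counting from the tip)
        return revision + 1       #  1 ->  2, 2 -> 3, ...

    for rev in (-2, -1, 0, 1, 2):
        print('%s -> %s' % (rev, shift_revision(rev)))
    # -2 -> -3, -1 -> -2, 0 -> -1, 1 -> 2, 2 -> 3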
@@ -658,7 +675,7 b' class HgRemote(object):'
658 except Abort as e:
675 except Abort as e:
659 log.exception("Tag operation aborted")
676 log.exception("Tag operation aborted")
660 # Exception can contain unicode which we convert
677 # Exception can contain unicode which we convert
661 raise exceptions.AbortException(repr(e))
678 raise exceptions.AbortException(e)(repr(e))
662
679
663 @reraise_safe_exceptions
680 @reraise_safe_exceptions
664 def tags(self, wire):
681 def tags(self, wire):
@@ -51,7 +51,7 b' from mercurial.node import bin, hex'
51 from mercurial.encoding import tolocal
51 from mercurial.encoding import tolocal
52 from mercurial.discovery import findcommonoutgoing
52 from mercurial.discovery import findcommonoutgoing
53 from mercurial.hg import peer
53 from mercurial.hg import peer
54 from mercurial.httppeer import httppeer
54 from mercurial.httppeer import makepeer
55 from mercurial.util import url as hg_url
55 from mercurial.util import url as hg_url
56 from mercurial.scmutil import revrange
56 from mercurial.scmutil import revrange
57 from mercurial.node import nullrev
57 from mercurial.node import nullrev
@@ -36,15 +36,15 b' def patch_largefiles_capabilities():'
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto.capabilities = wrapper
39 lfproto._capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto.capabilities
44 wrapped_capabilities = lfproto._capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
@@ -52,10 +52,10 b' def _dynamic_capabilities_wrapper(lfprot'
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 else:
56 else:
56 logger.debug('Extension largefiles disabled')
57 logger.debug('Extension largefiles disabled')
57 calc_capabilities = lfproto.capabilitiesorig
58 return orig(repo, proto)
58 return calc_capabilities(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
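The capabilities wrapper above gained an `orig` first parameter and now targets `lfproto._capabilities`, matching newer Mercurial where a wrapped function receives the function it replaces as its first argument (the `extensions.wrapfunction` convention). A hedged, generic sketch of that convention with stand-in objects; none of these names are the real Mercurial or vcsserver ones.

    import types

    def wrapfunction(container, name, wrapper):
        # minimal stand-in for mercurial.extensions.wrapfunction: the wrapper
        # is invoked with the original function as its first argument
        orig = getattr(container, name)
        def bound(*args, **kwargs):
            return wrapper(orig, *args, **kwargs)
        setattr(container, name, bound)
        return orig

    lfproto = types.ModuleType('lfproto')          # stand-in for largefiles.proto
    lfproto._capabilities = lambda repo, proto: ['base-cap']

    def dynamic_capabilities(orig, repo, proto):   # same shape as the hunk above
        return orig(repo, proto) + ['largefiles=serve']

    wrapfunction(lfproto, '_capabilities', dynamic_capabilities)
    print(lfproto._capabilities('repo', 'proto'))  # ['base-cap', 'largefiles=serve']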
@@ -116,7 +116,7 b' def patch_subrepo_type_mapping():'
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
119 raise SubrepoMergeException()()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
@@ -120,9 +120,11 b' def _handle_exception(result):'
120 log.error('Got traceback from remote call:%s', exception_traceback)
120 log.error('Got traceback from remote call:%s', exception_traceback)
121
121
122 if exception_class == 'HTTPLockedRC':
122 if exception_class == 'HTTPLockedRC':
123 raise exceptions.RepositoryLockedException(*result['exception_args'])
123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 elif exception_class == 'HTTPBranchProtected':
125 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
124 elif exception_class == 'RepositoryError':
126 elif exception_class == 'RepositoryError':
125 raise exceptions.VcsException(*result['exception_args'])
127 raise exceptions.VcsException()(*result['exception_args'])
126 elif exception_class:
128 elif exception_class:
127 raise Exception('Got remote exception "%s" with args "%s"' %
129 raise Exception('Got remote exception "%s" with args "%s"' %
128 (exception_class, result['exception_args']))
130 (exception_class, result['exception_args']))
@@ -161,16 +163,54 b' def _extras_from_ui(ui):'
161 return extras
163 return extras
162
164
163
165
164 def _rev_range_hash(repo, node):
166 def _rev_range_hash(repo, node, check_heads=False):
165
167
166 commits = []
168 commits = []
167 for rev in xrange(repo[node], len(repo)):
169 revs = []
170 start = repo[node].rev()
171 end = len(repo)
172 for rev in range(start, end):
173 revs.append(rev)
168 ctx = repo[rev]
174 ctx = repo[rev]
169 commit_id = mercurial.node.hex(ctx.node())
175 commit_id = mercurial.node.hex(ctx.node())
170 branch = ctx.branch()
176 branch = ctx.branch()
171 commits.append((commit_id, branch))
177 commits.append((commit_id, branch))
172
178
173 return commits
179 parent_heads = []
180 if check_heads:
181 parent_heads = _check_heads(repo, start, end, revs)
182 return commits, parent_heads
183
184
185 def _check_heads(repo, start, end, commits):
186 changelog = repo.changelog
187 parents = set()
188
189 for new_rev in commits:
190 for p in changelog.parentrevs(new_rev):
191 if p == mercurial.node.nullrev:
192 continue
193 if p < start:
194 parents.add(p)
195
196 for p in parents:
197 branch = repo[p].branch()
198 # The heads descending from that parent, on the same branch
199 parent_heads = set([p])
200 reachable = set([p])
201 for x in xrange(p + 1, end):
202 if repo[x].branch() != branch:
203 continue
204 for pp in changelog.parentrevs(x):
205 if pp in reachable:
206 reachable.add(x)
207 parent_heads.discard(pp)
208 parent_heads.add(x)
209 # More than one head? Suggest merging
210 if len(parent_heads) > 1:
211 return list(parent_heads)
212
213 return []
174
214
175
215
176 def repo_size(ui, repo, **kwargs):
216 def repo_size(ui, repo, **kwargs):
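`_check_heads` above drives the new `detect_force_push` / `multiple_heads` data attached to each Mercurial push ref: for every pre-existing parent of the incoming changesets it walks the changelog forward and counts how many heads on the same branch descend from that parent. A standalone sketch of the same walk over toy data; plain dicts stand in for the changelog and branch lookup, and all revision numbers are made up.

    NULLREV = -1

    def find_parent_heads(parentrevs, branch_of, start, end, new_revs):
        # parents of the incoming revs that already existed before the push
        parents = set()
        for new_rev in new_revs:
            for p in parentrevs[new_rev]:
                if p != NULLREV and p < start:
                    parents.add(p)

        for p in parents:
            branch = branch_of[p]
            parent_heads, reachable = {p}, {p}
            for x in range(p + 1, end):
                if branch_of[x] != branch:
                    continue
                for pp in parentrevs[x]:
                    if pp in reachable:
                        reachable.add(x)
                        parent_heads.discard(pp)
                        parent_heads.add(x)
            if len(parent_heads) > 1:   # the push would leave multiple heads
                return sorted(parent_heads)
        return []

    # revs 2 and 3 are being pushed; both branch off rev 1 on 'default'
    parents = {0: [NULLREV], 1: [0], 2: [1], 3: [1]}
    branches = {0: 'default', 1: 'default', 2: 'default', 3: 'default'}
    print(find_parent_heads(parents, branches, start=2, end=4, new_revs=[2, 3]))
    # [2, 3] -> the hook can ask the pusher to merge instead of creating heads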
@@ -203,15 +243,20 b' def post_pull_ssh(ui, repo, **kwargs):'
203
243
204
244
205 def pre_push(ui, repo, node=None, **kwargs):
245 def pre_push(ui, repo, node=None, **kwargs):
246 """
247 Mercurial pre_push hook
248 """
206 extras = _extras_from_ui(ui)
249 extras = _extras_from_ui(ui)
250 detect_force_push = extras.get('detect_force_push')
207
251
208 rev_data = []
252 rev_data = []
209 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
253 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
210 branches = collections.defaultdict(list)
254 branches = collections.defaultdict(list)
211 for commit_id, branch in _rev_range_hash(repo, node):
255 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
256 for commit_id, branch in commits:
212 branches[branch].append(commit_id)
257 branches[branch].append(commit_id)
213
258
214 for branch, commits in branches.iteritems():
259 for branch, commits in branches.items():
215 old_rev = kwargs.get('node_last') or commits[0]
260 old_rev = kwargs.get('node_last') or commits[0]
216 rev_data.append({
261 rev_data.append({
217 'old_rev': old_rev,
262 'old_rev': old_rev,
@@ -221,18 +266,25 b' def pre_push(ui, repo, node=None, **kwar'
221 'name': branch,
266 'name': branch,
222 })
267 })
223
268
269 for push_ref in rev_data:
270 push_ref['multiple_heads'] = _heads
271
224 extras['commit_ids'] = rev_data
272 extras['commit_ids'] = rev_data
225 return _call_hook('pre_push', extras, HgMessageWriter(ui))
273 return _call_hook('pre_push', extras, HgMessageWriter(ui))
226
274
227
275
228 def pre_push_ssh(ui, repo, node=None, **kwargs):
276 def pre_push_ssh(ui, repo, node=None, **kwargs):
229 if _extras_from_ui(ui).get('SSH'):
277 extras = _extras_from_ui(ui)
278 if extras.get('SSH'):
230 return pre_push(ui, repo, node, **kwargs)
279 return pre_push(ui, repo, node, **kwargs)
231
280
232 return 0
281 return 0
233
282
234
283
235 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
284 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
285 """
286 Mercurial pre_push hook for SSH
287 """
236 extras = _extras_from_ui(ui)
288 extras = _extras_from_ui(ui)
237 if extras.get('SSH'):
289 if extras.get('SSH'):
238 permission = extras['SSH_PERMISSIONS']
290 permission = extras['SSH_PERMISSIONS']
@@ -247,6 +299,9 b' def pre_push_ssh_auth(ui, repo, node=Non'
247
299
248
300
249 def post_push(ui, repo, node, **kwargs):
301 def post_push(ui, repo, node, **kwargs):
302 """
303 Mercurial post_push hook
304 """
250 extras = _extras_from_ui(ui)
305 extras = _extras_from_ui(ui)
251
306
252 commit_ids = []
307 commit_ids = []
@@ -254,7 +309,8 b' def post_push(ui, repo, node, **kwargs):'
254 bookmarks = []
309 bookmarks = []
255 tags = []
310 tags = []
256
311
257 for commit_id, branch in _rev_range_hash(repo, node):
312 commits, _heads = _rev_range_hash(repo, node)
313 for commit_id, branch in commits:
258 commit_ids.append(commit_id)
314 commit_ids.append(commit_id)
259 if branch not in branches:
315 if branch not in branches:
260 branches.append(branch)
316 branches.append(branch)
@@ -273,6 +329,9 b' def post_push(ui, repo, node, **kwargs):'
273
329
274
330
275 def post_push_ssh(ui, repo, node, **kwargs):
331 def post_push_ssh(ui, repo, node, **kwargs):
332 """
333 Mercurial post_push hook for SSH
334 """
276 if _extras_from_ui(ui).get('SSH'):
335 if _extras_from_ui(ui).get('SSH'):
277 return post_push(ui, repo, node, **kwargs)
336 return post_push(ui, repo, node, **kwargs)
278 return 0
337 return 0
@@ -389,6 +448,33 b' def git_pre_receive(unused_repo_path, re'
389 rev_data = _parse_git_ref_lines(revision_lines)
448 rev_data = _parse_git_ref_lines(revision_lines)
390 if 'push' not in extras['hooks']:
449 if 'push' not in extras['hooks']:
391 return 0
450 return 0
451 empty_commit_id = '0' * 40
452
453 detect_force_push = extras.get('detect_force_push')
454
455 for push_ref in rev_data:
456 # store our git-env which holds the temp store
457 push_ref['git_env'] = [
458 (k, v) for k, v in os.environ.items() if k.startswith('GIT')]
459 push_ref['pruned_sha'] = ''
460 if not detect_force_push:
461 # don't check for forced-push when we don't need to
462 continue
463
464 type_ = push_ref['type']
465 new_branch = push_ref['old_rev'] == empty_commit_id
466 if type_ == 'heads' and not new_branch:
467 old_rev = push_ref['old_rev']
468 new_rev = push_ref['new_rev']
469 cmd = [settings.GIT_EXECUTABLE, 'rev-list',
470 old_rev, '^{}'.format(new_rev)]
471 stdout, stderr = subprocessio.run_command(
472 cmd, env=os.environ.copy())
473 # non-empty output means some commits are no longer reachable from the
474 # new rev, i.e. this was a forced push
475 if stdout:
476 push_ref['pruned_sha'] = stdout.splitlines()
477
392 extras['commit_ids'] = rev_data
478 extras['commit_ids'] = rev_data
393 return _call_hook('pre_push', extras, GitMessageWriter())
479 return _call_hook('pre_push', extras, GitMessageWriter())
394
480
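For Git, the force-push check added above leans on `git rev-list <old_rev> ^<new_rev>`: commits reachable from the previously advertised ref but no longer reachable from the incoming one have been rewritten away, so any output marks the push as forced and the dropped SHAs end up in `pruned_sha`. A hedged sketch of the same check with plain `subprocess`; the repository path and revisions are placeholders.

    import subprocess

    def pruned_commits(repo_path, old_rev, new_rev):
        # commits reachable from old_rev but not from new_rev
        out = subprocess.check_output(
            ['git', 'rev-list', old_rev, '^{}'.format(new_rev)],
            cwd=repo_path)
        return out.splitlines()

    # hypothetical usage inside a pre-receive style hook:
    #   pruned = pruned_commits('/srv/repos/project.git', old_sha, new_sha)
    #   if pruned:
    #       print('force push detected, %d commits dropped' % len(pruned))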
@@ -442,7 +528,8 b' def git_post_receive(unused_repo_path, r'
442 cmd, env=os.environ.copy())
528 cmd, env=os.environ.copy())
443 heads = stdout
529 heads = stdout
444 heads = heads.replace(push_ref['ref'], '')
530 heads = heads.replace(push_ref['ref'], '')
445 heads = ' '.join(head for head in heads.splitlines() if head)
531 heads = ' '.join(head for head
532 in heads.splitlines() if head) or '.'
446 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
533 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
447 '--pretty=format:%H', '--', push_ref['new_rev'],
534 '--pretty=format:%H', '--', push_ref['new_rev'],
448 '--not', heads]
535 '--not', heads]
@@ -16,6 +16,7 b''
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import base64
20 import base64
20 import locale
21 import locale
21 import logging
22 import logging
@@ -26,17 +27,31 b' from itertools import chain'
26
27
27 import simplejson as json
28 import simplejson as json
28 import msgpack
29 import msgpack
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
31 from pyramid.config import Configurator
30 from pyramid.config import Configurator
31 from pyramid.settings import asbool, aslist
32 from pyramid.wsgi import wsgiapp
32 from pyramid.wsgi import wsgiapp
33 from pyramid.compat import configparser
33 from pyramid.compat import configparser
34
34
35
36 log = logging.getLogger(__name__)
37
38 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
39 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
40
41 try:
42 locale.setlocale(locale.LC_ALL, '')
43 except locale.Error as e:
44 log.error(
45 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
46 os.environ['LC_ALL'] = 'C'
47
48
35 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
49 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
36 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
50 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
37 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
51 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
38 from vcsserver.echo_stub.echo_app import EchoApp
52 from vcsserver.echo_stub.echo_app import EchoApp
39 from vcsserver.exceptions import HTTPRepoLocked
53 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
54 from vcsserver.lib.exc_tracking import store_exception
40 from vcsserver.server import VcsServer
55 from vcsserver.server import VcsServer
41
56
42 try:
57 try:
@@ -57,7 +72,7 b' except ImportError:'
57 SubversionFactory = None
72 SubversionFactory = None
58 SvnRemote = None
73 SvnRemote = None
59
74
60 log = logging.getLogger(__name__)
75
61
76
62
77
63 def _is_request_chunked(environ):
78 def _is_request_chunked(environ):
@@ -65,48 +80,60 b' def _is_request_chunked(environ):'
65 return stream
80 return stream
66
81
67
82
83 def _int_setting(settings, name, default):
84 settings[name] = int(settings.get(name, default))
85
86
87 def _bool_setting(settings, name, default):
88 input_val = settings.get(name, default)
89 if isinstance(input_val, unicode):
90 input_val = input_val.encode('utf8')
91 settings[name] = asbool(input_val)
92
93
94 def _list_setting(settings, name, default):
95 raw_value = settings.get(name, default)
96
97 # We assume the value uses Pyramid's space/newline list separation.
98 settings[name] = aslist(raw_value)
99
100
101 def _string_setting(settings, name, default, lower=True):
102 value = settings.get(name, default)
103 if lower:
104 value = value.lower()
105 settings[name] = value
106
107
68 class VCS(object):
108 class VCS(object):
69 def __init__(self, locale=None, cache_config=None):
109 def __init__(self, locale=None, cache_config=None):
70 self.locale = locale
110 self.locale = locale
71 self.cache_config = cache_config
111 self.cache_config = cache_config
72 self._configure_locale()
112 self._configure_locale()
73 self._initialize_cache()
74
113
75 if GitFactory and GitRemote:
114 if GitFactory and GitRemote:
76 git_repo_cache = self.cache.get_cache_region(
115 git_factory = GitFactory()
77 'git', region='repo_object')
78 git_factory = GitFactory(git_repo_cache)
79 self._git_remote = GitRemote(git_factory)
116 self._git_remote = GitRemote(git_factory)
80 else:
117 else:
81 log.info("Git client import failed")
118 log.info("Git client import failed")
82
119
83 if MercurialFactory and HgRemote:
120 if MercurialFactory and HgRemote:
84 hg_repo_cache = self.cache.get_cache_region(
121 hg_factory = MercurialFactory()
85 'hg', region='repo_object')
86 hg_factory = MercurialFactory(hg_repo_cache)
87 self._hg_remote = HgRemote(hg_factory)
122 self._hg_remote = HgRemote(hg_factory)
88 else:
123 else:
89 log.info("Mercurial client import failed")
124 log.info("Mercurial client import failed")
90
125
91 if SubversionFactory and SvnRemote:
126 if SubversionFactory and SvnRemote:
92 svn_repo_cache = self.cache.get_cache_region(
127 svn_factory = SubversionFactory()
93 'svn', region='repo_object')
128
94 svn_factory = SubversionFactory(svn_repo_cache)
95 # hg factory is used for svn url validation
129 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
130 hg_factory = MercurialFactory()
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
99 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
131 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
100 else:
132 else:
101 log.info("Subversion client import failed")
133 log.info("Subversion client import failed")
102
134
103 self._vcsserver = VcsServer()
135 self._vcsserver = VcsServer()
104
136
105 def _initialize_cache(self):
106 cache_config = parse_cache_config_options(self.cache_config)
107 log.info('Initializing beaker cache: %s' % cache_config)
108 self.cache = CacheManager(**cache_config)
109
110 def _configure_locale(self):
137 def _configure_locale(self):
111 if self.locale:
138 if self.locale:
112 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
139 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
@@ -169,8 +196,11 b' class HTTPApplication(object):'
169 _use_echo_app = False
196 _use_echo_app = False
170
197
171 def __init__(self, settings=None, global_config=None):
198 def __init__(self, settings=None, global_config=None):
199 self._sanitize_settings_and_apply_defaults(settings)
200
172 self.config = Configurator(settings=settings)
201 self.config = Configurator(settings=settings)
173 self.global_config = global_config
202 self.global_config = global_config
203 self.config.include('vcsserver.lib.rc_cache')
174
204
175 locale = settings.get('locale', '') or 'en_US.UTF-8'
205 locale = settings.get('locale', '') or 'en_US.UTF-8'
176 vcs = VCS(locale=locale, cache_config=settings)
206 vcs = VCS(locale=locale, cache_config=settings)
@@ -198,6 +228,21 b' class HTTPApplication(object):'
198 if binary_dir:
228 if binary_dir:
199 settings.BINARY_DIR = binary_dir
229 settings.BINARY_DIR = binary_dir
200
230
231 def _sanitize_settings_and_apply_defaults(self, settings):
232 # repo_object cache
233 _string_setting(
234 settings,
235 'rc_cache.repo_object.backend',
236 'dogpile.cache.rc.memory_lru')
237 _int_setting(
238 settings,
239 'rc_cache.repo_object.expiration_time',
240 300)
241 _int_setting(
242 settings,
243 'rc_cache.repo_object.max_size',
244 1024)
245
201 def _configure(self):
246 def _configure(self):
202 self.config.add_renderer(
247 self.config.add_renderer(
203 name='msgpack',
248 name='msgpack',
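`_sanitize_settings_and_apply_defaults` above, together with the `_int_setting` / `_bool_setting` / `_string_setting` helpers added earlier in this file, coerces the raw string values coming out of the `.ini` file and fills in defaults for the new `rc_cache.repo_object.*` options. A minimal sketch of that coercion against a hand-written settings dict (values are illustrative only):

    def _int_setting(settings, name, default):
        settings[name] = int(settings.get(name, default))

    def _string_setting(settings, name, default, lower=True):
        value = settings.get(name, default)
        settings[name] = value.lower() if lower else value

    # pretend this came from the ini loader, where every value is a string
    settings = {'rc_cache.repo_object.expiration_time': '600'}

    _string_setting(settings, 'rc_cache.repo_object.backend',
                    'dogpile.cache.rc.memory_lru')
    _int_setting(settings, 'rc_cache.repo_object.expiration_time', 300)
    _int_setting(settings, 'rc_cache.repo_object.max_size', 1024)

    print(settings)
    # backend stays 'dogpile.cache.rc.memory_lru', expiration_time becomes
    # the int 600 (explicit value wins over the 300 default), max_size -> 1024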
@@ -246,18 +291,35 b' class HTTPApplication(object):'
246 wire = params.get('wire')
291 wire = params.get('wire')
247 args = params.get('args')
292 args = params.get('args')
248 kwargs = params.get('kwargs')
293 kwargs = params.get('kwargs')
294 context_uid = None
295
249 if wire:
296 if wire:
250 try:
297 try:
251 wire['context'] = uuid.UUID(wire['context'])
298 wire['context'] = context_uid = uuid.UUID(wire['context'])
252 except KeyError:
299 except KeyError:
253 pass
300 pass
254 args.insert(0, wire)
301 args.insert(0, wire)
255
302
256 log.debug('method called:%s with kwargs:%s', method, kwargs)
303 log.debug('method called:%s with kwargs:%s context_uid: %s',
304 method, kwargs, context_uid)
257 try:
305 try:
258 resp = getattr(remote, method)(*args, **kwargs)
306 resp = getattr(remote, method)(*args, **kwargs)
259 except Exception as e:
307 except Exception as e:
260 tb_info = traceback.format_exc()
308 exc_info = list(sys.exc_info())
309 exc_type, exc_value, exc_traceback = exc_info
310
311 org_exc = getattr(e, '_org_exc', None)
312 org_exc_name = None
313 if org_exc:
314 org_exc_name = org_exc.__class__.__name__
315 # replace our "faked" exception with our org
316 exc_info[0] = org_exc.__class__
317 exc_info[1] = org_exc
318
319 store_exception(id(exc_info), exc_info)
320
321 tb_info = ''.join(
322 traceback.format_exception(exc_type, exc_value, exc_traceback))
261
323
262 type_ = e.__class__.__name__
324 type_ = e.__class__.__name__
263 if type_ not in self.ALLOWED_EXCEPTIONS:
325 if type_ not in self.ALLOWED_EXCEPTIONS:
@@ -268,11 +330,12 b' class HTTPApplication(object):'
268 'error': {
330 'error': {
269 'message': e.message,
331 'message': e.message,
270 'traceback': tb_info,
332 'traceback': tb_info,
333 'org_exc': org_exc_name,
271 'type': type_
334 'type': type_
272 }
335 }
273 }
336 }
274 try:
337 try:
275 resp['error']['_vcs_kind'] = e._vcs_kind
338 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
276 except AttributeError:
339 except AttributeError:
277 pass
340 pass
278 else:
341 else:
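With the changes above, a failing remote call now reports three extra pieces of information to the client: the fully formatted traceback, the class name of the wrapped original exception (`org_exc`), and `_vcs_kind` read defensively via `getattr`. Roughly what the error portion of the response looks like; the field names come from the hunk, the values are invented, and any other top-level response fields are omitted here.

    # illustrative payload only
    resp = {
        'error': {
            'message': 'unknown revision deadbeef',
            'traceback': 'Traceback (most recent call last): ...',
            'org_exc': 'RepoLookupError',   # class of the wrapped original
            'type': 'Exception',
            '_vcs_kind': 'lookup',          # lets the client re-raise a typed error
        },
    }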
@@ -461,9 +524,21 b' class HTTPApplication(object):'
461 return HTTPRepoLocked(
524 return HTTPRepoLocked(
462 title=exception.message, status_code=status_code)
525 title=exception.message, status_code=status_code)
463
526
464 # Re-raise exception if we can not handle it.
527 elif _vcs_kind == 'repo_branch_protected':
465 log.exception(
528 # Get custom repo-branch-protected status code if present.
466 'error occurred handling this request for path: %s', request.path)
529 return HTTPRepoBranchProtected(title=exception.message)
530
531 exc_info = request.exc_info
532 store_exception(id(exc_info), exc_info)
533
534 traceback_info = 'unavailable'
535 if request.exc_info:
536 exc_type, exc_value, exc_tb = request.exc_info
537 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
538
539 log.error(
540 'error occurred handling this request for path: %s, \n tb: %s',
541 request.path, traceback_info)
467 raise exception
542 raise exception
468
543
469
544
@@ -483,5 +558,6 b' def main(global_config, **settings):'
483 if MercurialFactory:
558 if MercurialFactory:
484 hgpatches.patch_largefiles_capabilities()
559 hgpatches.patch_largefiles_capabilities()
485 hgpatches.patch_subrepo_type_mapping()
560 hgpatches.patch_subrepo_type_mapping()
561
486 app = HTTPApplication(settings=settings, global_config=global_config)
562 app = HTTPApplication(settings=settings, global_config=global_config)
487 return app.wsgi_app()
563 return app.wsgi_app()
@@ -21,9 +21,9 b' import itertools'
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.wireprotoserver
24 import mercurial.hgweb.common
25 import mercurial.hgweb.common
25 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.protocol
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
@@ -73,14 +73,18 b' class HgWeb(mercurial.hgweb.hgweb_mod.hg'
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 req = mercurial.hgweb.request.wsgirequest(environ, start_response)
76 from mercurial.hgweb import request as requestmod
77 gen = self.run_wsgi(req)
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
78
80
79 first_chunk = None
81 first_chunk = None
80
82
81 try:
83 try:
82 data = gen.next()
84 data = gen.next()
83 def first_chunk(): yield data
85
86 def first_chunk():
87 yield data
84 except StopIteration:
88 except StopIteration:
85 pass
89 pass
86
90
@@ -88,17 +92,18 b' class HgWeb(mercurial.hgweb.hgweb_mod.hg'
88 return itertools.chain(first_chunk(), gen)
92 return itertools.chain(first_chunk(), gen)
89 return gen
93 return gen
90
94
91 def _runwsgi(self, req, repo):
95 def _runwsgi(self, req, res, repo):
92 cmd = req.form.get('cmd', [''])[0]
93 if not mercurial.hgweb.protocol.iscmd(cmd):
94 req.respond(
95 mercurial.hgweb.common.ErrorResponse(
96 mercurial.hgweb.common.HTTP_BAD_REQUEST),
97 mercurial.hgweb.protocol.HGTYPE
98 )
99 return ['']
100
96
101 return super(HgWeb, self)._runwsgi(req, repo)
97 cmd = req.qsparams.get('cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
101 from mercurial.hgweb.common import statusmessage
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.setbodybytes('')
104 return res.sendresponse()
105
106 return super(HgWeb, self)._runwsgi(req, res, repo)
102
107
103
108
104 def make_hg_ui_from_config(repo_config):
109 def make_hg_ui_from_config(repo_config):
@@ -147,8 +152,8 b' def create_hg_wsgi_app(repo_path, repo_n'
147
152
148 try:
153 try:
149 return HgWeb(repo_path, name=repo_name, baseui=baseui)
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
150 except mercurial.error.RequirementError as exc:
155 except mercurial.error.RequirementError as e:
151 raise exceptions.RequirementException(exc)
156 raise exceptions.RequirementException(e)(e)
152
157
153
158
154 class GitHandler(object):
159 class GitHandler(object):
@@ -40,13 +40,13 b' log = logging.getLogger(__name__)'
40
40
41 # Set of svn compatible version flags.
41 # Set of svn compatible version flags.
42 # Compare with subversion/svnadmin/svnadmin.c
42 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = set([
43 svn_compatible_versions = {
44 'pre-1.4-compatible',
44 'pre-1.4-compatible',
45 'pre-1.5-compatible',
45 'pre-1.5-compatible',
46 'pre-1.6-compatible',
46 'pre-1.6-compatible',
47 'pre-1.8-compatible',
47 'pre-1.8-compatible',
48 'pre-1.9-compatible',
48 'pre-1.9-compatible'
49 ])
49 }
50
50
51 svn_compatible_versions_map = {
51 svn_compatible_versions_map = {
52 'pre-1.4-compatible': '1.3',
52 'pre-1.4-compatible': '1.3',
@@ -64,13 +64,14 b' def reraise_safe_exceptions(func):'
64 return func(*args, **kwargs)
64 return func(*args, **kwargs)
65 except Exception as e:
65 except Exception as e:
66 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in hg remote call")
67 log.exception("Unhandled exception in svn remote call")
68 raise_from_original(exceptions.UnhandledException)
68 raise_from_original(exceptions.UnhandledException(e))
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class SubversionFactory(RepoFactory):
73 class SubversionFactory(RepoFactory):
74 repo_type = 'svn'
74
75
75 def _create_repo(self, wire, create, compatible_version):
76 def _create_repo(self, wire, create, compatible_version):
76 path = svn.core.svn_path_canonicalize(wire['path'])
77 path = svn.core.svn_path_canonicalize(wire['path'])
@@ -92,10 +93,25 b' class SubversionFactory(RepoFactory):'
92 return repo
93 return repo
93
94
94 def repo(self, wire, create=False, compatible_version=None):
95 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
96 """
97 Get a repository instance for the given path.
98
99 Uses internally the low level beaker API since the decorators introduce
100 significant overhead.
101 """
102 region = self._cache_region
103 context = wire.get('context', None)
104 repo_path = wire.get('path', '')
105 context_uid = '{}'.format(context)
106 cache = wire.get('cache', True)
107 cache_on = context and cache
108
109 @region.conditional_cache_on_arguments(condition=cache_on)
110 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
96 return self._create_repo(wire, create, compatible_version)
111 return self._create_repo(wire, create, compatible_version)
97
112
98 return self._repo(wire, create_new_repo)
113 return create_new_repo(self.repo_type, repo_path, context_uid,
114 compatible_version)
99
115
100
116
101 NODE_TYPE_MAPPING = {
117 NODE_TYPE_MAPPING = {
@@ -40,7 +40,7 b' def repeat(request):'
40 @pytest.fixture(scope='session')
40 @pytest.fixture(scope='session')
41 def vcsserver_port(request):
41 def vcsserver_port(request):
42 port = get_available_port()
42 port = get_available_port()
43 print 'Using vcsserver port %s' % (port, )
43 print('Using vcsserver port %s' % (port, ))
44 return port
44 return port
45
45
46
46
@@ -55,4 +55,3 b' def get_available_port():'
55 mysocket.close()
55 mysocket.close()
56 del mysocket
56 del mysocket
57 return port
57 return port
58
@@ -152,11 +152,14 b' class TestDulwichRepoWrapper(object):'
152
152
153 class TestGitFactory(object):
153 class TestGitFactory(object):
154 def test_create_repo_returns_dulwich_wrapper(self):
154 def test_create_repo_returns_dulwich_wrapper(self):
155 factory = git.GitFactory(repo_cache=Mock())
155
156 wire = {
156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 'path': '/tmp/abcde'
157 mock.side_effect = {'repo_objects': ''}
158 }
158 factory = git.GitFactory()
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 wire = {
160 with isdir_patcher:
160 'path': '/tmp/abcde'
161 result = factory._create_repo(wire, True)
161 }
162 assert isinstance(result, git.Repo)
162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 with isdir_patcher:
164 result = factory._create_repo(wire, True)
165 assert isinstance(result, git.Repo)
@@ -120,7 +120,7 b' class TestReraiseSafeExceptions(object):'
120 def test_does_not_map_known_exceptions(self):
120 def test_does_not_map_known_exceptions(self):
121 @hg.reraise_safe_exceptions
121 @hg.reraise_safe_exceptions
122 def stub_method():
122 def stub_method():
123 raise exceptions.LookupException('stub')
123 raise exceptions.LookupException()('stub')
124
124
125 with pytest.raises(Exception) as exc_info:
125 with pytest.raises(Exception) as exc_info:
126 stub_method()
126 stub_method()
@@ -28,56 +28,45 b' def test_patch_largefiles_capabilities_a'
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
31 assert lfproto._capabilities.func_name == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
36 orig_capabilities):
36 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 hgcompat.largefiles.proto, stub_extensions)
38 hgcompat.largefiles.proto, stub_extensions)
38
39
39 caps = dynamic_capabilities(stub_repo, stub_proto)
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
40
41
41 stub_extensions.assert_called_once_with(stub_ui)
42 stub_extensions.assert_called_once_with(stub_ui)
42 assert LARGEFILES_CAPABILITY not in caps
43 assert LARGEFILES_CAPABILITY not in caps
43
44
44
45
45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
48 hgcompat.largefiles.proto, stub_extensions)
49
50 # This happens when the extension is loaded for the first time, important
51 # to ensure that an updated function is correctly picked up.
52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
53 return_value='REPLACED')
54
55 caps = dynamic_capabilities(stub_repo, stub_proto)
56 assert 'REPLACED' == caps
57
58
59 def test_dynamic_capabilities_ignores_updated_capabilities(
46 def test_dynamic_capabilities_ignores_updated_capabilities(
60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
48 orig_capabilities):
61 stub_extensions.return_value = [('largefiles', mock.Mock())]
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
62 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
63 hgcompat.largefiles.proto, stub_extensions)
51 hgcompat.largefiles.proto, stub_extensions)
64
52
65 # This happens when the extension is loaded for the first time, important
53 # This happens when the extension is loaded for the first time, important
66 # to ensure that an updated function is correctly picked up.
54 # to ensure that an updated function is correctly picked up.
67 hgcompat.largefiles.proto.capabilities = mock.Mock(
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
68 side_effect=Exception('Must not be called'))
56 side_effect=Exception('Must not be called'))
69
57
70 dynamic_capabilities(stub_repo, stub_proto)
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
71
59
72
60
73 def test_dynamic_capabilities_uses_largefiles_if_enabled(
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
63 orig_capabilities):
75 stub_extensions.return_value = [('largefiles', mock.Mock())]
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
76
65
77 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
78 hgcompat.largefiles.proto, stub_extensions)
67 hgcompat.largefiles.proto, stub_extensions)
79
68
80 caps = dynamic_capabilities(stub_repo, stub_proto)
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
81
70
82 stub_extensions.assert_called_once_with(stub_ui)
71 stub_extensions.assert_called_once_with(stub_ui)
83 assert LARGEFILES_CAPABILITY in caps
72 assert LARGEFILES_CAPABILITY in caps
@@ -94,15 +83,11 b' def patched_capabilities(request):'
94 Patch in `capabilitiesorig` and restore both capability functions.
83 Patch in `capabilitiesorig` and restore both capability functions.
95 """
84 """
96 lfproto = hgcompat.largefiles.proto
85 lfproto = hgcompat.largefiles.proto
97 orig_capabilities = lfproto.capabilities
86 orig_capabilities = lfproto._capabilities
98 orig_capabilitiesorig = lfproto.capabilitiesorig
99
100 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
101
87
102 @request.addfinalizer
88 @request.addfinalizer
103 def restore():
89 def restore():
104 lfproto.capabilities = orig_capabilities
90 lfproto._capabilities = orig_capabilities
105 lfproto.capabilitiesorig = orig_capabilitiesorig
106
91
107
92
108 @pytest.fixture
93 @pytest.fixture
@@ -120,6 +105,15 b' def stub_proto(stub_ui):'
120
105
121
106
122 @pytest.fixture
107 @pytest.fixture
108 def orig_capabilities():
109 from mercurial.wireprotov1server import wireprotocaps
110
111 def _capabilities(repo, proto):
112 return wireprotocaps
113 return _capabilities
114
115
116 @pytest.fixture
123 def stub_ui():
117 def stub_ui():
124 return hgcompat.ui.ui()
118 return hgcompat.ui.ui()
125
119
@@ -12,11 +12,6 b' from vcsserver.http_main import main'
12 def vcs_app():
12 def vcs_app():
13 stub_settings = {
13 stub_settings = {
14 'dev.use_echo_app': 'true',
14 'dev.use_echo_app': 'true',
15 'beaker.cache.regions': 'repo_object',
16 'beaker.cache.repo_object.type': 'memorylru',
17 'beaker.cache.repo_object.max_items': '100',
18 'beaker.cache.repo_object.expire': '300',
19 'beaker.cache.repo_object.enabled': 'true',
20 'locale': 'en_US.UTF-8',
15 'locale': 'en_US.UTF-8',
21 }
16 }
22 vcs_app = main({}, **stub_settings)
17 vcs_app = main({}, **stub_settings)
@@ -15,6 +15,7 b''
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
17 import logging
18 import hashlib
18
19
19 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
20
21
@@ -80,3 +81,9 b' class AttributeDict(dict):'
80 return self.get(attr, None)
81 return self.get(attr, None)
81 __setattr__ = dict.__setitem__
82 __setattr__ = dict.__setitem__
82 __delattr__ = dict.__delitem__
83 __delattr__ = dict.__delitem__
84
85
86 def sha1(val):
87 return hashlib.sha1(val).hexdigest()
88
89
NO CONTENT: 4 files were removed in this commit (contents not shown)
General Comments 0