##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r511:00fadf7b merge stable
parent child Browse files
Show More
@@ -0,0 +1,152 b''
1 """
2 gunicorn config extension and hooks. Sets additional configuration that is
3 available post the .ini config.
4
5 - workers = ${cpu_number}
6 - threads = 1
7 - proc_name = ${gunicorn_proc_name}
8 - worker_class = sync
9 - worker_connections = 10
10 - max_requests = 1000
11 - max_requests_jitter = 30
12 - timeout = 21600
13
14 """
15
16 import multiprocessing
17 import sys
18 import time
19 import datetime
20 import threading
21 import traceback
22 from gunicorn.glogging import Logger
23
24
25 # GLOBAL
26 errorlog = '-'
27 accesslog = '-'
28 loglevel = 'debug'
29
30 # SECURITY
31
32 # The maximum size of HTTP request line in bytes.
33 limit_request_line = 4094
34
35 # Limit the number of HTTP headers fields in a request.
36 limit_request_fields = 1024
37
38 # Limit the allowed size of an HTTP request header field.
39 # Value is a positive number or 0.
40 # Setting it to 0 will allow unlimited header field sizes.
41 limit_request_field_size = 0
42
43
44 # Timeout for graceful workers restart.
45 # After receiving a restart signal, workers have this much time to finish
46 # serving requests. Workers still alive after the timeout (starting from the
47 # receipt of the restart signal) are force killed.
48 graceful_timeout = 30
49
50
51 # The number of seconds to wait for requests on a Keep-Alive connection.
52 # Generally set in the 1-5 seconds range.
53 keepalive = 2
54
55
56 # SERVER MECHANICS
57 # None == system temp dir
58 # worker_tmp_dir is recommended to be set to some tmpfs
59 worker_tmp_dir = None
60 tmp_upload_dir = None
61
62 # Custom log format
63 access_log_format = (
64 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
65
66 # self adjust workers based on CPU count
67 # workers = multiprocessing.cpu_count() * 2 + 1
68
69
def post_fork(server, worker):
    """Gunicorn hook: runs in the master right after a worker process forks."""
    server.log.info("[<%-10s>] WORKER spawned", worker.pid)
72
73
def pre_fork(server, worker):
    """Gunicorn hook: runs just before a worker is forked. Intentionally a no-op."""
    pass
76
77
def pre_exec(server):
    """Gunicorn hook: runs just before the master re-executes itself."""
    server.log.info("Forked child, re-executing.")
80
81
def on_starting(server):
    """Gunicorn hook: runs once in the master before workers are spawned."""
    server.log.info("Server is starting.")
84
85
def when_ready(server):
    """Gunicorn hook: runs once the master socket is bound and listening."""
    server.log.info("Server is ready. Spawning workers")
88
89
def on_reload(server):
    """Gunicorn hook: runs on SIGHUP reload. Intentionally a no-op."""
    pass
92
93
def worker_int(worker):
    """Gunicorn hook: worker got INT/QUIT — log a stack dump of every thread."""
    worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)

    # Map thread idents to human-readable names for the dump below.
    thread_names = dict((th.ident, th.name) for th in threading.enumerate())
    dump = []
    for ident, frame in sys._current_frames().items():
        dump.append(
            "\n# Thread: %s(%d)" % (thread_names.get(ident, ""), ident))
        for fname, lineno, func, source in traceback.extract_stack(frame):
            dump.append('File: "%s", line %d, in %s' % (fname, lineno, func))
            if source:
                dump.append(" %s" % (source.strip()))
    worker.log.debug("\n".join(dump))
108
109
def worker_abort(worker):
    """Gunicorn hook: worker timed out and is being force-killed (SIGABRT)."""
    worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid)
112
113
def worker_exit(server, worker):
    """Gunicorn hook: runs in the master after a worker has exited."""
    worker.log.info("[<%-10s>] worker exit", worker.pid)
116
117
def child_exit(server, worker):
    """Gunicorn hook: runs in the master right after a worker process dies."""
    worker.log.info("[<%-10s>] worker child exit", worker.pid)
120
121
def pre_request(worker, req):
    """Gunicorn hook: stamp the request start time so post_request can report it."""
    worker.start_time = time.time()
    worker.log.debug(
        "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
126
127
def post_request(worker, req, environ, resp):
    """Gunicorn hook: log status and wall-clock time of the finished request."""
    # start_time is set by pre_request on the same worker object
    elapsed = time.time() - worker.start_time
    worker.log.debug(
        "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
        worker.nr, req.method, req.path, resp.status_code, elapsed)
133
134
class RhodeCodeLogger(Logger):
    """
    Custom Logger that allows some customization that gunicorn doesn't allow
    """

    # timestamp format used for every log line
    datefmt = r"%Y-%m-%d %H:%M:%S"

    def __init__(self, cfg):
        Logger.__init__(self, cfg)

    def now(self):
        """ return date in RhodeCode Log format """
        now = time.time()
        # int() truncates a float exactly like the Python-2-only long() did,
        # and keeps this working on Python 3 where long() no longer exists.
        msecs = int((now - int(now)) * 1000)
        return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
150
151
152 logger_class = RhodeCodeLogger
@@ -0,0 +1,28 b''
1
2 ==============================
3 Generate the Nix expressions
4 ==============================
5
6 Details can be found in the repository of `RhodeCode Enterprise CE`_ inside of
7 the file `docs/contributing/dependencies.rst`.
8
9 Start the environment as follows:
10
11 .. code:: shell
12
13 nix-shell pkgs/shell-generate.nix
14
15
16 Python dependencies
17 ===================
18
19 .. code:: shell
20
21 pip2nix generate --licenses
22 # or faster
23 nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
24
25
26 .. Links
27
28 .. _RhodeCode Enterprise CE: https://code.rhodecode.com/rhodecode-enterprise-ce
@@ -0,0 +1,17 b''
1 { pkgs
2 , pythonPackages
3 }:
4
5 rec {
6 pip2nix-src = pkgs.fetchzip {
7 url = https://github.com/johbo/pip2nix/archive/51e6fdae34d0e8ded9efeef7a8601730249687a6.tar.gz;
8 sha256 = "02a4jjgi7lsvf8mhrxsd56s9a3yg20081rl9bgc2m84w60v2gbz2";
9 };
10
11 pip2nix = import pip2nix-src {
12 inherit
13 pkgs
14 pythonPackages;
15 };
16
17 }
@@ -0,0 +1,45 b''
1 self: super: {
2 # bump GIT version
3 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 name = "git-2.17.1";
5 src = self.fetchurl {
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.17.1.tar.xz";
7 sha256 = "0pm6bdnrrm165k3krnazxcxadifk2gqi30awlbcf9fism1x6w4vr";
8 };
9
10 patches = [
11 ./git_patches/docbook2texi.patch
12 ./git_patches/symlinks-in-bin.patch
13 ./git_patches/git-sh-i18n.patch
14 ./git_patches/ssh-path.patch
15 ];
16
17 });
18
19 # Override subversion derivation to
20 # - activate python bindings
21 subversion =
22 let
23 subversionWithPython = super.subversion.override {
24 httpSupport = true;
25 pythonBindings = true;
26 python = self.python27Packages.python;
27 };
28 in
29 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
30 name = "subversion-1.10.2";
31 src = self.fetchurl {
32 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
33 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
34 };
35
36 ## use internal lz4/utf8proc because it is stable and shipped with SVN
37 configureFlags = oldAttrs.configureFlags ++ [
38 " --with-lz4=internal"
39 " --with-utf8proc=internal"
40 ];
41
42
43 });
44
45 }
@@ -0,0 +1,41 b''
1 { pkgs ? (import <nixpkgs> {})
2 , pythonPackages ? "python27Packages"
3 }:
4
5 with pkgs.lib;
6
7 let _pythonPackages = pythonPackages; in
8 let
9 pythonPackages = getAttr _pythonPackages pkgs;
10
11 pip2nix = import ./nix-common/pip2nix.nix {
12 inherit
13 pkgs
14 pythonPackages;
15 };
16
17 in
18
19 pkgs.stdenv.mkDerivation {
20 name = "pip2nix-generated";
21 buildInputs = [
22 pip2nix.pip2nix
23 pythonPackages.pip-tools
24 pkgs.apr
25 pkgs.aprutil
26 ];
27
28 shellHook = ''
29 runHook preShellHook
30 echo "Setting SVN_* variables"
31 export SVN_LIBRARY_PATH=${pkgs.subversion}/lib
32 export SVN_HEADER_PATH=${pkgs.subversion.dev}/include
33 runHook postShellHook
34 '';
35
36 preShellHook = ''
37 echo "Starting Generate Shell"
38 # Custom prompt to distinguish from other dev envs.
39 export PS1="\n\[\033[1;32m\][Generate-shell:\w]$\[\033[0m\] "
40 '';
41 }
@@ -0,0 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -0,0 +1,146 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 import os
22 import time
23 import datetime
24 import msgpack
25 import logging
26 import traceback
27 import tempfile
28
29
30 log = logging.getLogger(__name__)
31
32 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
33 global_prefix = 'vcsserver'
34
35
def exc_serialize(exc_id, tb, exc_type):
    """Pack exception metadata with msgpack; returns (packed_bytes, raw_dict)."""
    data = {
        'version': 'v1',
        'exc_id': exc_id,
        'exc_utc_date': datetime.datetime.utcnow().isoformat(),
        'exc_timestamp': repr(time.time()),
        'exc_message': tb,
        'exc_type': exc_type,
    }
    packed = msgpack.packb(data)
    return packed, data
47
48
def exc_unserialize(tb):
    """Inverse of exc_serialize: unpack a stored msgpack blob back to a dict."""
    return msgpack.unpackb(tb)
51
52
def get_exc_store():
    """
    Get and create exception store if it's not existing
    """
    exc_store_dir = 'rc_exception_store_v1'
    # fallback
    _exc_store_path = os.path.join(tempfile.gettempdir(), exc_store_dir)

    exc_store_dir = '' # TODO: need a persistent cross instance store here
    # NOTE(review): the branch below is currently dead (exc_store_dir is always
    # ''), and when enabled it would join exc_store_dir with itself rather than
    # with the store name — looks unintended; revisit when a persistent
    # cross-instance directory is actually wired in.
    if exc_store_dir:
        _exc_store_path = os.path.join(exc_store_dir, exc_store_dir)

    _exc_store_path = os.path.abspath(_exc_store_path)
    if not os.path.isdir(_exc_store_path):
        os.makedirs(_exc_store_path)
        log.debug('Initializing exceptions store at %s', _exc_store_path)
    return _exc_store_path
70
71
def _store_exception(exc_id, exc_info, prefix):
    """Format exc_info and write it, msgpack-serialized, into the exc store."""
    exc_type, exc_value, exc_traceback = exc_info
    formatted_tb = ''.join(traceback.format_exception(
        exc_type, exc_value, exc_traceback, None))

    type_name = exc_type.__name__
    store_path = get_exc_store()
    packed, org_data = exc_serialize(exc_id, formatted_tb, type_name)
    # file name encodes id, prefix and timestamp so lookups can prefix-match
    exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
    if not os.path.isdir(store_path):
        os.makedirs(store_path)
    target = os.path.join(store_path, exc_pref_id)
    with open(target, 'wb') as f:
        f.write(packed)
    log.debug('Stored generated exception %s as: %s', exc_id, target)
87
88
def store_exception(exc_id, exc_info, prefix=global_prefix):
    """Best-effort wrapper around _store_exception: logs but never raises."""
    try:
        _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
    except Exception:
        # storing must never take down the server; swallow after logging
        log.exception('Failed to store exception `%s` information', exc_id)
96
97
def _find_exc_file(exc_id, prefix=global_prefix):
    """
    Locate a stored exception file by prefix-matching its name.

    :param exc_id: exception id the file name starts with
    :param prefix: optional extra component appended to the search pattern
    :return: full path of the first matching file, or None when not found
    """
    exc_store_path = get_exc_store()
    if prefix:
        search_pattern = '{}_{}'.format(exc_id, prefix)
    else:
        # search without a prefix
        search_pattern = '{}'.format(exc_id)

    # scan the store for the first file matching the pattern; the original
    # for/else with a dead `continue` statement is replaced by early returns
    for fname in os.listdir(exc_store_path):
        if fname.startswith(search_pattern):
            return os.path.join(exc_store_path, fname)
    return None
116
117
def _read_exception(exc_id, prefix):
    """Read and unpack a stored exception; None when no file matches."""
    exc_file = _find_exc_file(exc_id=exc_id, prefix=prefix)
    if not exc_file:
        log.debug('Exception File `%s` not found', exc_file)
        return None
    with open(exc_file, 'rb') as f:
        return exc_unserialize(f.read())
126
127
def read_exception(exc_id, prefix=global_prefix):
    """Best-effort wrapper around _read_exception: logs and returns None on error."""
    try:
        return _read_exception(exc_id=exc_id, prefix=prefix)
    except Exception:
        # reading must never take down the server; swallow after logging
        log.exception('Failed to read exception `%s` information', exc_id)
        return None
135
136
def delete_exception(exc_id, prefix=global_prefix):
    """Best-effort removal of a stored exception file: logs but never raises."""
    try:
        found = _find_exc_file(exc_id, prefix=prefix)
        if found:
            os.remove(found)
    except Exception:
        # deleting must never take down the server; swallow after logging
        log.exception('Failed to remove exception `%s` information', exc_id)
@@ -0,0 +1,65 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 import logging
22
23 from repoze.lru import LRUCache
24
25 from vcsserver.utils import safe_str
26
27 log = logging.getLogger(__name__)
28
29
class LRUDict(LRUCache):
    """
    Wrapper to provide partial dict access
    """

    # sentinel that distinguishes "key absent" from a stored falsy value
    _MISSING = object()

    def __setitem__(self, key, value):
        return self.put(key, value)

    def __getitem__(self, key):
        return self.get(key)

    def __contains__(self, key):
        # bool(self.get(key)) wrongly reported keys holding falsy values
        # (0, '', None, empty containers) as absent; compare with a sentinel
        # default instead so containment reflects actual key presence.
        return self.get(key, self._MISSING) is not self._MISSING

    def __delitem__(self, key):
        del self.data[key]

    def keys(self):
        return self.data.keys()
49
50
class LRUDictDebug(LRUDict):
    """
    Wrapper to provide some debug options
    """
    def _report_keys(self):
        """Log every held key plus a used/capacity counter."""
        elems_cnt = '%s/%s' % (len(self.keys()), self.size)
        # trick for pformat print it more nicely
        fmt = '\n'
        for idx, key in enumerate(self.keys()):
            fmt += '%s - %s\n' % (idx + 1, safe_str(key))
        log.debug('current LRU keys (%s):%s' % (elems_cnt, fmt))

    def __getitem__(self, key):
        # report current content on every read, then delegate to the LRU
        self._report_keys()
        return self.get(key)
@@ -0,0 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19 from dogpile.cache import register_backend
20
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
24
25 log = logging.getLogger(__name__)
26
27 from . import region_meta
28 from .util import key_generator, get_default_cache_settings, make_region
29
30
def configure_dogpile_cache(settings):
    """Create and register a dogpile cache region per rc_cache.* namespace."""
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # region name is the first dotted component, e.g. `repo_object.backend`
    avail_regions = set(key.split('.', 1)[0] for key in rc_cache_data.keys())
    log.debug('dogpile: found following cache regions: %s', avail_regions)

    # register them into namespace
    for region_name in avail_regions:
        new_region = make_region(
            name=region_name,
            function_key_generator=key_generator
        )
        new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
        log.debug('dogpile: registering a new region %s[%s]',
                  region_name, new_region.__dict__)
        region_meta.dogpile_cache_regions[region_name] = new_region
57
58
def includeme(config):
    """Pyramid plug point: wire up dogpile caching from registry settings."""
    configure_dogpile_cache(config.registry.settings)
@@ -0,0 +1,51 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import logging
19
20 from dogpile.cache.backends import memory as memory_backend
21 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
22
23
24 _default_max_size = 1024
25
26 log = logging.getLogger(__name__)
27
28
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """Dogpile memory backend whose cache dict is size-bounded by an LRU."""
    # values are stored as-is, no pickling round-trip
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        # the debug flavour logs held keys on every read
        if arguments.pop('log_key_count', None):
            dict_cls = LRUDictDebug
        else:
            dict_cls = LRUDict

        arguments['cache_dict'] = dict_cls(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)
@@ -0,0 +1,26 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import tempfile
20
# Defaults merged into every dogpile region configuration; `cache_dir`
# falls back to a per-system temp location when the .ini provides none.
dogpile_config_defaults = {
    'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
}

# GLOBAL TO STORE ALL REGISTERED REGIONS
dogpile_cache_regions = {}
@@ -0,0 +1,136 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import os
19 import logging
20 import functools
21
22 from vcsserver.utils import safe_str, sha1
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
25
26 log = logging.getLogger(__name__)
27
28
class RhodeCodeCacheRegion(CacheRegion):
    """CacheRegion variant adding a cheap, condition-gated caching decorator."""

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator that will not touch any dogpile internals
        if `condition` isn't met. This works a bit differently than
        `should_cache_fn`, and it's faster in cases where we never want to
        compute cached values at all.
        """
        # expiration_time may be a callable producing the timeout lazily
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def decorator(fn):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, fn)
            else:
                key_generator = function_key_generator(namespace, fn, to_str=to_str)

            @functools.wraps(fn)
            def decorate(*arg, **kw):
                key = key_generator(*arg, **kw)

                @functools.wraps(fn)
                def creator():
                    return fn(*arg, **kw)

                # fast path: bypass dogpile entirely when caching is disabled
                if not condition:
                    return creator()

                timeout = expiration_time() if expiration_time_is_callable \
                    else expiration_time

                return self.get_or_create(key, creator, timeout, should_cache_fn)

            # helpers attached to the decorated function for manual
            # cache management (invalidate/set/get/refresh by call args)
            def invalidate(*arg, **kw):
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                key = key_generator(*arg, **kw)
                return self.get(key)

            def refresh(*arg, **kw):
                # recompute unconditionally and store the fresh value
                key = key_generator(*arg, **kw)
                value = fn(*arg, **kw)
                self.set(key, value)
                return value

            decorate.set = set_
            decorate.invalidate = invalidate
            decorate.refresh = refresh
            decorate.get = get
            decorate.original = fn
            decorate.key_generator = key_generator

            return decorate

        return decorator
99
100
def make_region(*arg, **kw):
    """Factory for the project-flavoured CacheRegion subclass."""
    return RhodeCodeCacheRegion(*arg, **kw)
103
104
def get_default_cache_settings(settings, prefixes=None):
    """Collect settings whose keys start with any prefix, with the prefix
    stripped from the resulting key and string values whitespace-trimmed."""
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if not key.startswith(prefix):
                continue
            name = key.split(prefix)[1].strip()
            val = settings[key]
            # NOTE: basestring keeps py2 unicode values covered as well
            if isinstance(val, basestring):
                val = val.strip()
            cache_settings[name] = val
    return cache_settings
117
118
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(safe_str(arg) for arg in args)
    return sha1(joined)
124
125
def key_generator(namespace, fn):
    """Build a key function producing `<namespace>:<fn-name>_<args-hash>`."""
    fname = fn.__name__

    def generate_key(*args):
        namespace_pref = namespace or 'default'
        arg_key = compute_key_from_params(*args)
        return "{}:{}_{}".format(namespace_pref, fname, arg_key)

    return generate_key
@@ -1,6 +1,6 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.12.4
2 current_version = 4.13.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
6
6
@@ -1,16 +1,14 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.12.4
13 version = 4.13.0
16
14
@@ -1,18 +1,18 b''
1
1
2 .PHONY: clean test test-clean test-only
2 .PHONY: clean test test-clean test-only
3
3
4
4
5 clean:
5 clean:
6 make test-clean
6 make test-clean
7 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' \) -exec rm '{}' ';'
7 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' \) -exec rm '{}' ';'
8
8
9 test:
9 test:
10 make test-clean
10 make test-clean
11 make test-only
11 make test-only
12
12
13 test-clean:
13 test-clean:
14 rm -rf coverage.xml htmlcov junit.xml pylint.log result
14 rm -rf coverage.xml htmlcov junit.xml pylint.log result
15 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
15 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
16
16
17 test-only:
17 test-only:
18 PYTHONHASHSEED=random py.test -vv -r xw --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
18 PYTHONHASHSEED=random py.test -vv -r xw -p no:sugar --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
@@ -1,1 +1,79 b''
1 development_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 0.0.0.0
10 port = 9900
11
12 use = egg:waitress#main
13
14
15 [app:main]
16 use = egg:rhodecode-vcsserver
17
18 pyramid.default_locale_name = en
19 pyramid.includes =
20
21 ## default locale used by VCS systems
22 locale = en_US.UTF-8
23
24
25 ## path to binaries for vcsserver, it should be set by the installer
26 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
27 core.binary_dir = ""
28
29 ## cache region for storing repo_objects cache
30 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
31 ## cache auto-expires after N seconds
32 rc_cache.repo_object.expiration_time = 300
33 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
34 rc_cache.repo_object.max_size = 100
35
36
37 ################################
38 ### LOGGING CONFIGURATION ####
39 ################################
40 [loggers]
41 keys = root, vcsserver
42
43 [handlers]
44 keys = console
45
46 [formatters]
47 keys = generic
48
49 #############
50 ## LOGGERS ##
51 #############
52 [logger_root]
53 level = NOTSET
54 handlers = console
55
56 [logger_vcsserver]
57 level = DEBUG
58 handlers =
59 qualname = vcsserver
60 propagate = 1
61
62
63 ##############
64 ## HANDLERS ##
65 ##############
66
67 [handler_console]
68 class = StreamHandler
69 args = (sys.stderr,)
70 level = DEBUG
71 formatter = generic
72
73 ################
74 ## FORMATTERS ##
75 ################
76
77 [formatter_generic]
78 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
79 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,1 +1,100 b''
1 production_http.ini No newline at end of file
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
5
6
7 [server:main]
8 ## COMMON ##
9 host = 127.0.0.1
10 port = 9900
11
12
13 ##########################
14 ## GUNICORN WSGI SERVER ##
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 workers = 2
21 ## process name
22 proc_name = rhodecode_vcsserver
23 ## type of worker class, currently `sync` is the only option allowed.
24 worker_class = sync
25 ## The maximum number of simultaneous clients. Valid only for Gevent
26 #worker_connections = 10
27 ## max number of requests that worker will handle before being gracefully
28 ## restarted, could prevent memory leaks
29 max_requests = 1000
30 max_requests_jitter = 30
31 ## amount of time a worker can spend with handling a request before it
32 ## gets killed and restarted. Set to 6hrs
33 timeout = 21600
34
35
36 [app:main]
37 use = egg:rhodecode-vcsserver
38
39 pyramid.default_locale_name = en
40 pyramid.includes =
41
42 ## default locale used by VCS systems
43 locale = en_US.UTF-8
44
45
46 ## path to binaries for vcsserver, it should be set by the installer
47 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
48 core.binary_dir = ""
49
50 ## cache region for storing repo_objects cache
51 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
52 ## cache auto-expires after N seconds
53 rc_cache.repo_object.expiration_time = 300
54 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
55 rc_cache.repo_object.max_size = 100
56
57
58 ################################
59 ### LOGGING CONFIGURATION ####
60 ################################
61 [loggers]
62 keys = root, vcsserver
63
64 [handlers]
65 keys = console
66
67 [formatters]
68 keys = generic
69
70 #############
71 ## LOGGERS ##
72 #############
73 [logger_root]
74 level = NOTSET
75 handlers = console
76
77 [logger_vcsserver]
78 level = DEBUG
79 handlers =
80 qualname = vcsserver
81 propagate = 1
82
83
84 ##############
85 ## HANDLERS ##
86 ##############
87
88 [handler_console]
89 class = StreamHandler
90 args = (sys.stderr,)
91 level = DEBUG
92 formatter = generic
93
94 ################
95 ## FORMATTERS ##
96 ################
97
98 [formatter_generic]
99 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
100 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,166 +1,178 b''
1 # Nix environment for the community edition
1 # Nix environment for the community edition
2 #
2 #
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 # derivation. For advanced tweaks to pimp up the development environment we use
4 # derivation. For advanced tweaks to pimp up the development environment we use
5 # "shell.nix" so that it does not have to clutter this file.
5 # "shell.nix" so that it does not have to clutter this file.
6
6
7 { pkgs ? (import <nixpkgs> {})
7 args@
8 , pythonPackages ? "python27Packages"
8 { pythonPackages ? "python27Packages"
9 , pythonExternalOverrides ? self: super: {}
9 , pythonExternalOverrides ? self: super: {}
10 , doCheck ? true
10 , doCheck ? false
11 , ...
11 }:
12 }:
12
13
13 let pkgs_ = pkgs; in
14 let pkgs_ = (import <nixpkgs> {}); in
14
15
15 let
16 let
16 pkgs = pkgs_.overridePackages (self: super: {
17
17 # bump GIT version
18 # TODO: Currently we ignore the passed in pkgs, instead we should use it
18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 # somehow as a base and apply overlays to it.
19 name = "git-2.16.4";
20 pkgs = import <nixpkgs> {
20 src = pkgs.fetchurl {
21 overlays = [
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.16.4.tar.xz";
22 (import ./pkgs/overlays.nix)
22 sha256 = "0cnmidjvbdf81mybcvxvl0c2r2x2nvq2jj2dl59dmrc7qklv0sbf";
23 ];
24 inherit (pkgs_)
25 system;
23 };
26 };
24
27
25 patches = [
28 # Works with the new python-packages, still can fallback to the old
26 ./pkgs/git_patches/docbook2texi.patch
29 # variant.
27 ./pkgs/git_patches/symlinks-in-bin.patch
30 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
28 ./pkgs/git_patches/git-sh-i18n.patch
31 self: basePythonPackages.override (a: { inherit self; }));
29 ./pkgs/git_patches/ssh-path.patch
30 ];
31
32 });
33
34 # Override subversion derivation to
35 # - activate python bindings
36 subversion = let
37 subversionWithPython = super.subversion.override {
38 httpSupport = true;
39 pythonBindings = true;
40 python = self.python27Packages.python;
41 };
42
43 in
44
32
45 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
33 # Evaluates to the last segment of a file system path.
46 name = "subversion-1.9.7";
34 basename = path: with pkgs.lib; last (splitString "/" path);
47 src = pkgs.fetchurl {
48 url = "https://www.apache.org/dist/subversion/subversion-1.9.7.tar.gz";
49 sha256 = "0g3cs2h008z8ymgkhbk54jp87bjh7y049rn42igj881yi2f20an7";
50 };
51
52 });
53
54 });
55
35
56 inherit (pkgs.lib) fix extends;
36 # source code filter used as arugment to builtins.filterSource.
57 basePythonPackages = with builtins; if isAttrs pythonPackages
58 then pythonPackages
59 else getAttr pythonPackages pkgs;
60
61 elem = builtins.elem;
62 basename = path: with pkgs.lib; last (splitString "/" path);
63 startsWith = prefix: full: let
64 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
65 in actualPrefix == prefix;
66
67 src-filter = path: type: with pkgs.lib;
37 src-filter = path: type: with pkgs.lib;
68 let
38 let
69 ext = last (splitString "." path);
39 ext = last (splitString "." path);
70 in
40 in
71 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
41 !builtins.elem (basename path) [
72 "node_modules" "build" "data" "tmp"] &&
42 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
73 !elem ext ["egg-info" "pyc"] &&
43 "bower_components" "node_modules"
74 !startsWith "result" path;
44 "build" "data" "result" "tmp"] &&
45 !builtins.elem ext ["egg-info" "pyc"] &&
46 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
47 # it would still be good to restore it since we want to ignore "result-*".
48 !hasPrefix "result" path;
75
49
50 sources =
51 let
52 inherit (pkgs.lib) all isString attrValues;
53 sourcesConfig = pkgs.config.rc.sources or {};
54 in
55 # Ensure that sources are configured as strings. Using a path
56 # would result in a copy into the nix store.
57 assert all isString (attrValues sourcesConfig);
58 sourcesConfig;
59
60 version = builtins.readFile "${rhodecode-vcsserver-src}/vcsserver/VERSION";
76 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
61 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
77
62
78 pythonGeneratedPackages = self: basePythonPackages.override (a: {
79 inherit self;
80 }) // (scopedImport {
81 self = self;
82 super = basePythonPackages;
83 inherit pkgs;
84 inherit (pkgs) fetchurl fetchgit;
85 } ./pkgs/python-packages.nix);
86
87 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
88 inherit basePythonPackages pkgs;
89 };
90
91 version = builtins.readFile ./vcsserver/VERSION;
92
93 pythonLocalOverrides = self: super: {
63 pythonLocalOverrides = self: super: {
94 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
64 rhodecode-vcsserver =
95 inherit doCheck version;
65 let
66 releaseName = "RhodeCodeVCSServer-${version}";
67 in super.rhodecode-vcsserver.override (attrs: {
68 inherit
69 doCheck
70 version;
96
71
97 name = "rhodecode-vcsserver-${version}";
72 name = "rhodecode-vcsserver-${version}";
98 releaseName = "RhodeCodeVCSServer-${version}";
73 releaseName = releaseName;
99 src = rhodecode-vcsserver-src;
74 src = rhodecode-vcsserver-src;
100 dontStrip = true; # prevent strip, we don't need it.
75 dontStrip = true; # prevent strip, we don't need it.
101
76
102 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
77 # expose following attributed outside
103 pkgs.git
104 pkgs.subversion
105 ]);
106
107 # TODO: johbo: Make a nicer way to expose the parts. Maybe
108 # pkgs/default.nix?
109 passthru = {
78 passthru = {
110 pythonPackages = self;
79 pythonPackages = self;
111 };
80 };
112
81
113 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
82 propagatedBuildInputs =
83 attrs.propagatedBuildInputs or [] ++ [
84 pkgs.git
85 pkgs.subversion
86 ];
87
88 # set some default locale env variables
89 LC_ALL = "en_US.UTF-8";
90 LOCALE_ARCHIVE =
91 if pkgs.stdenv.isLinux
92 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
93 else "";
94
95 # Add bin directory to path so that tests can find 'vcsserver'.
114 preCheck = ''
96 preCheck = ''
115 export PATH="$out/bin:$PATH"
97 export PATH="$out/bin:$PATH"
116 '';
98 '';
117
99
118 # put custom attrs here
100 # custom check phase for testing
119 checkPhase = ''
101 checkPhase = ''
120 runHook preCheck
102 runHook preCheck
121 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
103 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
122 runHook postCheck
104 runHook postCheck
123 '';
105 '';
124
106
107 postCheck = ''
108 echo "Cleanup of vcsserver/tests"
109 rm -rf $out/lib/${self.python.libPrefix}/site-packages/vcsserver/tests
110 '';
111
125 postInstall = ''
112 postInstall = ''
126 echo "Writing meta information for rccontrol to nix-support/rccontrol"
113 echo "Writing vcsserver meta information for rccontrol to nix-support/rccontrol"
127 mkdir -p $out/nix-support/rccontrol
114 mkdir -p $out/nix-support/rccontrol
128 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
115 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
129 echo "DONE: Meta information for rccontrol written"
116 echo "DONE: vcsserver meta information for rccontrol written"
117
118 mkdir -p $out/etc
119 cp configs/production.ini $out/etc
120 echo "DONE: saved vcsserver production.ini into $out/etc"
130
121
131 # python based programs need to be wrapped
122 # python based programs need to be wrapped
123 mkdir -p $out/bin
124 ln -s ${self.python}/bin/python $out/bin
132 ln -s ${self.pyramid}/bin/* $out/bin/
125 ln -s ${self.pyramid}/bin/* $out/bin/
133 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
126 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
134
127
135 # Symlink version control utilities
128 # Symlink version control utilities
136 #
137 # We ensure that always the correct version is available as a symlink.
129 # We ensure that always the correct version is available as a symlink.
138 # So that users calling them via the profile path will always use the
130 # So that users calling them via the profile path will always use the
139 # correct version.
131 # correct version.
140 ln -s ${self.python}/bin/python $out/bin
132
141 ln -s ${pkgs.git}/bin/git $out/bin
133 ln -s ${pkgs.git}/bin/git $out/bin
142 ln -s ${self.mercurial}/bin/hg $out/bin
134 ln -s ${self.mercurial}/bin/hg $out/bin
143 ln -s ${pkgs.subversion}/bin/svn* $out/bin
135 ln -s ${pkgs.subversion}/bin/svn* $out/bin
136 echo "DONE: created symlinks into $out/bin"
144
137
145 for file in $out/bin/*;
138 for file in $out/bin/*;
146 do
139 do
147 wrapProgram $file \
140 wrapProgram $file \
148 --set PATH $PATH \
141 --prefix PATH : $PATH \
149 --set PYTHONPATH $PYTHONPATH \
142 --prefix PYTHONPATH : $PYTHONPATH \
150 --set PYTHONHASHSEED random
143 --set PYTHONHASHSEED random
151 done
144 done
145 echo "DONE: vcsserver binary wrapping"
152
146
153 '';
147 '';
154
148
155 });
149 });
156 };
150 };
157
151
152 basePythonPackages = with builtins;
153 if isAttrs pythonPackages then
154 pythonPackages
155 else
156 getAttr pythonPackages pkgs;
157
158 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
159 inherit pkgs;
160 inherit (pkgs) fetchurl fetchgit fetchhg;
161 };
162
163 pythonVCSServerOverrides = import ./pkgs/python-packages-overrides.nix {
164 inherit pkgs basePythonPackages;
165 };
166
167
158 # Apply all overrides and fix the final package set
168 # Apply all overrides and fix the final package set
159 myPythonPackages =
169 myPythonPackagesUnfix = with pkgs.lib;
160 (fix
161 (extends pythonExternalOverrides
170 (extends pythonExternalOverrides
162 (extends pythonLocalOverrides
171 (extends pythonLocalOverrides
163 (extends pythonOverrides
172 (extends pythonVCSServerOverrides
164 pythonGeneratedPackages))));
173 (extends pythonGeneratedPackages
174 basePythonPackagesUnfix))));
175
176 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
165
177
166 in myPythonPackages.rhodecode-vcsserver
178 in myPythonPackages.rhodecode-vcsserver
@@ -1,60 +1,53 b''
1 # Overrides for the generated python-packages.nix
1 # Overrides for the generated python-packages.nix
2 #
2 #
3 # This function is intended to be used as an extension to the generated file
3 # This function is intended to be used as an extension to the generated file
4 # python-packages.nix. The main objective is to add needed dependencies of C
4 # python-packages.nix. The main objective is to add needed dependencies of C
5 # libraries and tweak the build instructions where needed.
5 # libraries and tweak the build instructions where needed.
6
6
7 { pkgs, basePythonPackages }:
7 { pkgs
8 , basePythonPackages
9 }:
8
10
9 let
11 let
10 sed = "sed -i";
12 sed = "sed -i";
13
11 in
14 in
12
15
13 self: super: {
16 self: super: {
14
17
15 Beaker = super.Beaker.override (attrs: {
18 "gevent" = super."gevent".override (attrs: {
16 patches = [
19 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
17 ./patch-beaker-lock-func-debug.diff
20 # NOTE: (marcink) odd requirements from gevent aren't set properly,
21 # thus we need to inject psutil manually
22 self."psutil"
18 ];
23 ];
19 });
24 });
20
25
21 subvertpy = super.subvertpy.override (attrs: {
26 "hgsubversion" = super."hgsubversion".override (attrs: {
22 # TODO: johbo: Remove the "or" once we drop 16.03 support
23 SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
24 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
27 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
28 pkgs.sqlite
29 #basePythonPackages.sqlite3
30 self.mercurial
31 ];
32 });
33
34 "subvertpy" = super."subvertpy".override (attrs: {
35 SVN_PREFIX = "${pkgs.subversion.dev}";
36 propagatedBuildInputs = [
37 pkgs.apr.dev
25 pkgs.aprutil
38 pkgs.aprutil
26 pkgs.subversion
39 pkgs.subversion
27 ];
40 ];
28 preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
29 ${sed} -e "s/'gcc'/'clang'/" setup.py
30 '';
31 });
41 });
32
42
33 hgsubversion = super.hgsubversion.override (attrs: {
43 "mercurial" = super."mercurial".override (attrs: {
34 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
44 propagatedBuildInputs = [
35 pkgs.sqlite
45 # self.python.modules.curses
36 basePythonPackages.sqlite3
37 ];
46 ];
38 });
47 });
39
48
40 mercurial = super.mercurial.override (attrs: {
49 # Avoid that base packages screw up the build process
41 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
50 inherit (basePythonPackages)
42 self.python.modules.curses
51 setuptools;
43 ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
44 pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
45 });
46
47 pyramid = super.pyramid.override (attrs: {
48 postFixup = ''
49 wrapPythonPrograms
50 # TODO: johbo: "wrapPython" adds this magic line which
51 # confuses pserve.
52 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
53 '';
54 });
55
56 # Avoid that setuptools is replaced, this leads to trouble
57 # with buildPythonPackage.
58 setuptools = basePythonPackages.setuptools;
59
52
60 }
53 }
This diff has been collapsed as it changes many lines, (988 lines changed) Show them Hide them
@@ -1,877 +1,947 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 Beaker = super.buildPythonPackage {
5
6 name = "Beaker-1.9.1";
6 self: super: {
7 buildInputs = with self; [];
7 "atomicwrites" = super.buildPythonPackage {
8 doCheck = false;
8 name = "atomicwrites-1.1.5";
9 propagatedBuildInputs = with self; [funcsigs];
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
12 md5 = "46fda0a164e2b0d24ccbda51a2310301";
13 };
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
17 };
18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.9.6";
20 buildInputs = with self; [];
21 doCheck = false;
9 doCheck = false;
22 propagatedBuildInputs = with self; [MarkupSafe];
23 src = fetchurl {
10 src = fetchurl {
24 url = "https://pypi.python.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
25 md5 = "6411537324b4dba0956aaa8109f3c77b";
12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
26 };
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
30 };
31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.7";
33 buildInputs = with self; [];
34 doCheck = false;
35 propagatedBuildInputs = with self; [MarkupSafe];
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
38 md5 = "5836cc997b1b773ef389bf6629c30e65";
39 };
13 };
40 meta = {
14 meta = {
41 license = [ pkgs.lib.licenses.mit ];
15 license = [ pkgs.lib.licenses.mit ];
42 };
16 };
43 };
17 };
44 MarkupSafe = super.buildPythonPackage {
18 "attrs" = super.buildPythonPackage {
45 name = "MarkupSafe-1.0";
19 name = "attrs-18.1.0";
46 buildInputs = with self; [];
47 doCheck = false;
20 doCheck = false;
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
21 src = fetchurl {
50 url = "https://pypi.python.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
51 md5 = "2fcedc9284d50e577b5192e8e3578355";
23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
52 };
53 meta = {
54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 };
56 };
57 PasteDeploy = super.buildPythonPackage {
58 name = "PasteDeploy-1.5.2";
59 buildInputs = with self; [];
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 md5 = "352b7205c78c8de4987578d19431af3b";
65 };
24 };
66 meta = {
25 meta = {
67 license = [ pkgs.lib.licenses.mit ];
26 license = [ pkgs.lib.licenses.mit ];
68 };
27 };
69 };
28 };
70 WebOb = super.buildPythonPackage {
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
71 name = "WebOb-1.7.4";
30 name = "backports.shutil-get-terminal-size-1.0.0";
72 buildInputs = with self; [];
73 doCheck = false;
31 doCheck = false;
74 propagatedBuildInputs = with self; [];
75 src = fetchurl {
32 src = fetchurl {
76 url = "https://pypi.python.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
77 md5 = "397e46892d7f199b1a07eb20a2d3d9bd";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
78 };
35 };
79 meta = {
36 meta = {
80 license = [ pkgs.lib.licenses.mit ];
37 license = [ pkgs.lib.licenses.mit ];
81 };
38 };
82 };
39 };
83 WebTest = super.buildPythonPackage {
40 "beautifulsoup4" = super.buildPythonPackage {
84 name = "WebTest-2.0.29";
41 name = "beautifulsoup4-4.6.3";
85 buildInputs = with self; [];
86 doCheck = false;
42 doCheck = false;
87 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
88 src = fetchurl {
43 src = fetchurl {
89 url = "https://pypi.python.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
90 md5 = "30b4cf0d340b9a5335fac4389e6f84fc";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
91 };
46 };
92 meta = {
47 meta = {
93 license = [ pkgs.lib.licenses.mit ];
48 license = [ pkgs.lib.licenses.mit ];
94 };
49 };
95 };
50 };
96 backports.shutil-get-terminal-size = super.buildPythonPackage {
51 "configobj" = super.buildPythonPackage {
97 name = "backports.shutil-get-terminal-size-1.0.0";
52 name = "configobj-5.0.6";
98 buildInputs = with self; [];
99 doCheck = false;
100 propagatedBuildInputs = with self; [];
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
103 md5 = "03267762480bd86b50580dc19dff3c66";
104 };
105 meta = {
106 license = [ pkgs.lib.licenses.mit ];
107 };
108 };
109 beautifulsoup4 = super.buildPythonPackage {
110 name = "beautifulsoup4-4.6.0";
111 buildInputs = with self; [];
112 doCheck = false;
53 doCheck = false;
113 propagatedBuildInputs = with self; [];
54 propagatedBuildInputs = [
55 self."six"
56 ];
114 src = fetchurl {
57 src = fetchurl {
115 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
116 md5 = "c17714d0f91a23b708a592cb3c697728";
59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
117 };
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
120 };
121 };
122 configobj = super.buildPythonPackage {
123 name = "configobj-5.0.6";
124 buildInputs = with self; [];
125 doCheck = false;
126 propagatedBuildInputs = with self; [six];
127 src = fetchurl {
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 };
60 };
131 meta = {
61 meta = {
132 license = [ pkgs.lib.licenses.bsdOriginal ];
62 license = [ pkgs.lib.licenses.bsdOriginal ];
133 };
63 };
134 };
64 };
135 cov-core = super.buildPythonPackage {
65 "cov-core" = super.buildPythonPackage {
136 name = "cov-core-1.15.0";
66 name = "cov-core-1.15.0";
137 buildInputs = with self; [];
138 doCheck = false;
67 doCheck = false;
139 propagatedBuildInputs = with self; [coverage];
68 propagatedBuildInputs = [
69 self."coverage"
70 ];
140 src = fetchurl {
71 src = fetchurl {
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
143 };
74 };
144 meta = {
75 meta = {
145 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
146 };
77 };
147 };
78 };
148 coverage = super.buildPythonPackage {
79 "coverage" = super.buildPythonPackage {
149 name = "coverage-3.7.1";
80 name = "coverage-3.7.1";
150 buildInputs = with self; [];
151 doCheck = false;
81 doCheck = false;
152 propagatedBuildInputs = with self; [];
153 src = fetchurl {
82 src = fetchurl {
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
83 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
84 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
156 };
85 };
157 meta = {
86 meta = {
158 license = [ pkgs.lib.licenses.bsdOriginal ];
87 license = [ pkgs.lib.licenses.bsdOriginal ];
159 };
88 };
160 };
89 };
161 decorator = super.buildPythonPackage {
90 "decorator" = super.buildPythonPackage {
162 name = "decorator-4.1.2";
91 name = "decorator-4.1.2";
163 buildInputs = with self; [];
164 doCheck = false;
92 doCheck = false;
165 propagatedBuildInputs = with self; [];
166 src = fetchurl {
93 src = fetchurl {
167 url = "https://pypi.python.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
168 md5 = "a0f7f4fe00ae2dde93494d90c192cf8c";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
169 };
96 };
170 meta = {
97 meta = {
171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 };
99 };
173 };
100 };
174 dulwich = super.buildPythonPackage {
101 "dogpile.cache" = super.buildPythonPackage {
175 name = "dulwich-0.13.0";
102 name = "dogpile.cache-0.6.6";
176 buildInputs = with self; [];
103 doCheck = false;
104 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
106 sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
107 };
108 meta = {
109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 };
111 };
112 "dogpile.core" = super.buildPythonPackage {
113 name = "dogpile.core-0.4.1";
177 doCheck = false;
114 doCheck = false;
178 propagatedBuildInputs = with self; [];
179 src = fetchurl {
115 src = fetchurl {
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 };
119 meta = {
120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 };
122 };
123 "dulwich" = super.buildPythonPackage {
124 name = "dulwich-0.13.0";
125 doCheck = false;
126 src = fetchurl {
127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
182 };
129 };
183 meta = {
130 meta = {
184 license = [ pkgs.lib.licenses.gpl2Plus ];
131 license = [ pkgs.lib.licenses.gpl2Plus ];
185 };
132 };
186 };
133 };
187 enum34 = super.buildPythonPackage {
134 "enum34" = super.buildPythonPackage {
188 name = "enum34-1.1.6";
135 name = "enum34-1.1.6";
189 buildInputs = with self; [];
190 doCheck = false;
136 doCheck = false;
191 propagatedBuildInputs = with self; [];
192 src = fetchurl {
137 src = fetchurl {
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
195 };
140 };
196 meta = {
141 meta = {
197 license = [ pkgs.lib.licenses.bsdOriginal ];
142 license = [ pkgs.lib.licenses.bsdOriginal ];
198 };
143 };
199 };
144 };
200 funcsigs = super.buildPythonPackage {
145 "funcsigs" = super.buildPythonPackage {
201 name = "funcsigs-1.0.2";
146 name = "funcsigs-1.0.2";
202 buildInputs = with self; [];
203 doCheck = false;
147 doCheck = false;
204 propagatedBuildInputs = with self; [];
205 src = fetchurl {
148 src = fetchurl {
206 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
207 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
208 };
151 };
209 meta = {
152 meta = {
210 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
211 };
154 };
212 };
155 };
213 gevent = super.buildPythonPackage {
156 "gevent" = super.buildPythonPackage {
214 name = "gevent-1.2.2";
157 name = "gevent-1.3.5";
215 buildInputs = with self; [];
216 doCheck = false;
158 doCheck = false;
217 propagatedBuildInputs = with self; [greenlet];
159 propagatedBuildInputs = [
160 self."greenlet"
161 ];
218 src = fetchurl {
162 src = fetchurl {
219 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
163 url = "https://files.pythonhosted.org/packages/e6/0a/fc345c6e6161f84484870dbcaa58e427c10bd9bdcd08a69bed3d6b398bf1/gevent-1.3.5.tar.gz";
220 md5 = "7f0baf355384fe5ff2ecf66853422554";
164 sha256 = "1w3gydxirgd2f60c5yv579w4903ds9s4g3587ik4jby97hgqc5bz";
221 };
165 };
222 meta = {
166 meta = {
223 license = [ pkgs.lib.licenses.mit ];
167 license = [ pkgs.lib.licenses.mit ];
224 };
168 };
225 };
169 };
226 gprof2dot = super.buildPythonPackage {
170 "gprof2dot" = super.buildPythonPackage {
227 name = "gprof2dot-2017.9.19";
171 name = "gprof2dot-2017.9.19";
228 buildInputs = with self; [];
229 doCheck = false;
172 doCheck = false;
230 propagatedBuildInputs = with self; [];
231 src = fetchurl {
173 src = fetchurl {
232 url = "https://pypi.python.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
233 md5 = "cda2d552bb0d0b9f16e6824a9aabd225";
175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
234 };
176 };
235 meta = {
177 meta = {
236 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
237 };
179 };
238 };
180 };
239 greenlet = super.buildPythonPackage {
181 "greenlet" = super.buildPythonPackage {
240 name = "greenlet-0.4.13";
182 name = "greenlet-0.4.13";
241 buildInputs = with self; [];
242 doCheck = false;
183 doCheck = false;
243 propagatedBuildInputs = with self; [];
244 src = fetchurl {
184 src = fetchurl {
245 url = "https://pypi.python.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
185 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
246 md5 = "6e0b9dd5385f81d478451ec8ed1d62b3";
186 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
187 };
188 meta = {
189 license = [ pkgs.lib.licenses.mit ];
190 };
191 };
192 "gunicorn" = super.buildPythonPackage {
193 name = "gunicorn-19.9.0";
194 doCheck = false;
195 src = fetchurl {
196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
247 };
198 };
248 meta = {
199 meta = {
249 license = [ pkgs.lib.licenses.mit ];
200 license = [ pkgs.lib.licenses.mit ];
250 };
201 };
251 };
202 };
252 gunicorn = super.buildPythonPackage {
203 "hg-evolve" = super.buildPythonPackage {
253 name = "gunicorn-19.7.1";
204 name = "hg-evolve-8.0.1";
254 buildInputs = with self; [];
205 doCheck = false;
206 src = fetchurl {
207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
209 };
210 meta = {
211 license = [ { fullName = "GPLv2+"; } ];
212 };
213 };
214 "hgsubversion" = super.buildPythonPackage {
215 name = "hgsubversion-1.9.2";
255 doCheck = false;
216 doCheck = false;
256 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = [
218 self."mercurial"
219 self."subvertpy"
220 ];
257 src = fetchurl {
221 src = fetchurl {
258 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
259 md5 = "174d3c3cd670a5be0404d84c484e590c";
223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
224 };
225 meta = {
226 license = [ pkgs.lib.licenses.gpl1 ];
227 };
228 };
229 "hupper" = super.buildPythonPackage {
230 name = "hupper-1.3";
231 doCheck = false;
232 src = fetchurl {
233 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
234 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
260 };
235 };
261 meta = {
236 meta = {
262 license = [ pkgs.lib.licenses.mit ];
237 license = [ pkgs.lib.licenses.mit ];
263 };
238 };
264 };
239 };
265 hg-evolve = super.buildPythonPackage {
240 "ipdb" = super.buildPythonPackage {
266 name = "hg-evolve-7.0.1";
241 name = "ipdb-0.11";
267 buildInputs = with self; [];
268 doCheck = false;
242 doCheck = false;
269 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = [
244 self."setuptools"
245 self."ipython"
246 ];
270 src = fetchurl {
247 src = fetchurl {
271 url = "https://pypi.python.org/packages/92/5c/4c216be1a08f326a12076b645f4892a2b0865810db1f4a0c9648f1f4c113/hg-evolve-7.0.1.tar.gz";
248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
272 md5 = "2dfa926846ea873a8406bababb06b277";
249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
273 };
250 };
274 meta = {
251 meta = {
275 license = [ { fullName = "GPLv2+"; } ];
252 license = [ pkgs.lib.licenses.bsdOriginal ];
276 };
253 };
277 };
254 };
278 hgsubversion = super.buildPythonPackage {
255 "ipython" = super.buildPythonPackage {
279 name = "hgsubversion-1.9";
256 name = "ipython-5.1.0";
280 buildInputs = with self; [];
281 doCheck = false;
257 doCheck = false;
282 propagatedBuildInputs = with self; [mercurial subvertpy];
258 propagatedBuildInputs = [
259 self."setuptools"
260 self."decorator"
261 self."pickleshare"
262 self."simplegeneric"
263 self."traitlets"
264 self."prompt-toolkit"
265 self."pygments"
266 self."pexpect"
267 self."backports.shutil-get-terminal-size"
268 self."pathlib2"
269 self."pexpect"
270 ];
283 src = fetchurl {
271 src = fetchurl {
284 url = "https://pypi.python.org/packages/db/26/7293a6c6b85e2a74ab452e9ba7f00b04ff0e440e6cd4f84131ac5d5e6b22/hgsubversion-1.9.tar.gz";
272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
285 md5 = "0c6f93ef12cc2e7fe67286f16bcc7211";
273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
286 };
274 };
287 meta = {
275 meta = {
288 license = [ pkgs.lib.licenses.gpl1 ];
276 license = [ pkgs.lib.licenses.bsdOriginal ];
289 };
277 };
290 };
278 };
291 hupper = super.buildPythonPackage {
279 "ipython-genutils" = super.buildPythonPackage {
292 name = "hupper-1.0";
280 name = "ipython-genutils-0.2.0";
293 buildInputs = with self; [];
294 doCheck = false;
281 doCheck = false;
295 propagatedBuildInputs = with self; [];
296 src = fetchurl {
282 src = fetchurl {
297 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
298 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
285 };
286 meta = {
287 license = [ pkgs.lib.licenses.bsdOriginal ];
288 };
289 };
290 "mako" = super.buildPythonPackage {
291 name = "mako-1.0.7";
292 doCheck = false;
293 propagatedBuildInputs = [
294 self."markupsafe"
295 ];
296 src = fetchurl {
297 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
298 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
299 };
299 };
300 meta = {
300 meta = {
301 license = [ pkgs.lib.licenses.mit ];
301 license = [ pkgs.lib.licenses.mit ];
302 };
302 };
303 };
303 };
304 infrae.cache = super.buildPythonPackage {
304 "markupsafe" = super.buildPythonPackage {
305 name = "infrae.cache-1.0.1";
305 name = "markupsafe-1.0";
306 buildInputs = with self; [];
307 doCheck = false;
306 doCheck = false;
308 propagatedBuildInputs = with self; [Beaker repoze.lru];
309 src = fetchurl {
307 src = fetchurl {
310 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
308 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
311 md5 = "b09076a766747e6ed2a755cc62088e32";
309 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
312 };
313 meta = {
314 license = [ pkgs.lib.licenses.zpt21 ];
315 };
316 };
317 ipdb = super.buildPythonPackage {
318 name = "ipdb-0.10.3";
319 buildInputs = with self; [];
320 doCheck = false;
321 propagatedBuildInputs = with self; [setuptools ipython];
322 src = fetchurl {
323 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
324 md5 = "def1f6ac075d54bdee07e6501263d4fa";
325 };
310 };
326 meta = {
311 meta = {
327 license = [ pkgs.lib.licenses.bsdOriginal ];
312 license = [ pkgs.lib.licenses.bsdOriginal ];
328 };
313 };
329 };
314 };
330 ipython = super.buildPythonPackage {
315 "mercurial" = super.buildPythonPackage {
331 name = "ipython-5.1.0";
316 name = "mercurial-4.6.2";
332 buildInputs = with self; [];
333 doCheck = false;
317 doCheck = false;
334 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
335 src = fetchurl {
318 src = fetchurl {
336 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
319 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
337 md5 = "47c8122420f65b58784cb4b9b4af35e3";
320 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
338 };
321 };
339 meta = {
322 meta = {
340 license = [ pkgs.lib.licenses.bsdOriginal ];
323 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
341 };
324 };
342 };
325 };
343 ipython-genutils = super.buildPythonPackage {
326 "mock" = super.buildPythonPackage {
344 name = "ipython-genutils-0.2.0";
327 name = "mock-1.0.1";
345 buildInputs = with self; [];
346 doCheck = false;
328 doCheck = false;
347 propagatedBuildInputs = with self; [];
348 src = fetchurl {
329 src = fetchurl {
349 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
330 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
350 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
331 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
351 };
332 };
352 meta = {
333 meta = {
353 license = [ pkgs.lib.licenses.bsdOriginal ];
334 license = [ pkgs.lib.licenses.bsdOriginal ];
354 };
335 };
355 };
336 };
356 mercurial = super.buildPythonPackage {
337 "more-itertools" = super.buildPythonPackage {
357 name = "mercurial-4.4.2";
338 name = "more-itertools-4.3.0";
358 buildInputs = with self; [];
359 doCheck = false;
339 doCheck = false;
360 propagatedBuildInputs = with self; [];
340 propagatedBuildInputs = [
341 self."six"
342 ];
361 src = fetchurl {
343 src = fetchurl {
362 url = "https://pypi.python.org/packages/d0/83/92a5fa662ba277128db305e39e7ea5a638f2f1cbbc6dc5fbf4c14aefae22/mercurial-4.4.2.tar.gz";
344 url = "https://files.pythonhosted.org/packages/88/ff/6d485d7362f39880810278bdc906c13300db05485d9c65971dec1142da6a/more-itertools-4.3.0.tar.gz";
363 md5 = "95769125cf7e9dbc341a983253acefcd";
345 sha256 = "17h3na0rdh8xq30w4b9pizgkdxmm51896bxw600x84jflg9vaxn4";
364 };
346 };
365 meta = {
347 meta = {
366 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
348 license = [ pkgs.lib.licenses.mit ];
367 };
349 };
368 };
350 };
369 mock = super.buildPythonPackage {
351 "msgpack-python" = super.buildPythonPackage {
370 name = "mock-1.0.1";
352 name = "msgpack-python-0.5.6";
371 buildInputs = with self; [];
372 doCheck = false;
353 doCheck = false;
373 propagatedBuildInputs = with self; [];
374 src = fetchurl {
354 src = fetchurl {
375 url = "https://pypi.python.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
355 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
376 md5 = "c3971991738caa55ec7c356bbc154ee2";
356 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
377 };
378 meta = {
379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 };
381 };
382 msgpack-python = super.buildPythonPackage {
383 name = "msgpack-python-0.4.8";
384 buildInputs = with self; [];
385 doCheck = false;
386 propagatedBuildInputs = with self; [];
387 src = fetchurl {
388 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
389 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
390 };
357 };
391 meta = {
358 meta = {
392 license = [ pkgs.lib.licenses.asl20 ];
359 license = [ pkgs.lib.licenses.asl20 ];
393 };
360 };
394 };
361 };
395 pathlib2 = super.buildPythonPackage {
362 "pastedeploy" = super.buildPythonPackage {
396 name = "pathlib2-2.3.0";
363 name = "pastedeploy-1.5.2";
397 buildInputs = with self; [];
398 doCheck = false;
364 doCheck = false;
399 propagatedBuildInputs = with self; [six scandir];
400 src = fetchurl {
365 src = fetchurl {
401 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
366 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
402 md5 = "89c90409d11fd5947966b6a30a47d18c";
367 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
368 };
369 meta = {
370 license = [ pkgs.lib.licenses.mit ];
371 };
372 };
373 "pathlib2" = super.buildPythonPackage {
374 name = "pathlib2-2.3.0";
375 doCheck = false;
376 propagatedBuildInputs = [
377 self."six"
378 self."scandir"
379 ];
380 src = fetchurl {
381 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
382 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
403 };
383 };
404 meta = {
384 meta = {
405 license = [ pkgs.lib.licenses.mit ];
385 license = [ pkgs.lib.licenses.mit ];
406 };
386 };
407 };
387 };
408 pexpect = super.buildPythonPackage {
388 "pexpect" = super.buildPythonPackage {
409 name = "pexpect-4.4.0";
389 name = "pexpect-4.6.0";
410 buildInputs = with self; [];
411 doCheck = false;
390 doCheck = false;
412 propagatedBuildInputs = with self; [ptyprocess];
391 propagatedBuildInputs = [
392 self."ptyprocess"
393 ];
413 src = fetchurl {
394 src = fetchurl {
414 url = "https://pypi.python.org/packages/fa/c3/60c0cbf96f242d0b47a82e9ca634dcd6dcb043832cf05e17540812e1c707/pexpect-4.4.0.tar.gz";
395 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
415 md5 = "e9b07f0765df8245ac72201d757baaef";
396 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
416 };
397 };
417 meta = {
398 meta = {
418 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
399 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
419 };
400 };
420 };
401 };
421 pickleshare = super.buildPythonPackage {
402 "pickleshare" = super.buildPythonPackage {
422 name = "pickleshare-0.7.4";
403 name = "pickleshare-0.7.4";
423 buildInputs = with self; [];
404 doCheck = false;
405 propagatedBuildInputs = [
406 self."pathlib2"
407 ];
408 src = fetchurl {
409 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
410 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
411 };
412 meta = {
413 license = [ pkgs.lib.licenses.mit ];
414 };
415 };
416 "plaster" = super.buildPythonPackage {
417 name = "plaster-1.0";
424 doCheck = false;
418 doCheck = false;
425 propagatedBuildInputs = with self; [pathlib2];
419 propagatedBuildInputs = [
420 self."setuptools"
421 ];
426 src = fetchurl {
422 src = fetchurl {
427 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
423 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
428 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
424 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
425 };
426 meta = {
427 license = [ pkgs.lib.licenses.mit ];
428 };
429 };
430 "plaster-pastedeploy" = super.buildPythonPackage {
431 name = "plaster-pastedeploy-0.6";
432 doCheck = false;
433 propagatedBuildInputs = [
434 self."pastedeploy"
435 self."plaster"
436 ];
437 src = fetchurl {
438 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
439 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
440 };
441 meta = {
442 license = [ pkgs.lib.licenses.mit ];
443 };
444 };
445 "pluggy" = super.buildPythonPackage {
446 name = "pluggy-0.6.0";
447 doCheck = false;
448 src = fetchurl {
449 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
450 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
429 };
451 };
430 meta = {
452 meta = {
431 license = [ pkgs.lib.licenses.mit ];
453 license = [ pkgs.lib.licenses.mit ];
432 };
454 };
433 };
455 };
434 plaster = super.buildPythonPackage {
456 "prompt-toolkit" = super.buildPythonPackage {
435 name = "plaster-1.0";
457 name = "prompt-toolkit-1.0.15";
436 buildInputs = with self; [];
437 doCheck = false;
458 doCheck = false;
438 propagatedBuildInputs = with self; [setuptools];
459 propagatedBuildInputs = [
460 self."six"
461 self."wcwidth"
462 ];
439 src = fetchurl {
463 src = fetchurl {
440 url = "https://pypi.python.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
464 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
441 md5 = "80e6beb4760c16fea31754babcc0576e";
465 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
442 };
466 };
443 meta = {
467 meta = {
444 license = [ pkgs.lib.licenses.mit ];
468 license = [ pkgs.lib.licenses.bsdOriginal ];
445 };
469 };
446 };
470 };
447 plaster-pastedeploy = super.buildPythonPackage {
471 "psutil" = super.buildPythonPackage {
448 name = "plaster-pastedeploy-0.4.2";
472 name = "psutil-5.4.6";
449 buildInputs = with self; [];
450 doCheck = false;
473 doCheck = false;
451 propagatedBuildInputs = with self; [PasteDeploy plaster];
452 src = fetchurl {
474 src = fetchurl {
453 url = "https://pypi.python.org/packages/2c/62/0daf9c0be958e785023e583e51baac15863699e956bfb3d448898d80edd8/plaster_pastedeploy-0.4.2.tar.gz";
475 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
454 md5 = "58fd7852002909378e818c9d5b71e90a";
476 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
455 };
456 meta = {
457 license = [ pkgs.lib.licenses.mit ];
458 };
459 };
460 prompt-toolkit = super.buildPythonPackage {
461 name = "prompt-toolkit-1.0.15";
462 buildInputs = with self; [];
463 doCheck = false;
464 propagatedBuildInputs = with self; [six wcwidth];
465 src = fetchurl {
466 url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
467 md5 = "8fe70295006dbc8afedd43e5eba99032";
468 };
477 };
469 meta = {
478 meta = {
470 license = [ pkgs.lib.licenses.bsdOriginal ];
479 license = [ pkgs.lib.licenses.bsdOriginal ];
471 };
480 };
472 };
481 };
473 ptyprocess = super.buildPythonPackage {
482 "ptyprocess" = super.buildPythonPackage {
474 name = "ptyprocess-0.5.2";
483 name = "ptyprocess-0.6.0";
475 buildInputs = with self; [];
476 doCheck = false;
484 doCheck = false;
477 propagatedBuildInputs = with self; [];
478 src = fetchurl {
485 src = fetchurl {
479 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
486 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
480 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
487 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
481 };
488 };
482 meta = {
489 meta = {
483 license = [ ];
490 license = [ ];
484 };
491 };
485 };
492 };
486 py = super.buildPythonPackage {
493 "py" = super.buildPythonPackage {
487 name = "py-1.5.2";
494 name = "py-1.5.3";
488 buildInputs = with self; [];
489 doCheck = false;
495 doCheck = false;
490 propagatedBuildInputs = with self; [];
491 src = fetchurl {
496 src = fetchurl {
492 url = "https://pypi.python.org/packages/90/e3/e075127d39d35f09a500ebb4a90afd10f9ef0a1d28a6d09abeec0e444fdd/py-1.5.2.tar.gz";
497 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
493 md5 = "279ca69c632069e1b71e11b14641ca28";
498 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
494 };
499 };
495 meta = {
500 meta = {
496 license = [ pkgs.lib.licenses.mit ];
501 license = [ pkgs.lib.licenses.mit ];
497 };
502 };
498 };
503 };
499 pygments = super.buildPythonPackage {
504 "pygments" = super.buildPythonPackage {
500 name = "pygments-2.2.0";
505 name = "pygments-2.2.0";
501 buildInputs = with self; [];
502 doCheck = false;
506 doCheck = false;
503 propagatedBuildInputs = with self; [];
504 src = fetchurl {
507 src = fetchurl {
505 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
508 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
506 md5 = "13037baca42f16917cbd5ad2fab50844";
509 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
507 };
510 };
508 meta = {
511 meta = {
509 license = [ pkgs.lib.licenses.bsdOriginal ];
512 license = [ pkgs.lib.licenses.bsdOriginal ];
510 };
513 };
511 };
514 };
512 pyramid = super.buildPythonPackage {
515 "pyramid" = super.buildPythonPackage {
513 name = "pyramid-1.9.1";
516 name = "pyramid-1.9.2";
514 buildInputs = with self; [];
515 doCheck = false;
517 doCheck = false;
516 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
518 propagatedBuildInputs = [
519 self."setuptools"
520 self."webob"
521 self."repoze.lru"
522 self."zope.interface"
523 self."zope.deprecation"
524 self."venusian"
525 self."translationstring"
526 self."pastedeploy"
527 self."plaster"
528 self."plaster-pastedeploy"
529 self."hupper"
530 ];
517 src = fetchurl {
531 src = fetchurl {
518 url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
532 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
519 md5 = "0163e19c58c2d12976a3b6fdb57e052d";
533 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
520 };
534 };
521 meta = {
535 meta = {
522 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 };
537 };
524 };
538 };
525 pyramid-jinja2 = super.buildPythonPackage {
539 "pyramid-mako" = super.buildPythonPackage {
526 name = "pyramid-jinja2-2.7";
540 name = "pyramid-mako-1.0.2";
527 buildInputs = with self; [];
528 doCheck = false;
541 doCheck = false;
529 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
542 propagatedBuildInputs = [
543 self."pyramid"
544 self."mako"
545 ];
530 src = fetchurl {
546 src = fetchurl {
531 url = "https://pypi.python.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
547 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
532 md5 = "c2f8b2cd7b73a6f1d9a311fcfaf4fb92";
548 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
533 };
549 };
534 meta = {
550 meta = {
535 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
551 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 };
552 };
537 };
553 };
538 pyramid-mako = super.buildPythonPackage {
554 "pytest" = super.buildPythonPackage {
539 name = "pyramid-mako-1.0.2";
555 name = "pytest-3.6.0";
540 buildInputs = with self; [];
541 doCheck = false;
542 propagatedBuildInputs = with self; [pyramid Mako];
543 src = fetchurl {
544 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
545 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
546 };
547 meta = {
548 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
549 };
550 };
551 pytest = super.buildPythonPackage {
552 name = "pytest-3.2.5";
553 buildInputs = with self; [];
554 doCheck = false;
556 doCheck = false;
555 propagatedBuildInputs = with self; [py setuptools];
557 propagatedBuildInputs = [
558 self."py"
559 self."six"
560 self."setuptools"
561 self."attrs"
562 self."more-itertools"
563 self."atomicwrites"
564 self."pluggy"
565 self."funcsigs"
566 ];
556 src = fetchurl {
567 src = fetchurl {
557 url = "https://pypi.python.org/packages/1f/f8/8cd74c16952163ce0db0bd95fdd8810cbf093c08be00e6e665ebf0dc3138/pytest-3.2.5.tar.gz";
568 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
558 md5 = "6dbe9bb093883f75394a689a1426ac6f";
569 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
559 };
560 meta = {
561 license = [ pkgs.lib.licenses.mit ];
562 };
563 };
564 pytest-catchlog = super.buildPythonPackage {
565 name = "pytest-catchlog-1.2.2";
566 buildInputs = with self; [];
567 doCheck = false;
568 propagatedBuildInputs = with self; [py pytest];
569 src = fetchurl {
570 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
571 md5 = "09d890c54c7456c818102b7ff8c182c8";
572 };
570 };
573 meta = {
571 meta = {
574 license = [ pkgs.lib.licenses.mit ];
572 license = [ pkgs.lib.licenses.mit ];
575 };
573 };
576 };
574 };
577 pytest-cov = super.buildPythonPackage {
575 "pytest-cov" = super.buildPythonPackage {
578 name = "pytest-cov-2.5.1";
576 name = "pytest-cov-2.5.1";
579 buildInputs = with self; [];
580 doCheck = false;
577 doCheck = false;
581 propagatedBuildInputs = with self; [pytest coverage];
578 propagatedBuildInputs = [
579 self."pytest"
580 self."coverage"
581 ];
582 src = fetchurl {
582 src = fetchurl {
583 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
583 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
584 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
585 };
585 };
586 meta = {
586 meta = {
587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 };
588 };
589 };
589 };
590 pytest-profiling = super.buildPythonPackage {
590 "pytest-profiling" = super.buildPythonPackage {
591 name = "pytest-profiling-1.2.11";
591 name = "pytest-profiling-1.3.0";
592 buildInputs = with self; [];
593 doCheck = false;
592 doCheck = false;
594 propagatedBuildInputs = with self; [six pytest gprof2dot];
593 propagatedBuildInputs = [
594 self."six"
595 self."pytest"
596 self."gprof2dot"
597 ];
595 src = fetchurl {
598 src = fetchurl {
596 url = "https://pypi.python.org/packages/c0/4a/b4aa786e93c07a86f1f87c581a36bf355a9e06a9da7e00dbd05047626bd2/pytest-profiling-1.2.11.tar.gz";
599 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
597 md5 = "9ef6b60248731be5d44477980408e8f7";
600 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
598 };
601 };
599 meta = {
602 meta = {
600 license = [ pkgs.lib.licenses.mit ];
603 license = [ pkgs.lib.licenses.mit ];
601 };
604 };
602 };
605 };
603 pytest-runner = super.buildPythonPackage {
606 "pytest-runner" = super.buildPythonPackage {
604 name = "pytest-runner-3.0";
607 name = "pytest-runner-4.2";
605 buildInputs = with self; [];
606 doCheck = false;
608 doCheck = false;
607 propagatedBuildInputs = with self; [];
608 src = fetchurl {
609 src = fetchurl {
609 url = "https://pypi.python.org/packages/65/b4/ae89338cd2d81e2cc54bd6db2e962bfe948f612303610d68ab24539ac2d1/pytest-runner-3.0.tar.gz";
610 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
610 md5 = "8f8363a52bbabc4cedd5e239beb2ba11";
611 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
611 };
612 };
612 meta = {
613 meta = {
613 license = [ pkgs.lib.licenses.mit ];
614 license = [ pkgs.lib.licenses.mit ];
614 };
615 };
615 };
616 };
616 pytest-sugar = super.buildPythonPackage {
617 "pytest-sugar" = super.buildPythonPackage {
617 name = "pytest-sugar-0.9.0";
618 name = "pytest-sugar-0.9.1";
618 buildInputs = with self; [];
619 doCheck = false;
619 doCheck = false;
620 propagatedBuildInputs = with self; [pytest termcolor];
620 propagatedBuildInputs = [
621 self."pytest"
622 self."termcolor"
623 ];
621 src = fetchurl {
624 src = fetchurl {
622 url = "https://pypi.python.org/packages/49/d8/c5ff6cca3ce2ebd8b73eec89779bf6b4a7737456a70e8ea4d44c1ff90f71/pytest-sugar-0.9.0.tar.gz";
625 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
623 md5 = "89fbff17277fa6a95a560a04b68cb9f9";
626 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
624 };
627 };
625 meta = {
628 meta = {
626 license = [ pkgs.lib.licenses.bsdOriginal ];
629 license = [ pkgs.lib.licenses.bsdOriginal ];
627 };
630 };
628 };
631 };
629 pytest-timeout = super.buildPythonPackage {
632 "pytest-timeout" = super.buildPythonPackage {
630 name = "pytest-timeout-1.2.0";
633 name = "pytest-timeout-1.2.1";
631 buildInputs = with self; [];
632 doCheck = false;
634 doCheck = false;
633 propagatedBuildInputs = with self; [pytest];
635 propagatedBuildInputs = [
636 self."pytest"
637 ];
634 src = fetchurl {
638 src = fetchurl {
635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
639 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
636 md5 = "83607d91aa163562c7ee835da57d061d";
640 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
637 };
641 };
638 meta = {
642 meta = {
639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
643 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
640 };
644 };
641 };
645 };
642 repoze.lru = super.buildPythonPackage {
646 "repoze.lru" = super.buildPythonPackage {
643 name = "repoze.lru-0.7";
647 name = "repoze.lru-0.7";
644 buildInputs = with self; [];
645 doCheck = false;
648 doCheck = false;
646 propagatedBuildInputs = with self; [];
647 src = fetchurl {
649 src = fetchurl {
648 url = "https://pypi.python.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
650 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
649 md5 = "c08cc030387e0b1fc53c5c7d964b35e2";
651 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
650 };
652 };
651 meta = {
653 meta = {
652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
654 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
653 };
655 };
654 };
656 };
655 rhodecode-vcsserver = super.buildPythonPackage {
657 "rhodecode-vcsserver" = super.buildPythonPackage {
656 name = "rhodecode-vcsserver-4.12.4";
658 name = "rhodecode-vcsserver-4.13.0";
657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
659 buildInputs = [
660 self."pytest"
661 self."py"
662 self."pytest-cov"
663 self."pytest-sugar"
664 self."pytest-runner"
665 self."pytest-profiling"
666 self."gprof2dot"
667 self."pytest-timeout"
668 self."mock"
669 self."webtest"
670 self."cov-core"
671 self."coverage"
672 self."configobj"
673 ];
658 doCheck = true;
674 doCheck = true;
659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
675 propagatedBuildInputs = [
676 self."configobj"
677 self."dogpile.cache"
678 self."dogpile.core"
679 self."decorator"
680 self."dulwich"
681 self."hgsubversion"
682 self."hg-evolve"
683 self."mako"
684 self."markupsafe"
685 self."mercurial"
686 self."msgpack-python"
687 self."pastedeploy"
688 self."psutil"
689 self."pyramid"
690 self."pyramid-mako"
691 self."pygments"
692 self."pathlib2"
693 self."repoze.lru"
694 self."simplejson"
695 self."subprocess32"
696 self."setproctitle"
697 self."subvertpy"
698 self."six"
699 self."translationstring"
700 self."webob"
701 self."zope.deprecation"
702 self."zope.interface"
703 self."gevent"
704 self."greenlet"
705 self."gunicorn"
706 self."waitress"
707 self."ipdb"
708 self."ipython"
709 self."pytest"
710 self."py"
711 self."pytest-cov"
712 self."pytest-sugar"
713 self."pytest-runner"
714 self."pytest-profiling"
715 self."gprof2dot"
716 self."pytest-timeout"
717 self."mock"
718 self."webtest"
719 self."cov-core"
720 self."coverage"
721 ];
660 src = ./.;
722 src = ./.;
661 meta = {
723 meta = {
662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
724 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
663 };
725 };
664 };
726 };
665 scandir = super.buildPythonPackage {
727 "scandir" = super.buildPythonPackage {
666 name = "scandir-1.7";
728 name = "scandir-1.9.0";
667 buildInputs = with self; [];
668 doCheck = false;
729 doCheck = false;
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
730 src = fetchurl {
671 url = "https://pypi.python.org/packages/13/bb/e541b74230bbf7a20a3949a2ee6631be299378a784f5445aa5d0047c192b/scandir-1.7.tar.gz";
731 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
672 md5 = "037e5f24d1a0e78b17faca72dea9555f";
732 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
673 };
733 };
674 meta = {
734 meta = {
675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
735 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 };
736 };
677 };
737 };
678 setuptools = super.buildPythonPackage {
738 "setproctitle" = super.buildPythonPackage {
679 name = "setuptools-30.1.0";
739 name = "setproctitle-1.1.10";
680 buildInputs = with self; [];
681 doCheck = false;
740 doCheck = false;
682 propagatedBuildInputs = with self; [];
683 src = fetchurl {
741 src = fetchurl {
684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
742 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
743 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
744 };
745 meta = {
746 license = [ pkgs.lib.licenses.bsdOriginal ];
747 };
748 };
749 "setuptools" = super.buildPythonPackage {
750 name = "setuptools-40.1.0";
751 doCheck = false;
752 src = fetchurl {
753 url = "https://files.pythonhosted.org/packages/5a/df/b2e3d9693bb0dcbeac516a73dd7a9eb82b126ae52e4a74605a9b01beddd5/setuptools-40.1.0.zip";
754 sha256 = "0w1blx5ajga5y15dci0mddk49cf2xpq0mp7rp7jrqr2diqk00ib6";
686 };
755 };
687 meta = {
756 meta = {
688 license = [ pkgs.lib.licenses.mit ];
757 license = [ pkgs.lib.licenses.mit ];
689 };
758 };
690 };
759 };
691 simplegeneric = super.buildPythonPackage {
760 "simplegeneric" = super.buildPythonPackage {
692 name = "simplegeneric-0.8.1";
761 name = "simplegeneric-0.8.1";
693 buildInputs = with self; [];
694 doCheck = false;
762 doCheck = false;
695 propagatedBuildInputs = with self; [];
696 src = fetchurl {
763 src = fetchurl {
697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
764 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
698 md5 = "f9c1fab00fd981be588fc32759f474e3";
765 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
699 };
766 };
700 meta = {
767 meta = {
701 license = [ pkgs.lib.licenses.zpt21 ];
768 license = [ pkgs.lib.licenses.zpl21 ];
702 };
769 };
703 };
770 };
704 simplejson = super.buildPythonPackage {
771 "simplejson" = super.buildPythonPackage {
705 name = "simplejson-3.11.1";
772 name = "simplejson-3.11.1";
706 buildInputs = with self; [];
707 doCheck = false;
773 doCheck = false;
708 propagatedBuildInputs = with self; [];
709 src = fetchurl {
774 src = fetchurl {
710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
775 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
776 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
712 };
777 };
713 meta = {
778 meta = {
714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
779 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
715 };
780 };
716 };
781 };
717 six = super.buildPythonPackage {
782 "six" = super.buildPythonPackage {
718 name = "six-1.11.0";
783 name = "six-1.11.0";
719 buildInputs = with self; [];
720 doCheck = false;
784 doCheck = false;
721 propagatedBuildInputs = with self; [];
722 src = fetchurl {
785 src = fetchurl {
723 url = "https://pypi.python.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
786 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
724 md5 = "d12789f9baf7e9fb2524c0c64f1773f8";
787 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
725 };
788 };
726 meta = {
789 meta = {
727 license = [ pkgs.lib.licenses.mit ];
790 license = [ pkgs.lib.licenses.mit ];
728 };
791 };
729 };
792 };
730 subprocess32 = super.buildPythonPackage {
793 "subprocess32" = super.buildPythonPackage {
731 name = "subprocess32-3.2.7";
794 name = "subprocess32-3.5.1";
732 buildInputs = with self; [];
733 doCheck = false;
795 doCheck = false;
734 propagatedBuildInputs = with self; [];
735 src = fetchurl {
796 src = fetchurl {
736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
797 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
737 md5 = "824c801e479d3e916879aae3e9c15e16";
798 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
738 };
799 };
739 meta = {
800 meta = {
740 license = [ pkgs.lib.licenses.psfl ];
801 license = [ pkgs.lib.licenses.psfl ];
741 };
802 };
742 };
803 };
743 subvertpy = super.buildPythonPackage {
804 "subvertpy" = super.buildPythonPackage {
744 name = "subvertpy-0.10.1";
805 name = "subvertpy-0.10.1";
745 buildInputs = with self; [];
746 doCheck = false;
806 doCheck = false;
747 propagatedBuildInputs = with self; [];
748 src = fetchurl {
807 src = fetchurl {
749 url = "https://pypi.python.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
808 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
750 md5 = "a70e03579902d480f5e9f8c570f6536b";
809 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
751 };
810 };
752 meta = {
811 meta = {
753 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
812 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
754 };
813 };
755 };
814 };
756 termcolor = super.buildPythonPackage {
815 "termcolor" = super.buildPythonPackage {
757 name = "termcolor-1.1.0";
816 name = "termcolor-1.1.0";
758 buildInputs = with self; [];
759 doCheck = false;
817 doCheck = false;
760 propagatedBuildInputs = with self; [];
761 src = fetchurl {
818 src = fetchurl {
762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
819 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
763 md5 = "043e89644f8909d462fbbfa511c768df";
820 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
764 };
821 };
765 meta = {
822 meta = {
766 license = [ pkgs.lib.licenses.mit ];
823 license = [ pkgs.lib.licenses.mit ];
767 };
824 };
768 };
825 };
769 traitlets = super.buildPythonPackage {
826 "traitlets" = super.buildPythonPackage {
770 name = "traitlets-4.3.2";
827 name = "traitlets-4.3.2";
771 buildInputs = with self; [];
772 doCheck = false;
828 doCheck = false;
773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
829 propagatedBuildInputs = [
830 self."ipython-genutils"
831 self."six"
832 self."decorator"
833 self."enum34"
834 ];
774 src = fetchurl {
835 src = fetchurl {
775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
836 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
837 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
777 };
838 };
778 meta = {
839 meta = {
779 license = [ pkgs.lib.licenses.bsdOriginal ];
840 license = [ pkgs.lib.licenses.bsdOriginal ];
780 };
841 };
781 };
842 };
782 translationstring = super.buildPythonPackage {
843 "translationstring" = super.buildPythonPackage {
783 name = "translationstring-1.3";
844 name = "translationstring-1.3";
784 buildInputs = with self; [];
785 doCheck = false;
845 doCheck = false;
786 propagatedBuildInputs = with self; [];
787 src = fetchurl {
846 src = fetchurl {
788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
847 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
848 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
790 };
849 };
791 meta = {
850 meta = {
792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
851 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
793 };
852 };
794 };
853 };
795 venusian = super.buildPythonPackage {
854 "venusian" = super.buildPythonPackage {
796 name = "venusian-1.1.0";
855 name = "venusian-1.1.0";
797 buildInputs = with self; [];
798 doCheck = false;
856 doCheck = false;
799 propagatedBuildInputs = with self; [];
800 src = fetchurl {
857 src = fetchurl {
801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
858 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
859 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
803 };
860 };
804 meta = {
861 meta = {
805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
862 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
806 };
863 };
807 };
864 };
808 waitress = super.buildPythonPackage {
865 "waitress" = super.buildPythonPackage {
809 name = "waitress-1.1.0";
866 name = "waitress-1.1.0";
810 buildInputs = with self; [];
811 doCheck = false;
867 doCheck = false;
812 propagatedBuildInputs = with self; [];
813 src = fetchurl {
868 src = fetchurl {
814 url = "https://pypi.python.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
869 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
815 md5 = "0f1eb7fdfdbf2e6d18decbda1733045c";
870 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
816 };
871 };
817 meta = {
872 meta = {
818 license = [ pkgs.lib.licenses.zpt21 ];
873 license = [ pkgs.lib.licenses.zpl21 ];
819 };
874 };
820 };
875 };
821 wcwidth = super.buildPythonPackage {
876 "wcwidth" = super.buildPythonPackage {
822 name = "wcwidth-0.1.7";
877 name = "wcwidth-0.1.7";
823 buildInputs = with self; [];
824 doCheck = false;
878 doCheck = false;
825 propagatedBuildInputs = with self; [];
826 src = fetchurl {
879 src = fetchurl {
827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
880 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
881 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
882 };
883 meta = {
884 license = [ pkgs.lib.licenses.mit ];
885 };
886 };
887 "webob" = super.buildPythonPackage {
888 name = "webob-1.7.4";
889 doCheck = false;
890 src = fetchurl {
891 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
892 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
829 };
893 };
830 meta = {
894 meta = {
831 license = [ pkgs.lib.licenses.mit ];
895 license = [ pkgs.lib.licenses.mit ];
832 };
896 };
833 };
897 };
834 wheel = super.buildPythonPackage {
898 "webtest" = super.buildPythonPackage {
835 name = "wheel-0.29.0";
899 name = "webtest-2.0.29";
836 buildInputs = with self; [];
837 doCheck = false;
900 doCheck = false;
838 propagatedBuildInputs = with self; [];
901 propagatedBuildInputs = [
902 self."six"
903 self."webob"
904 self."waitress"
905 self."beautifulsoup4"
906 ];
839 src = fetchurl {
907 src = fetchurl {
840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
908 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
841 md5 = "555a67e4507cedee23a0deb9651e452f";
909 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
842 };
910 };
843 meta = {
911 meta = {
844 license = [ pkgs.lib.licenses.mit ];
912 license = [ pkgs.lib.licenses.mit ];
845 };
913 };
846 };
914 };
847 zope.deprecation = super.buildPythonPackage {
915 "zope.deprecation" = super.buildPythonPackage {
848 name = "zope.deprecation-4.3.0";
916 name = "zope.deprecation-4.3.0";
849 buildInputs = with self; [];
850 doCheck = false;
917 doCheck = false;
851 propagatedBuildInputs = with self; [setuptools];
918 propagatedBuildInputs = [
919 self."setuptools"
920 ];
852 src = fetchurl {
921 src = fetchurl {
853 url = "https://pypi.python.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
922 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
854 md5 = "2166b2cb7e0e96a21104e6f8f9b696bb";
923 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
855 };
924 };
856 meta = {
925 meta = {
857 license = [ pkgs.lib.licenses.zpt21 ];
926 license = [ pkgs.lib.licenses.zpl21 ];
858 };
927 };
859 };
928 };
860 zope.interface = super.buildPythonPackage {
929 "zope.interface" = super.buildPythonPackage {
861 name = "zope.interface-4.4.3";
930 name = "zope.interface-4.5.0";
862 buildInputs = with self; [];
863 doCheck = false;
931 doCheck = false;
864 propagatedBuildInputs = with self; [setuptools];
932 propagatedBuildInputs = [
933 self."setuptools"
934 ];
865 src = fetchurl {
935 src = fetchurl {
866 url = "https://pypi.python.org/packages/bd/d2/25349ed41f9dcff7b3baf87bd88a4c82396cf6e02f1f42bb68657a3132af/zope.interface-4.4.3.tar.gz";
936 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
867 md5 = "8700a4f527c1203b34b10c2b4e7a6912";
937 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
868 };
938 };
869 meta = {
939 meta = {
870 license = [ pkgs.lib.licenses.zpt21 ];
940 license = [ pkgs.lib.licenses.zpl21 ];
871 };
941 };
872 };
942 };
873
943
874 ### Test requirements
944 ### Test requirements
875
945
876
946
877 }
947 }
@@ -1,3 +1,8 b''
1 [pytest]
1 [pytest]
2 testpaths = ./vcsserver
2 testpaths = vcsserver
3 addopts = -v
3 norecursedirs = vcsserver/hook_utils/hook_templates
4 cache_dir = /tmp/.pytest_cache
5
6
7 addopts =
8 --pdbcls=IPython.terminal.debugger:TerminalPdb
@@ -1,15 +1,16 b''
1 # This file defines how to "build" for packaging.
2
1 { pkgs ? import <nixpkgs> {}
3 { pkgs ? import <nixpkgs> {}
2 , doCheck ? true
4 , doCheck ? true
3 }:
5 }:
4
6
5 let
7 let
6
7 vcsserver = import ./default.nix {
8 vcsserver = import ./default.nix {
8 inherit
9 inherit
9 doCheck
10 doCheck
10 pkgs;
11 pkgs;
11 };
12 };
12
13
13 in {
14 in {
14 build = vcsserver;
15 build = vcsserver;
15 }
16 }
@@ -1,41 +1,46 b''
1 ## core
1 ## dependencies
2 setuptools==30.1.0
3
2
4 Beaker==1.9.1
3 # our custom configobj
5 configobj==5.0.6
4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
5 dogpile.cache==0.6.6
6 dogpile.core==0.4.1
6 decorator==4.1.2
7 decorator==4.1.2
7 dulwich==0.13.0
8 dulwich==0.13.0
8 hgsubversion==1.9.0
9 hgsubversion==1.9.2
9 hg-evolve==7.0.1
10 hg-evolve==8.0.1
10 infrae.cache==1.0.1
11 mako==1.0.7
11 mercurial==4.4.2
12 markupsafe==1.0.0
12 msgpack-python==0.4.8
13 mercurial==4.6.2
13 pyramid-jinja2==2.7
14 msgpack-python==0.5.6
14 Jinja2==2.9.6
15
15 pyramid==1.9.1
16 pastedeploy==1.5.2
17 psutil==5.4.6
18 pyramid==1.9.2
16 pyramid-mako==1.0.2
19 pyramid-mako==1.0.2
20
21 pygments==2.2.0
22 pathlib2==2.3.0
17 repoze.lru==0.7
23 repoze.lru==0.7
18 simplejson==3.11.1
24 simplejson==3.11.1
19 subprocess32==3.2.7
25 subprocess32==3.5.1
20
26 setproctitle==1.1.10
21 subvertpy==0.10.1
27 subvertpy==0.10.1
22
28
23 six==1.11.0
29 six==1.11.0
24 translationstring==1.3
30 translationstring==1.3
25 WebOb==1.7.4
31 webob==1.7.4
26 wheel==0.29.0
27 zope.deprecation==4.3.0
32 zope.deprecation==4.3.0
28 zope.interface==4.4.3
33 zope.interface==4.5.0
29
34
30 ## http servers
35 ## http servers
31 gevent==1.2.2
36 gevent==1.3.5
32 greenlet==0.4.13
37 greenlet==0.4.13
33 gunicorn==19.7.1
38 gunicorn==19.9.0
34 waitress==1.1.0
39 waitress==1.1.0
35
40
36 ## debug
41 ## debug
37 ipdb==0.10.3
42 ipdb==0.11.0
38 ipython==5.1.0
43 ipython==5.1.0
39
44
40 ## test related requirements
45 ## test related requirements
41 -r requirements_test.txt
46 -r requirements_test.txt
@@ -1,15 +1,14 b''
1 # test related requirements
1 # test related requirements
2 pytest==3.2.5
2 pytest==3.6.0
3 py==1.5.2
3 py==1.5.3
4 pytest-cov==2.5.1
4 pytest-cov==2.5.1
5 pytest-sugar==0.9.0
5 pytest-sugar==0.9.1
6 pytest-runner==3.0.0
6 pytest-runner==4.2.0
7 pytest-catchlog==1.2.2
7 pytest-profiling==1.3.0
8 pytest-profiling==1.2.11
9 gprof2dot==2017.9.19
8 gprof2dot==2017.9.19
10 pytest-timeout==1.2.0
9 pytest-timeout==1.2.1
11
10
12 mock==1.0.1
11 mock==1.0.1
13 WebTest==2.0.29
12 webtest==2.0.29
14 cov-core==1.15.0
13 cov-core==1.15.0
15 coverage==3.7.1
14 coverage==3.7.1
@@ -1,132 +1,139 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # Copyright (C) 2014-2017 RodeCode GmbH
3 # Copyright (C) 2014-2017 RodeCode GmbH
4 #
4 #
5 # This program is free software; you can redistribute it and/or modify
5 # This program is free software; you can redistribute it and/or modify
6 # it under the terms of the GNU General Public License as published by
6 # it under the terms of the GNU General Public License as published by
7 # the Free Software Foundation; either version 3 of the License, or
7 # the Free Software Foundation; either version 3 of the License, or
8 # (at your option) any later version.
8 # (at your option) any later version.
9 #
9 #
10 # This program is distributed in the hope that it will be useful,
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
13 # GNU General Public License for more details.
14 #
14 #
15 # You should have received a copy of the GNU General Public License
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software Foundation,
16 # along with this program; if not, write to the Free Software Foundation,
17 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18
18
19 # Import early to make sure things are patched up properly
19 # Import early to make sure things are patched up properly
20 from setuptools import setup, find_packages
20 from setuptools import setup, find_packages
21
21
22 import os
22 import os
23 import sys
23 import sys
24 import pkgutil
24 import pkgutil
25 import platform
25 import platform
26 import codecs
26
27
27 from pip.download import PipSession
28 try: # for pip >= 10
29 from pip._internal.req import parse_requirements
30 except ImportError: # for pip <= 9.0.3
28 from pip.req import parse_requirements
31 from pip.req import parse_requirements
29
32
30 from codecs import open
33 try: # for pip >= 10
34 from pip._internal.download import PipSession
35 except ImportError: # for pip <= 9.0.3
36 from pip.download import PipSession
37
31
38
32
39
33 if sys.version_info < (2, 7):
40 if sys.version_info < (2, 7):
34 raise Exception('VCSServer requires Python 2.7 or later')
41 raise Exception('VCSServer requires Python 2.7 or later')
35
42
36 here = os.path.abspath(os.path.dirname(__file__))
43 here = os.path.abspath(os.path.dirname(__file__))
37
44
38 # defines current platform
45 # defines current platform
39 __platform__ = platform.system()
46 __platform__ = platform.system()
40 __license__ = 'GPL V3'
47 __license__ = 'GPL V3'
41 __author__ = 'RhodeCode GmbH'
48 __author__ = 'RhodeCode GmbH'
42 __url__ = 'https://code.rhodecode.com'
49 __url__ = 'https://code.rhodecode.com'
43 is_windows = __platform__ in ('Windows',)
50 is_windows = __platform__ in ('Windows',)
44
51
45
52
46 def _get_requirements(req_filename, exclude=None, extras=None):
53 def _get_requirements(req_filename, exclude=None, extras=None):
47 extras = extras or []
54 extras = extras or []
48 exclude = exclude or []
55 exclude = exclude or []
49
56
50 try:
57 try:
51 parsed = parse_requirements(
58 parsed = parse_requirements(
52 os.path.join(here, req_filename), session=PipSession())
59 os.path.join(here, req_filename), session=PipSession())
53 except TypeError:
60 except TypeError:
54 # try pip < 6.0.0, that doesn't support session
61 # try pip < 6.0.0, that doesn't support session
55 parsed = parse_requirements(os.path.join(here, req_filename))
62 parsed = parse_requirements(os.path.join(here, req_filename))
56
63
57 requirements = []
64 requirements = []
58 for ir in parsed:
65 for ir in parsed:
59 if ir.req and ir.name not in exclude:
66 if ir.req and ir.name not in exclude:
60 requirements.append(str(ir.req))
67 requirements.append(str(ir.req))
61 return requirements + extras
68 return requirements + extras
62
69
63
70
64 # requirements extract
71 # requirements extract
65 setup_requirements = ['pytest-runner']
72 setup_requirements = ['pytest-runner']
66 install_requirements = _get_requirements(
73 install_requirements = _get_requirements(
67 'requirements.txt', exclude=['setuptools'])
74 'requirements.txt', exclude=['setuptools'])
68 test_requirements = _get_requirements(
75 test_requirements = _get_requirements(
69 'requirements_test.txt', extras=['configobj'])
76 'requirements_test.txt', extras=['configobj'])
70
77
71
78
72 def get_version():
79 def get_version():
73 version = pkgutil.get_data('vcsserver', 'VERSION')
80 version = pkgutil.get_data('vcsserver', 'VERSION')
74 return version.strip()
81 return version.strip()
75
82
76
83
77 # additional files that goes into package itself
84 # additional files that goes into package itself
78 package_data = {
85 package_data = {
79 '': ['*.txt', '*.rst'],
86 '': ['*.txt', '*.rst'],
80 'configs': ['*.ini'],
87 'configs': ['*.ini'],
81 'vcsserver': ['VERSION'],
88 'vcsserver': ['VERSION'],
82 }
89 }
83
90
84 description = 'Version Control System Server'
91 description = 'Version Control System Server'
85 keywords = ' '.join([
92 keywords = ' '.join([
86 'CLI', 'RhodeCode', 'RhodeCode Enterprise', 'RhodeCode Tools'])
93 'CLI', 'RhodeCode', 'RhodeCode Enterprise', 'RhodeCode Tools'])
87
94
88 # README/DESCRIPTION generation
95 # README/DESCRIPTION generation
89 readme_file = 'README.rst'
96 readme_file = 'README.rst'
90 changelog_file = 'CHANGES.rst'
97 changelog_file = 'CHANGES.rst'
91 try:
98 try:
92 long_description = open(readme_file).read() + '\n\n' + \
99 long_description = codecs.open(readme_file).read() + '\n\n' + \
93 open(changelog_file).read()
100 codecs.open(changelog_file).read()
94 except IOError as err:
101 except IOError as err:
95 sys.stderr.write(
102 sys.stderr.write(
96 "[WARNING] Cannot find file specified as long_description (%s)\n "
103 "[WARNING] Cannot find file specified as long_description (%s)\n "
97 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
104 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
98 long_description = description
105 long_description = description
99
106
100
107
101 setup(
108 setup(
102 name='rhodecode-vcsserver',
109 name='rhodecode-vcsserver',
103 version=get_version(),
110 version=get_version(),
104 description=description,
111 description=description,
105 long_description=long_description,
112 long_description=long_description,
106 keywords=keywords,
113 keywords=keywords,
107 license=__license__,
114 license=__license__,
108 author=__author__,
115 author=__author__,
109 author_email='marcin@rhodecode.com',
116 author_email='admin@rhodecode.com',
110 url=__url__,
117 url=__url__,
111 setup_requires=setup_requirements,
118 setup_requires=setup_requirements,
112 install_requires=install_requirements,
119 install_requires=install_requirements,
113 tests_require=test_requirements,
120 tests_require=test_requirements,
114 zip_safe=False,
121 zip_safe=False,
115 packages=find_packages(exclude=["docs", "tests*"]),
122 packages=find_packages(exclude=["docs", "tests*"]),
116 package_data=package_data,
123 package_data=package_data,
117 include_package_data=True,
124 include_package_data=True,
118 classifiers=[
125 classifiers=[
119 'Development Status :: 6 - Mature',
126 'Development Status :: 6 - Mature',
120 'Intended Audience :: Developers',
127 'Intended Audience :: Developers',
121 'Operating System :: OS Independent',
128 'Operating System :: OS Independent',
122 'Topic :: Software Development :: Version Control',
129 'Topic :: Software Development :: Version Control',
123 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
130 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
124 'Programming Language :: Python :: 2.7',
131 'Programming Language :: Python :: 2.7',
125 ],
132 ],
126 entry_points={
133 entry_points={
127 'console_scripts': [
134 'console_scripts': [
128 'vcsserver=vcsserver.main:main',
135 'vcsserver=vcsserver.main:main',
129 ],
136 ],
130 'paste.app_factory': ['main=vcsserver.http_main:main']
137 'paste.app_factory': ['main=vcsserver.http_main:main']
131 },
138 },
132 )
139 )
@@ -1,41 +1,67 b''
1 { pkgs ? import <nixpkgs> {}
1 # This file contains the adjustments which are desired for a development
2 # environment.
3
4 { pkgs ? (import <nixpkgs> {})
5 , pythonPackages ? "python27Packages"
2 , doCheck ? false
6 , doCheck ? false
3 }:
7 }:
4
8
5 let
9 let
6
10
7 vcsserver = import ./default.nix {
11 vcsserver = import ./default.nix {
8 inherit pkgs doCheck;
12 inherit
13 pkgs
14 doCheck;
9 };
15 };
10
16
11 vcs-pythonPackages = vcsserver.pythonPackages;
17 vcs-pythonPackages = vcsserver.pythonPackages;
12
18
13 in vcsserver.override (attrs: {
19 in vcsserver.override (attrs: {
14
15 # Avoid that we dump any sources into the store when entering the shell and
20 # Avoid that we dump any sources into the store when entering the shell and
16 # make development a little bit more convenient.
21 # make development a little bit more convenient.
17 src = null;
22 src = null;
18
23
24 # Add dependencies which are useful for the development environment.
19 buildInputs =
25 buildInputs =
20 attrs.buildInputs ++
26 attrs.buildInputs ++
21 (with vcs-pythonPackages; [
27 (with vcs-pythonPackages; [
22 ipdb
28 ipdb
23 ]);
29 ]);
24
30
25 # Somewhat snappier setup of the development environment
31 # place to inject some required libs from develop installs
26 # TODO: think of supporting a stable path again, so that multiple shells
32 propagatedBuildInputs =
27 # can share it.
33 attrs.propagatedBuildInputs ++
28 postShellHook = ''
34 [];
29 # Set locale
35
30 export LC_ALL="en_US.UTF-8"
36
37 # Make sure we execute both hooks
38 shellHook = ''
39 runHook preShellHook
40 runHook postShellHook
41 '';
42
43 preShellHook = ''
44 echo "Entering VCS-Shell"
31
45
32 # Custom prompt to distinguish from other dev envs.
46 # Custom prompt to distinguish from other dev envs.
33 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
47 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
34
48
49 # Set locale
50 export LC_ALL="en_US.UTF-8"
51
52 # Setup a temporary directory.
35 tmp_path=$(mktemp -d)
53 tmp_path=$(mktemp -d)
36 export PATH="$tmp_path/bin:$PATH"
54 export PATH="$tmp_path/bin:$PATH"
37 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
55 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
38 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
56 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
57
58 # Develop installation
59 echo "[BEGIN]: develop install of rhodecode-vcsserver"
39 python setup.py develop --prefix $tmp_path --allow-hosts ""
60 python setup.py develop --prefix $tmp_path --allow-hosts ""
40 '';
61 '';
62
63 postShellHook = ''
64
65 '';
66
41 })
67 })
@@ -1,1 +1,1 b''
1 4.12.4 No newline at end of file
1 4.13.0 No newline at end of file
@@ -1,98 +1,91 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import sys
18 import sys
19 import traceback
19 import traceback
20 import logging
20 import logging
21 import urlparse
21 import urlparse
22
22
23 from vcsserver.lib.rc_cache import region_meta
23 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
24
25
25
26
26 class RepoFactory(object):
27 class RepoFactory(object):
27 """
28 """
28 Utility to create instances of repository
29 Utility to create instances of repository
29
30
30 It provides internal caching of the `repo` object based on
31 It provides internal caching of the `repo` object based on
31 the :term:`call context`.
32 the :term:`call context`.
32 """
33 """
34 repo_type = None
33
35
34 def __init__(self, repo_cache):
36 def __init__(self):
35 self._cache = repo_cache
37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
36
38
37 def _create_config(self, path, config):
39 def _create_config(self, path, config):
38 config = {}
40 config = {}
39 return config
41 return config
40
42
41 def _create_repo(self, wire, create):
43 def _create_repo(self, wire, create):
42 raise NotImplementedError()
44 raise NotImplementedError()
43
45
44 def repo(self, wire, create=False):
46 def repo(self, wire, create=False):
45 """
47 """
46 Get a repository instance for the given path.
48 Get a repository instance for the given path.
47
49
48 Uses internally the low level beaker API since the decorators introduce
50 Uses internally the low level beaker API since the decorators introduce
49 significant overhead.
51 significant overhead.
50 """
52 """
51 def create_new_repo():
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
52 return self._create_repo(wire, create)
62 return self._create_repo(wire, create)
53
63
54 return self._repo(wire, create_new_repo)
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
55
65 return repo
56 def _repo(self, wire, createfunc):
57 context = wire.get('context', None)
58 cache = wire.get('cache', True)
59
60 if context and cache:
61 cache_key = (context, wire['path'])
62 log.debug(
63 'FETCH %s@%s repo object from cache. Context: %s',
64 self.__class__.__name__, wire['path'], context)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
66 else:
67 log.debug(
68 'INIT %s@%s repo object based on wire %s. Context: %s',
69 self.__class__.__name__, wire['path'], wire, context)
70 return createfunc()
71
66
72
67
73 def obfuscate_qs(query_string):
68 def obfuscate_qs(query_string):
74 if query_string is None:
69 if query_string is None:
75 return None
70 return None
76
71
77 parsed = []
72 parsed = []
78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
73 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
79 if k in ['auth_token', 'api_key']:
74 if k in ['auth_token', 'api_key']:
80 v = "*****"
75 v = "*****"
81 parsed.append((k, v))
76 parsed.append((k, v))
82
77
83 return '&'.join('{}{}'.format(
78 return '&'.join('{}{}'.format(
84 k, '={}'.format(v) if v else '') for k, v in parsed)
79 k, '={}'.format(v) if v else '') for k, v in parsed)
85
80
86
81
87 def raise_from_original(new_type):
82 def raise_from_original(new_type):
88 """
83 """
89 Raise a new exception type with original args and traceback.
84 Raise a new exception type with original args and traceback.
90 """
85 """
91 exc_type, exc_value, exc_traceback = sys.exc_info()
86 exc_type, exc_value, exc_traceback = sys.exc_info()
92
87
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
95 try:
88 try:
96 raise new_type(*exc_value.args), None, exc_traceback
89 raise new_type(*exc_value.args), None, exc_traceback
97 finally:
90 finally:
98 del exc_traceback
91 del exc_traceback
@@ -1,70 +1,116 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 import functools
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28 from pyramid.httpexceptions import HTTPLocked
29
28
30
29
31 def _make_exception(kind, *args):
30 def _make_exception(kind, org_exc, *args):
32 """
31 """
33 Prepares a base `Exception` instance to be sent over the wire.
32 Prepares a base `Exception` instance to be sent over the wire.
34
33
35 To give our caller a hint what this is about, it will attach an attribute
34 To give our caller a hint what this is about, it will attach an attribute
36 `_vcs_kind` to the exception.
35 `_vcs_kind` to the exception.
37 """
36 """
38 exc = Exception(*args)
37 exc = Exception(*args)
39 exc._vcs_kind = kind
38 exc._vcs_kind = kind
39 exc._org_exc = org_exc
40 return exc
40 return exc
41
41
42
42
43 AbortException = functools.partial(_make_exception, 'abort')
43 def AbortException(org_exc=None):
44 def _make_exception_wrapper(*args):
45 return _make_exception('abort', org_exc, *args)
46 return _make_exception_wrapper
47
44
48
45 ArchiveException = functools.partial(_make_exception, 'archive')
49 def ArchiveException(org_exc=None):
50 def _make_exception_wrapper(*args):
51 return _make_exception('archive', org_exc, *args)
52 return _make_exception_wrapper
53
46
54
47 LookupException = functools.partial(_make_exception, 'lookup')
55 def LookupException(org_exc=None):
56 def _make_exception_wrapper(*args):
57 return _make_exception('lookup', org_exc, *args)
58 return _make_exception_wrapper
59
48
60
49 VcsException = functools.partial(_make_exception, 'error')
61 def VcsException(org_exc=None):
62 def _make_exception_wrapper(*args):
63 return _make_exception('error', org_exc, *args)
64 return _make_exception_wrapper
65
66
67 def RepositoryLockedException(org_exc=None):
68 def _make_exception_wrapper(*args):
69 return _make_exception('repo_locked', org_exc, *args)
70 return _make_exception_wrapper
50
71
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
72
73 def RepositoryBranchProtectedException(org_exc=None):
74 def _make_exception_wrapper(*args):
75 return _make_exception('repo_branch_protected', org_exc, *args)
76 return _make_exception_wrapper
52
77
53 RequirementException = functools.partial(_make_exception, 'requirement')
78
79 def RequirementException(org_exc=None):
80 def _make_exception_wrapper(*args):
81 return _make_exception('requirement', org_exc, *args)
82 return _make_exception_wrapper
83
54
84
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
85 def UnhandledException(org_exc=None):
86 def _make_exception_wrapper(*args):
87 return _make_exception('unhandled', org_exc, *args)
88 return _make_exception_wrapper
89
56
90
57 URLError = functools.partial(_make_exception, 'url_error')
91 def URLError(org_exc=None):
92 def _make_exception_wrapper(*args):
93 return _make_exception('url_error', org_exc, *args)
94 return _make_exception_wrapper
58
95
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
96
97 def SubrepoMergeException(org_exc=None):
98 def _make_exception_wrapper(*args):
99 return _make_exception('subrepo_merge_error', org_exc, *args)
100 return _make_exception_wrapper
60
101
61
102
62 class HTTPRepoLocked(HTTPLocked):
103 class HTTPRepoLocked(HTTPLocked):
63 """
104 """
64 Subclass of HTTPLocked response that allows to set the title and status
105 Subclass of HTTPLocked response that allows to set the title and status
65 code via constructor arguments.
106 code via constructor arguments.
66 """
107 """
67 def __init__(self, title, status_code=None, **kwargs):
108 def __init__(self, title, status_code=None, **kwargs):
68 self.code = status_code or HTTPLocked.code
109 self.code = status_code or HTTPLocked.code
69 self.title = title
110 self.title = title
70 super(HTTPRepoLocked, self).__init__(**kwargs)
111 super(HTTPRepoLocked, self).__init__(**kwargs)
112
113
114 class HTTPRepoBranchProtected(HTTPForbidden):
115 def __init__(self, *args, **kwargs):
116 super(HTTPForbidden, self).__init__(*args, **kwargs)
@@ -1,670 +1,675 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 from dulwich import index, objects
28 from dulwich import index, objects
29 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.errors import (
30 from dulwich.errors import (
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 MissingCommitError, ObjectMissing, HangupException,
32 MissingCommitError, ObjectMissing, HangupException,
33 UnexpectedCommandError)
33 UnexpectedCommandError)
34 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.server import update_server_info
35 from dulwich.server import update_server_info
36
36
37 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver.utils import safe_str
38 from vcsserver.utils import safe_str
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.hgcompat import (
40 from vcsserver.hgcompat import (
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 from vcsserver.git_lfs.lib import LFSOidStore
42 from vcsserver.git_lfs.lib import LFSOidStore
43
43
44 DIR_STAT = stat.S_IFDIR
44 DIR_STAT = stat.S_IFDIR
45 FILE_MODE = stat.S_IFMT
45 FILE_MODE = stat.S_IFMT
46 GIT_LINK = objects.S_IFGITLINK
46 GIT_LINK = objects.S_IFGITLINK
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 def reraise_safe_exceptions(func):
51 def reraise_safe_exceptions(func):
52 """Converts Dulwich exceptions to something neutral."""
52 """Converts Dulwich exceptions to something neutral."""
53 @wraps(func)
53 @wraps(func)
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 ObjectMissing) as e:
58 ObjectMissing) as e:
59 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e)(e.message)
60 except (HangupException, UnexpectedCommandError) as e:
60 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e)(e.message)
62 except Exception as e:
62 except Exception as e:
63 # NOTE(marcink): becuase of how dulwich handles some exceptions
63 # NOTE(marcink): becuase of how dulwich handles some exceptions
64 # (KeyError on empty repos), we cannot track this and catch all
64 # (KeyError on empty repos), we cannot track this and catch all
65 # exceptions, it's an exceptions from other handlers
65 # exceptions, it's an exceptions from other handlers
66 #if not hasattr(e, '_vcs_kind'):
66 #if not hasattr(e, '_vcs_kind'):
67 #log.exception("Unhandled exception in git remote call")
67 #log.exception("Unhandled exception in git remote call")
68 #raise_from_original(exceptions.UnhandledException)
68 #raise_from_original(exceptions.UnhandledException)
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class Repo(DulwichRepo):
73 class Repo(DulwichRepo):
74 """
74 """
75 A wrapper for dulwich Repo class.
75 A wrapper for dulwich Repo class.
76
76
77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
78 "Too many open files" error. We need to close all opened file descriptors
78 "Too many open files" error. We need to close all opened file descriptors
79 once the repo object is destroyed.
79 once the repo object is destroyed.
80
80
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 to 0.12.0 +
82 to 0.12.0 +
83 """
83 """
84 def __del__(self):
84 def __del__(self):
85 if hasattr(self, 'object_store'):
85 if hasattr(self, 'object_store'):
86 self.close()
86 self.close()
87
87
88
88
89 class GitFactory(RepoFactory):
89 class GitFactory(RepoFactory):
90 repo_type = 'git'
90
91
91 def _create_repo(self, wire, create):
92 def _create_repo(self, wire, create):
92 repo_path = str_to_dulwich(wire['path'])
93 repo_path = str_to_dulwich(wire['path'])
93 return Repo(repo_path)
94 return Repo(repo_path)
94
95
95
96
96 class GitRemote(object):
97 class GitRemote(object):
97
98
98 def __init__(self, factory):
99 def __init__(self, factory):
99 self._factory = factory
100 self._factory = factory
100
101
101 self._bulk_methods = {
102 self._bulk_methods = {
102 "author": self.commit_attribute,
103 "author": self.commit_attribute,
103 "date": self.get_object_attrs,
104 "date": self.get_object_attrs,
104 "message": self.commit_attribute,
105 "message": self.commit_attribute,
105 "parents": self.commit_attribute,
106 "parents": self.commit_attribute,
106 "_commit": self.revision,
107 "_commit": self.revision,
107 }
108 }
108
109
109 def _wire_to_config(self, wire):
110 def _wire_to_config(self, wire):
110 if 'config' in wire:
111 if 'config' in wire:
111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
112 return {}
113 return {}
113
114
114 def _assign_ref(self, wire, ref, commit_id):
115 def _assign_ref(self, wire, ref, commit_id):
115 repo = self._factory.repo(wire)
116 repo = self._factory.repo(wire)
116 repo[ref] = commit_id
117 repo[ref] = commit_id
117
118
118 @reraise_safe_exceptions
119 @reraise_safe_exceptions
119 def add_object(self, wire, content):
120 def add_object(self, wire, content):
120 repo = self._factory.repo(wire)
121 repo = self._factory.repo(wire)
121 blob = objects.Blob()
122 blob = objects.Blob()
122 blob.set_raw_string(content)
123 blob.set_raw_string(content)
123 repo.object_store.add_object(blob)
124 repo.object_store.add_object(blob)
124 return blob.id
125 return blob.id
125
126
126 @reraise_safe_exceptions
127 @reraise_safe_exceptions
127 def assert_correct_path(self, wire):
128 def assert_correct_path(self, wire):
128 path = wire.get('path')
129 path = wire.get('path')
129 try:
130 try:
130 self._factory.repo(wire)
131 self._factory.repo(wire)
131 except NotGitRepository as e:
132 except NotGitRepository as e:
132 tb = traceback.format_exc()
133 tb = traceback.format_exc()
133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 return False
135 return False
135
136
136 return True
137 return True
137
138
138 @reraise_safe_exceptions
139 @reraise_safe_exceptions
139 def bare(self, wire):
140 def bare(self, wire):
140 repo = self._factory.repo(wire)
141 repo = self._factory.repo(wire)
141 return repo.bare
142 return repo.bare
142
143
143 @reraise_safe_exceptions
144 @reraise_safe_exceptions
144 def blob_as_pretty_string(self, wire, sha):
145 def blob_as_pretty_string(self, wire, sha):
145 repo = self._factory.repo(wire)
146 repo = self._factory.repo(wire)
146 return repo[sha].as_pretty_string()
147 return repo[sha].as_pretty_string()
147
148
148 @reraise_safe_exceptions
149 @reraise_safe_exceptions
149 def blob_raw_length(self, wire, sha):
150 def blob_raw_length(self, wire, sha):
150 repo = self._factory.repo(wire)
151 repo = self._factory.repo(wire)
151 blob = repo[sha]
152 blob = repo[sha]
152 return blob.raw_length()
153 return blob.raw_length()
153
154
154 def _parse_lfs_pointer(self, raw_content):
155 def _parse_lfs_pointer(self, raw_content):
155
156
156 spec_string = 'version https://git-lfs.github.com/spec'
157 spec_string = 'version https://git-lfs.github.com/spec'
157 if raw_content and raw_content.startswith(spec_string):
158 if raw_content and raw_content.startswith(spec_string):
158 pattern = re.compile(r"""
159 pattern = re.compile(r"""
159 (?:\n)?
160 (?:\n)?
160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
162 ^size[ ](?P<oid_size>[0-9]+)\n
163 ^size[ ](?P<oid_size>[0-9]+)\n
163 (?:\n)?
164 (?:\n)?
164 """, re.VERBOSE | re.MULTILINE)
165 """, re.VERBOSE | re.MULTILINE)
165 match = pattern.match(raw_content)
166 match = pattern.match(raw_content)
166 if match:
167 if match:
167 return match.groupdict()
168 return match.groupdict()
168
169
169 return {}
170 return {}
170
171
171 @reraise_safe_exceptions
172 @reraise_safe_exceptions
172 def is_large_file(self, wire, sha):
173 def is_large_file(self, wire, sha):
173 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
174 blob = repo[sha]
175 blob = repo[sha]
175 return self._parse_lfs_pointer(blob.as_raw_string())
176 return self._parse_lfs_pointer(blob.as_raw_string())
176
177
177 @reraise_safe_exceptions
178 @reraise_safe_exceptions
178 def in_largefiles_store(self, wire, oid):
179 def in_largefiles_store(self, wire, oid):
179 repo = self._factory.repo(wire)
180 repo = self._factory.repo(wire)
180 conf = self._wire_to_config(wire)
181 conf = self._wire_to_config(wire)
181
182
182 store_location = conf.get('vcs_git_lfs_store_location')
183 store_location = conf.get('vcs_git_lfs_store_location')
183 if store_location:
184 if store_location:
184 repo_name = repo.path
185 repo_name = repo.path
185 store = LFSOidStore(
186 store = LFSOidStore(
186 oid=oid, repo=repo_name, store_location=store_location)
187 oid=oid, repo=repo_name, store_location=store_location)
187 return store.has_oid()
188 return store.has_oid()
188
189
189 return False
190 return False
190
191
191 @reraise_safe_exceptions
192 @reraise_safe_exceptions
192 def store_path(self, wire, oid):
193 def store_path(self, wire, oid):
193 repo = self._factory.repo(wire)
194 repo = self._factory.repo(wire)
194 conf = self._wire_to_config(wire)
195 conf = self._wire_to_config(wire)
195
196
196 store_location = conf.get('vcs_git_lfs_store_location')
197 store_location = conf.get('vcs_git_lfs_store_location')
197 if store_location:
198 if store_location:
198 repo_name = repo.path
199 repo_name = repo.path
199 store = LFSOidStore(
200 store = LFSOidStore(
200 oid=oid, repo=repo_name, store_location=store_location)
201 oid=oid, repo=repo_name, store_location=store_location)
201 return store.oid_path
202 return store.oid_path
202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203 raise ValueError('Unable to fetch oid with path {}'.format(oid))
203
204
204 @reraise_safe_exceptions
205 @reraise_safe_exceptions
205 def bulk_request(self, wire, rev, pre_load):
206 def bulk_request(self, wire, rev, pre_load):
206 result = {}
207 result = {}
207 for attr in pre_load:
208 for attr in pre_load:
208 try:
209 try:
209 method = self._bulk_methods[attr]
210 method = self._bulk_methods[attr]
210 args = [wire, rev]
211 args = [wire, rev]
211 if attr == "date":
212 if attr == "date":
212 args.extend(["commit_time", "commit_timezone"])
213 args.extend(["commit_time", "commit_timezone"])
213 elif attr in ["author", "message", "parents"]:
214 elif attr in ["author", "message", "parents"]:
214 args.append(attr)
215 args.append(attr)
215 result[attr] = method(*args)
216 result[attr] = method(*args)
216 except KeyError:
217 except KeyError as e:
217 raise exceptions.VcsException(
218 raise exceptions.VcsException(e)(
218 "Unknown bulk attribute: %s" % attr)
219 "Unknown bulk attribute: %s" % attr)
219 return result
220 return result
220
221
221 def _build_opener(self, url):
222 def _build_opener(self, url):
222 handlers = []
223 handlers = []
223 url_obj = url_parser(url)
224 url_obj = url_parser(url)
224 _, authinfo = url_obj.authinfo()
225 _, authinfo = url_obj.authinfo()
225
226
226 if authinfo:
227 if authinfo:
227 # create a password manager
228 # create a password manager
228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
229 passmgr.add_password(*authinfo)
230 passmgr.add_password(*authinfo)
230
231
231 handlers.extend((httpbasicauthhandler(passmgr),
232 handlers.extend((httpbasicauthhandler(passmgr),
232 httpdigestauthhandler(passmgr)))
233 httpdigestauthhandler(passmgr)))
233
234
234 return urllib2.build_opener(*handlers)
235 return urllib2.build_opener(*handlers)
235
236
236 @reraise_safe_exceptions
237 @reraise_safe_exceptions
237 def check_url(self, url, config):
238 def check_url(self, url, config):
238 url_obj = url_parser(url)
239 url_obj = url_parser(url)
239 test_uri, _ = url_obj.authinfo()
240 test_uri, _ = url_obj.authinfo()
240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
241 url_obj.query = obfuscate_qs(url_obj.query)
242 url_obj.query = obfuscate_qs(url_obj.query)
242 cleaned_uri = str(url_obj)
243 cleaned_uri = str(url_obj)
243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
244
245
245 if not test_uri.endswith('info/refs'):
246 if not test_uri.endswith('info/refs'):
246 test_uri = test_uri.rstrip('/') + '/info/refs'
247 test_uri = test_uri.rstrip('/') + '/info/refs'
247
248
248 o = self._build_opener(url)
249 o = self._build_opener(url)
249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
250
251
251 q = {"service": 'git-upload-pack'}
252 q = {"service": 'git-upload-pack'}
252 qs = '?%s' % urllib.urlencode(q)
253 qs = '?%s' % urllib.urlencode(q)
253 cu = "%s%s" % (test_uri, qs)
254 cu = "%s%s" % (test_uri, qs)
254 req = urllib2.Request(cu, None, {})
255 req = urllib2.Request(cu, None, {})
255
256
256 try:
257 try:
257 log.debug("Trying to open URL %s", cleaned_uri)
258 log.debug("Trying to open URL %s", cleaned_uri)
258 resp = o.open(req)
259 resp = o.open(req)
259 if resp.code != 200:
260 if resp.code != 200:
260 raise exceptions.URLError('Return Code is not 200')
261 raise exceptions.URLError()('Return Code is not 200')
261 except Exception as e:
262 except Exception as e:
262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
263 # means it cannot be cloned
264 # means it cannot be cloned
264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
265 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
265
266
266 # now detect if it's proper git repo
267 # now detect if it's proper git repo
267 gitdata = resp.read()
268 gitdata = resp.read()
268 if 'service=git-upload-pack' in gitdata:
269 if 'service=git-upload-pack' in gitdata:
269 pass
270 pass
270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
271 # old style git can return some other format !
272 # old style git can return some other format !
272 pass
273 pass
273 else:
274 else:
274 raise exceptions.URLError(
275 raise exceptions.URLError()(
275 "url [%s] does not look like an git" % (cleaned_uri,))
276 "url [%s] does not look like an git" % (cleaned_uri,))
276
277
277 return True
278 return True
278
279
279 @reraise_safe_exceptions
280 @reraise_safe_exceptions
280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
281 remote_refs = self.fetch(wire, url, apply_refs=False)
282 remote_refs = self.fetch(wire, url, apply_refs=False)
282 repo = self._factory.repo(wire)
283 repo = self._factory.repo(wire)
283 if isinstance(valid_refs, list):
284 if isinstance(valid_refs, list):
284 valid_refs = tuple(valid_refs)
285 valid_refs = tuple(valid_refs)
285
286
286 for k in remote_refs:
287 for k in remote_refs:
287 # only parse heads/tags and skip so called deferred tags
288 # only parse heads/tags and skip so called deferred tags
288 if k.startswith(valid_refs) and not k.endswith(deferred):
289 if k.startswith(valid_refs) and not k.endswith(deferred):
289 repo[k] = remote_refs[k]
290 repo[k] = remote_refs[k]
290
291
291 if update_after_clone:
292 if update_after_clone:
292 # we want to checkout HEAD
293 # we want to checkout HEAD
293 repo["HEAD"] = remote_refs["HEAD"]
294 repo["HEAD"] = remote_refs["HEAD"]
294 index.build_index_from_tree(repo.path, repo.index_path(),
295 index.build_index_from_tree(repo.path, repo.index_path(),
295 repo.object_store, repo["HEAD"].tree)
296 repo.object_store, repo["HEAD"].tree)
296
297
297 # TODO: this is quite complex, check if that can be simplified
298 # TODO: this is quite complex, check if that can be simplified
298 @reraise_safe_exceptions
299 @reraise_safe_exceptions
299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
300 repo = self._factory.repo(wire)
301 repo = self._factory.repo(wire)
301 object_store = repo.object_store
302 object_store = repo.object_store
302
303
303 # Create tree and populates it with blobs
304 # Create tree and populates it with blobs
304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
305
306
306 for node in updated:
307 for node in updated:
307 # Compute subdirs if needed
308 # Compute subdirs if needed
308 dirpath, nodename = vcspath.split(node['path'])
309 dirpath, nodename = vcspath.split(node['path'])
309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
310 parent = commit_tree
311 parent = commit_tree
311 ancestors = [('', parent)]
312 ancestors = [('', parent)]
312
313
313 # Tries to dig for the deepest existing tree
314 # Tries to dig for the deepest existing tree
314 while dirnames:
315 while dirnames:
315 curdir = dirnames.pop(0)
316 curdir = dirnames.pop(0)
316 try:
317 try:
317 dir_id = parent[curdir][1]
318 dir_id = parent[curdir][1]
318 except KeyError:
319 except KeyError:
319 # put curdir back into dirnames and stops
320 # put curdir back into dirnames and stops
320 dirnames.insert(0, curdir)
321 dirnames.insert(0, curdir)
321 break
322 break
322 else:
323 else:
323 # If found, updates parent
324 # If found, updates parent
324 parent = repo[dir_id]
325 parent = repo[dir_id]
325 ancestors.append((curdir, parent))
326 ancestors.append((curdir, parent))
326 # Now parent is deepest existing tree and we need to create
327 # Now parent is deepest existing tree and we need to create
327 # subtrees for dirnames (in reverse order)
328 # subtrees for dirnames (in reverse order)
328 # [this only applies for nodes from added]
329 # [this only applies for nodes from added]
329 new_trees = []
330 new_trees = []
330
331
331 blob = objects.Blob.from_string(node['content'])
332 blob = objects.Blob.from_string(node['content'])
332
333
333 if dirnames:
334 if dirnames:
334 # If there are trees which should be created we need to build
335 # If there are trees which should be created we need to build
335 # them now (in reverse order)
336 # them now (in reverse order)
336 reversed_dirnames = list(reversed(dirnames))
337 reversed_dirnames = list(reversed(dirnames))
337 curtree = objects.Tree()
338 curtree = objects.Tree()
338 curtree[node['node_path']] = node['mode'], blob.id
339 curtree[node['node_path']] = node['mode'], blob.id
339 new_trees.append(curtree)
340 new_trees.append(curtree)
340 for dirname in reversed_dirnames[:-1]:
341 for dirname in reversed_dirnames[:-1]:
341 newtree = objects.Tree()
342 newtree = objects.Tree()
342 newtree[dirname] = (DIR_STAT, curtree.id)
343 newtree[dirname] = (DIR_STAT, curtree.id)
343 new_trees.append(newtree)
344 new_trees.append(newtree)
344 curtree = newtree
345 curtree = newtree
345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
346 else:
347 else:
347 parent.add(
348 parent.add(
348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
349
350
350 new_trees.append(parent)
351 new_trees.append(parent)
351 # Update ancestors
352 # Update ancestors
352 reversed_ancestors = reversed(
353 reversed_ancestors = reversed(
353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
354 for parent, tree, path in reversed_ancestors:
355 for parent, tree, path in reversed_ancestors:
355 parent[path] = (DIR_STAT, tree.id)
356 parent[path] = (DIR_STAT, tree.id)
356 object_store.add_object(tree)
357 object_store.add_object(tree)
357
358
358 object_store.add_object(blob)
359 object_store.add_object(blob)
359 for tree in new_trees:
360 for tree in new_trees:
360 object_store.add_object(tree)
361 object_store.add_object(tree)
361
362
362 for node_path in removed:
363 for node_path in removed:
363 paths = node_path.split('/')
364 paths = node_path.split('/')
364 tree = commit_tree
365 tree = commit_tree
365 trees = [tree]
366 trees = [tree]
366 # Traverse deep into the forest...
367 # Traverse deep into the forest...
367 for path in paths:
368 for path in paths:
368 try:
369 try:
369 obj = repo[tree[path][1]]
370 obj = repo[tree[path][1]]
370 if isinstance(obj, objects.Tree):
371 if isinstance(obj, objects.Tree):
371 trees.append(obj)
372 trees.append(obj)
372 tree = obj
373 tree = obj
373 except KeyError:
374 except KeyError:
374 break
375 break
375 # Cut down the blob and all rotten trees on the way back...
376 # Cut down the blob and all rotten trees on the way back...
376 for path, tree in reversed(zip(paths, trees)):
377 for path, tree in reversed(zip(paths, trees)):
377 del tree[path]
378 del tree[path]
378 if tree:
379 if tree:
379 # This tree still has elements - don't remove it or any
380 # This tree still has elements - don't remove it or any
380 # of it's parents
381 # of it's parents
381 break
382 break
382
383
383 object_store.add_object(commit_tree)
384 object_store.add_object(commit_tree)
384
385
385 # Create commit
386 # Create commit
386 commit = objects.Commit()
387 commit = objects.Commit()
387 commit.tree = commit_tree.id
388 commit.tree = commit_tree.id
388 for k, v in commit_data.iteritems():
389 for k, v in commit_data.iteritems():
389 setattr(commit, k, v)
390 setattr(commit, k, v)
390 object_store.add_object(commit)
391 object_store.add_object(commit)
391
392
392 ref = 'refs/heads/%s' % branch
393 ref = 'refs/heads/%s' % branch
393 repo.refs[ref] = commit.id
394 repo.refs[ref] = commit.id
394
395
395 return commit.id
396 return commit.id
396
397
397 @reraise_safe_exceptions
398 @reraise_safe_exceptions
398 def fetch(self, wire, url, apply_refs=True, refs=None):
399 def fetch(self, wire, url, apply_refs=True, refs=None):
399 if url != 'default' and '://' not in url:
400 if url != 'default' and '://' not in url:
400 client = LocalGitClient(url)
401 client = LocalGitClient(url)
401 else:
402 else:
402 url_obj = url_parser(url)
403 url_obj = url_parser(url)
403 o = self._build_opener(url)
404 o = self._build_opener(url)
404 url, _ = url_obj.authinfo()
405 url, _ = url_obj.authinfo()
405 client = HttpGitClient(base_url=url, opener=o)
406 client = HttpGitClient(base_url=url, opener=o)
406 repo = self._factory.repo(wire)
407 repo = self._factory.repo(wire)
407
408
408 determine_wants = repo.object_store.determine_wants_all
409 determine_wants = repo.object_store.determine_wants_all
409 if refs:
410 if refs:
410 def determine_wants_requested(references):
411 def determine_wants_requested(references):
411 return [references[r] for r in references if r in refs]
412 return [references[r] for r in references if r in refs]
412 determine_wants = determine_wants_requested
413 determine_wants = determine_wants_requested
413
414
414 try:
415 try:
415 remote_refs = client.fetch(
416 remote_refs = client.fetch(
416 path=url, target=repo, determine_wants=determine_wants)
417 path=url, target=repo, determine_wants=determine_wants)
417 except NotGitRepository as e:
418 except NotGitRepository as e:
418 log.warning(
419 log.warning(
419 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 'Trying to fetch from "%s" failed, not a Git repository.', url)
420 # Exception can contain unicode which we convert
421 # Exception can contain unicode which we convert
421 raise exceptions.AbortException(repr(e))
422 raise exceptions.AbortException(e)(repr(e))
422
423
423 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 # mikhail: client.fetch() returns all the remote refs, but fetches only
424 # refs filtered by `determine_wants` function. We need to filter result
425 # refs filtered by `determine_wants` function. We need to filter result
425 # as well
426 # as well
426 if refs:
427 if refs:
427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
428
429
429 if apply_refs:
430 if apply_refs:
430 # TODO: johbo: Needs proper test coverage with a git repository
431 # TODO: johbo: Needs proper test coverage with a git repository
431 # that contains a tag object, so that we would end up with
432 # that contains a tag object, so that we would end up with
432 # a peeled ref at this point.
433 # a peeled ref at this point.
433 PEELED_REF_MARKER = '^{}'
434 PEELED_REF_MARKER = '^{}'
434 for k in remote_refs:
435 for k in remote_refs:
435 if k.endswith(PEELED_REF_MARKER):
436 if k.endswith(PEELED_REF_MARKER):
436 log.info("Skipping peeled reference %s", k)
437 log.info("Skipping peeled reference %s", k)
437 continue
438 continue
438 repo[k] = remote_refs[k]
439 repo[k] = remote_refs[k]
439
440
440 if refs:
441 if refs:
441 # mikhail: explicitly set the head to the last ref.
442 # mikhail: explicitly set the head to the last ref.
442 repo['HEAD'] = remote_refs[refs[-1]]
443 repo['HEAD'] = remote_refs[refs[-1]]
443
444
444 # TODO: mikhail: should we return remote_refs here to be
445 # TODO: mikhail: should we return remote_refs here to be
445 # consistent?
446 # consistent?
446 else:
447 else:
447 return remote_refs
448 return remote_refs
448
449
449 @reraise_safe_exceptions
450 @reraise_safe_exceptions
450 def sync_push(self, wire, url, refs=None):
451 def sync_push(self, wire, url, refs=None):
451 if self.check_url(url, wire):
452 if self.check_url(url, wire):
452 repo = self._factory.repo(wire)
453 repo = self._factory.repo(wire)
453 self.run_git_command(
454 self.run_git_command(
454 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
455 _copts=['-c', 'core.askpass=""'],
456 _copts=['-c', 'core.askpass=""'],
456 extra_env={'GIT_TERMINAL_PROMPT': '0'})
457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
457
458
458 @reraise_safe_exceptions
459 @reraise_safe_exceptions
459 def get_remote_refs(self, wire, url):
460 def get_remote_refs(self, wire, url):
460 repo = Repo(url)
461 repo = Repo(url)
461 return repo.get_refs()
462 return repo.get_refs()
462
463
463 @reraise_safe_exceptions
464 @reraise_safe_exceptions
464 def get_description(self, wire):
465 def get_description(self, wire):
465 repo = self._factory.repo(wire)
466 repo = self._factory.repo(wire)
466 return repo.get_description()
467 return repo.get_description()
467
468
468 @reraise_safe_exceptions
469 @reraise_safe_exceptions
469 def get_file_history(self, wire, file_path, commit_id, limit):
470 def get_file_history(self, wire, file_path, commit_id, limit):
470 repo = self._factory.repo(wire)
471 repo = self._factory.repo(wire)
471 include = [commit_id]
472 include = [commit_id]
472 paths = [file_path]
473 paths = [file_path]
473
474
474 walker = repo.get_walker(include, paths=paths, max_entries=limit)
475 walker = repo.get_walker(include, paths=paths, max_entries=limit)
475 return [x.commit.id for x in walker]
476 return [x.commit.id for x in walker]
476
477
477 @reraise_safe_exceptions
478 @reraise_safe_exceptions
478 def get_missing_revs(self, wire, rev1, rev2, path2):
479 def get_missing_revs(self, wire, rev1, rev2, path2):
479 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
480 LocalGitClient(thin_packs=False).fetch(path2, repo)
481 LocalGitClient(thin_packs=False).fetch(path2, repo)
481
482
482 wire_remote = wire.copy()
483 wire_remote = wire.copy()
483 wire_remote['path'] = path2
484 wire_remote['path'] = path2
484 repo_remote = self._factory.repo(wire_remote)
485 repo_remote = self._factory.repo(wire_remote)
485 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
486 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
486
487
487 revs = [
488 revs = [
488 x.commit.id
489 x.commit.id
489 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
490 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
490 return revs
491 return revs
491
492
492 @reraise_safe_exceptions
493 @reraise_safe_exceptions
493 def get_object(self, wire, sha):
494 def get_object(self, wire, sha):
494 repo = self._factory.repo(wire)
495 repo = self._factory.repo(wire)
495 obj = repo.get_object(sha)
496 obj = repo.get_object(sha)
496 commit_id = obj.id
497 commit_id = obj.id
497
498
498 if isinstance(obj, Tag):
499 if isinstance(obj, Tag):
499 commit_id = obj.object[1]
500 commit_id = obj.object[1]
500
501
501 return {
502 return {
502 'id': obj.id,
503 'id': obj.id,
503 'type': obj.type_name,
504 'type': obj.type_name,
504 'commit_id': commit_id
505 'commit_id': commit_id
505 }
506 }
506
507
507 @reraise_safe_exceptions
508 @reraise_safe_exceptions
508 def get_object_attrs(self, wire, sha, *attrs):
509 def get_object_attrs(self, wire, sha, *attrs):
509 repo = self._factory.repo(wire)
510 repo = self._factory.repo(wire)
510 obj = repo.get_object(sha)
511 obj = repo.get_object(sha)
511 return list(getattr(obj, a) for a in attrs)
512 return list(getattr(obj, a) for a in attrs)
512
513
513 @reraise_safe_exceptions
514 @reraise_safe_exceptions
514 def get_refs(self, wire):
515 def get_refs(self, wire):
515 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
516 result = {}
517 result = {}
517 for ref, sha in repo.refs.as_dict().items():
518 for ref, sha in repo.refs.as_dict().items():
518 peeled_sha = repo.get_peeled(ref)
519 peeled_sha = repo.get_peeled(ref)
519 result[ref] = peeled_sha
520 result[ref] = peeled_sha
520 return result
521 return result
521
522
522 @reraise_safe_exceptions
523 @reraise_safe_exceptions
523 def get_refs_path(self, wire):
524 def get_refs_path(self, wire):
524 repo = self._factory.repo(wire)
525 repo = self._factory.repo(wire)
525 return repo.refs.path
526 return repo.refs.path
526
527
527 @reraise_safe_exceptions
528 @reraise_safe_exceptions
528 def head(self, wire):
529 def head(self, wire, show_exc=True):
529 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
531 try:
530 return repo.head()
532 return repo.head()
533 except Exception:
534 if show_exc:
535 raise
531
536
532 @reraise_safe_exceptions
537 @reraise_safe_exceptions
533 def init(self, wire):
538 def init(self, wire):
534 repo_path = str_to_dulwich(wire['path'])
539 repo_path = str_to_dulwich(wire['path'])
535 self.repo = Repo.init(repo_path)
540 self.repo = Repo.init(repo_path)
536
541
537 @reraise_safe_exceptions
542 @reraise_safe_exceptions
538 def init_bare(self, wire):
543 def init_bare(self, wire):
539 repo_path = str_to_dulwich(wire['path'])
544 repo_path = str_to_dulwich(wire['path'])
540 self.repo = Repo.init_bare(repo_path)
545 self.repo = Repo.init_bare(repo_path)
541
546
542 @reraise_safe_exceptions
547 @reraise_safe_exceptions
543 def revision(self, wire, rev):
548 def revision(self, wire, rev):
544 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
545 obj = repo[rev]
550 obj = repo[rev]
546 obj_data = {
551 obj_data = {
547 'id': obj.id,
552 'id': obj.id,
548 }
553 }
549 try:
554 try:
550 obj_data['tree'] = obj.tree
555 obj_data['tree'] = obj.tree
551 except AttributeError:
556 except AttributeError:
552 pass
557 pass
553 return obj_data
558 return obj_data
554
559
555 @reraise_safe_exceptions
560 @reraise_safe_exceptions
556 def commit_attribute(self, wire, rev, attr):
561 def commit_attribute(self, wire, rev, attr):
557 repo = self._factory.repo(wire)
562 repo = self._factory.repo(wire)
558 obj = repo[rev]
563 obj = repo[rev]
559 return getattr(obj, attr)
564 return getattr(obj, attr)
560
565
561 @reraise_safe_exceptions
566 @reraise_safe_exceptions
562 def set_refs(self, wire, key, value):
567 def set_refs(self, wire, key, value):
563 repo = self._factory.repo(wire)
568 repo = self._factory.repo(wire)
564 repo.refs[key] = value
569 repo.refs[key] = value
565
570
566 @reraise_safe_exceptions
571 @reraise_safe_exceptions
567 def remove_ref(self, wire, key):
572 def remove_ref(self, wire, key):
568 repo = self._factory.repo(wire)
573 repo = self._factory.repo(wire)
569 del repo.refs[key]
574 del repo.refs[key]
570
575
571 @reraise_safe_exceptions
576 @reraise_safe_exceptions
572 def tree_changes(self, wire, source_id, target_id):
577 def tree_changes(self, wire, source_id, target_id):
573 repo = self._factory.repo(wire)
578 repo = self._factory.repo(wire)
574 source = repo[source_id].tree if source_id else None
579 source = repo[source_id].tree if source_id else None
575 target = repo[target_id].tree
580 target = repo[target_id].tree
576 result = repo.object_store.tree_changes(source, target)
581 result = repo.object_store.tree_changes(source, target)
577 return list(result)
582 return list(result)
578
583
579 @reraise_safe_exceptions
584 @reraise_safe_exceptions
580 def tree_items(self, wire, tree_id):
585 def tree_items(self, wire, tree_id):
581 repo = self._factory.repo(wire)
586 repo = self._factory.repo(wire)
582 tree = repo[tree_id]
587 tree = repo[tree_id]
583
588
584 result = []
589 result = []
585 for item in tree.iteritems():
590 for item in tree.iteritems():
586 item_sha = item.sha
591 item_sha = item.sha
587 item_mode = item.mode
592 item_mode = item.mode
588
593
589 if FILE_MODE(item_mode) == GIT_LINK:
594 if FILE_MODE(item_mode) == GIT_LINK:
590 item_type = "link"
595 item_type = "link"
591 else:
596 else:
592 item_type = repo[item_sha].type_name
597 item_type = repo[item_sha].type_name
593
598
594 result.append((item.path, item_mode, item_sha, item_type))
599 result.append((item.path, item_mode, item_sha, item_type))
595 return result
600 return result
596
601
597 @reraise_safe_exceptions
602 @reraise_safe_exceptions
598 def update_server_info(self, wire):
603 def update_server_info(self, wire):
599 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
600 update_server_info(repo)
605 update_server_info(repo)
601
606
602 @reraise_safe_exceptions
607 @reraise_safe_exceptions
603 def discover_git_version(self):
608 def discover_git_version(self):
604 stdout, _ = self.run_git_command(
609 stdout, _ = self.run_git_command(
605 {}, ['--version'], _bare=True, _safe=True)
610 {}, ['--version'], _bare=True, _safe=True)
606 prefix = 'git version'
611 prefix = 'git version'
607 if stdout.startswith(prefix):
612 if stdout.startswith(prefix):
608 stdout = stdout[len(prefix):]
613 stdout = stdout[len(prefix):]
609 return stdout.strip()
614 return stdout.strip()
610
615
611 @reraise_safe_exceptions
616 @reraise_safe_exceptions
612 def run_git_command(self, wire, cmd, **opts):
617 def run_git_command(self, wire, cmd, **opts):
613 path = wire.get('path', None)
618 path = wire.get('path', None)
614
619
615 if path and os.path.isdir(path):
620 if path and os.path.isdir(path):
616 opts['cwd'] = path
621 opts['cwd'] = path
617
622
618 if '_bare' in opts:
623 if '_bare' in opts:
619 _copts = []
624 _copts = []
620 del opts['_bare']
625 del opts['_bare']
621 else:
626 else:
622 _copts = ['-c', 'core.quotepath=false', ]
627 _copts = ['-c', 'core.quotepath=false', ]
623 safe_call = False
628 safe_call = False
624 if '_safe' in opts:
629 if '_safe' in opts:
625 # no exc on failure
630 # no exc on failure
626 del opts['_safe']
631 del opts['_safe']
627 safe_call = True
632 safe_call = True
628
633
629 if '_copts' in opts:
634 if '_copts' in opts:
630 _copts.extend(opts['_copts'] or [])
635 _copts.extend(opts['_copts'] or [])
631 del opts['_copts']
636 del opts['_copts']
632
637
633 gitenv = os.environ.copy()
638 gitenv = os.environ.copy()
634 gitenv.update(opts.pop('extra_env', {}))
639 gitenv.update(opts.pop('extra_env', {}))
635 # need to clean fix GIT_DIR !
640 # need to clean fix GIT_DIR !
636 if 'GIT_DIR' in gitenv:
641 if 'GIT_DIR' in gitenv:
637 del gitenv['GIT_DIR']
642 del gitenv['GIT_DIR']
638 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
643 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
639 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
644 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
640
645
641 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
646 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
642
647
643 try:
648 try:
644 _opts = {'env': gitenv, 'shell': False}
649 _opts = {'env': gitenv, 'shell': False}
645 _opts.update(opts)
650 _opts.update(opts)
646 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
651 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
647
652
648 return ''.join(p), ''.join(p.error)
653 return ''.join(p), ''.join(p.error)
649 except (EnvironmentError, OSError) as err:
654 except (EnvironmentError, OSError) as err:
650 cmd = ' '.join(cmd) # human friendly CMD
655 cmd = ' '.join(cmd) # human friendly CMD
651 tb_err = ("Couldn't run git command (%s).\n"
656 tb_err = ("Couldn't run git command (%s).\n"
652 "Original error was:%s\n" % (cmd, err))
657 "Original error was:%s\n" % (cmd, err))
653 log.exception(tb_err)
658 log.exception(tb_err)
654 if safe_call:
659 if safe_call:
655 return '', err
660 return '', err
656 else:
661 else:
657 raise exceptions.VcsException(tb_err)
662 raise exceptions.VcsException()(tb_err)
658
663
659 @reraise_safe_exceptions
664 @reraise_safe_exceptions
660 def install_hooks(self, wire, force=False):
665 def install_hooks(self, wire, force=False):
661 from vcsserver.hook_utils import install_git_hooks
666 from vcsserver.hook_utils import install_git_hooks
662 repo = self._factory.repo(wire)
667 repo = self._factory.repo(wire)
663 return install_git_hooks(repo.path, repo.bare, force_create=force)
668 return install_git_hooks(repo.path, repo.bare, force_create=force)
664
669
665
670
666 def str_to_dulwich(value):
671 def str_to_dulwich(value):
667 """
672 """
668 Dulwich 0.10.1a requires `unicode` objects to be passed in.
673 Dulwich 0.10.1a requires `unicode` objects to be passed in.
669 """
674 """
670 return value.decode(settings.WIRE_ENCODING)
675 return value.decode(settings.WIRE_ENCODING)
@@ -1,776 +1,793 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 baseui.setconfig('ui', 'paginate', 'never')
56 baseui.setconfig('ui', 'paginate', 'never')
57 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 # signal in a non-main thread, thus generating a ValueError.
58 # signal in a non-main thread, thus generating a ValueError.
59 baseui.setconfig('worker', 'numcpus', 1)
59 baseui.setconfig('worker', 'numcpus', 1)
60
60
61 # If there is no config for the largefiles extension, we explicitly disable
61 # If there is no config for the largefiles extension, we explicitly disable
62 # it here. This overrides settings from repositories hgrc file. Recent
62 # it here. This overrides settings from repositories hgrc file. Recent
63 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 # repo.
64 # repo.
65 if not baseui.hasconfig('extensions', 'largefiles'):
65 if not baseui.hasconfig('extensions', 'largefiles'):
66 log.debug('Explicitly disable largefiles extension for repo.')
66 log.debug('Explicitly disable largefiles extension for repo.')
67 baseui.setconfig('extensions', 'largefiles', '!')
67 baseui.setconfig('extensions', 'largefiles', '!')
68
68
69 return baseui
69 return baseui
70
70
71
71
72 def reraise_safe_exceptions(func):
72 def reraise_safe_exceptions(func):
73 """Decorator for converting mercurial exceptions to something neutral."""
73 """Decorator for converting mercurial exceptions to something neutral."""
74 def wrapper(*args, **kwargs):
74 def wrapper(*args, **kwargs):
75 try:
75 try:
76 return func(*args, **kwargs)
76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired):
77 except (Abort, InterventionRequired) as e:
78 raise_from_original(exceptions.AbortException)
78 raise_from_original(exceptions.AbortException(e))
79 except RepoLookupError:
79 except RepoLookupError as e:
80 raise_from_original(exceptions.LookupException)
80 raise_from_original(exceptions.LookupException(e))
81 except RequirementError:
81 except RequirementError as e:
82 raise_from_original(exceptions.RequirementException)
82 raise_from_original(exceptions.RequirementException(e))
83 except RepoError:
83 except RepoError as e:
84 raise_from_original(exceptions.VcsException)
84 raise_from_original(exceptions.VcsException(e))
85 except LookupError:
85 except LookupError as e:
86 raise_from_original(exceptions.LookupException)
86 raise_from_original(exceptions.LookupException(e))
87 except Exception as e:
87 except Exception as e:
88 if not hasattr(e, '_vcs_kind'):
88 if not hasattr(e, '_vcs_kind'):
89 log.exception("Unhandled exception in hg remote call")
89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException(e))
91
91 raise
92 raise
92 return wrapper
93 return wrapper
93
94
94
95
95 class MercurialFactory(RepoFactory):
96 class MercurialFactory(RepoFactory):
97 repo_type = 'hg'
96
98
97 def _create_config(self, config, hooks=True):
99 def _create_config(self, config, hooks=True):
98 if not hooks:
100 if not hooks:
99 hooks_to_clean = frozenset((
101 hooks_to_clean = frozenset((
100 'changegroup.repo_size', 'preoutgoing.pre_pull',
102 'changegroup.repo_size', 'preoutgoing.pre_pull',
101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
102 new_config = []
104 new_config = []
103 for section, option, value in config:
105 for section, option, value in config:
104 if section == 'hooks' and option in hooks_to_clean:
106 if section == 'hooks' and option in hooks_to_clean:
105 continue
107 continue
106 new_config.append((section, option, value))
108 new_config.append((section, option, value))
107 config = new_config
109 config = new_config
108
110
109 baseui = make_ui_from_config(config)
111 baseui = make_ui_from_config(config)
110 return baseui
112 return baseui
111
113
112 def _create_repo(self, wire, create):
114 def _create_repo(self, wire, create):
113 baseui = self._create_config(wire["config"])
115 baseui = self._create_config(wire["config"])
114 return localrepository(baseui, wire["path"], create)
116 return localrepository(baseui, wire["path"], create)
115
117
116
118
117 class HgRemote(object):
119 class HgRemote(object):
118
120
119 def __init__(self, factory):
121 def __init__(self, factory):
120 self._factory = factory
122 self._factory = factory
121
123
122 self._bulk_methods = {
124 self._bulk_methods = {
123 "affected_files": self.ctx_files,
125 "affected_files": self.ctx_files,
124 "author": self.ctx_user,
126 "author": self.ctx_user,
125 "branch": self.ctx_branch,
127 "branch": self.ctx_branch,
126 "children": self.ctx_children,
128 "children": self.ctx_children,
127 "date": self.ctx_date,
129 "date": self.ctx_date,
128 "message": self.ctx_description,
130 "message": self.ctx_description,
129 "parents": self.ctx_parents,
131 "parents": self.ctx_parents,
130 "status": self.ctx_status,
132 "status": self.ctx_status,
131 "obsolete": self.ctx_obsolete,
133 "obsolete": self.ctx_obsolete,
132 "phase": self.ctx_phase,
134 "phase": self.ctx_phase,
133 "hidden": self.ctx_hidden,
135 "hidden": self.ctx_hidden,
134 "_file_paths": self.ctx_list,
136 "_file_paths": self.ctx_list,
135 }
137 }
136
138
137 @reraise_safe_exceptions
139 @reraise_safe_exceptions
138 def discover_hg_version(self):
140 def discover_hg_version(self):
139 from mercurial import util
141 from mercurial import util
140 return util.version()
142 return util.version()
141
143
142 @reraise_safe_exceptions
144 @reraise_safe_exceptions
143 def archive_repo(self, archive_path, mtime, file_info, kind):
145 def archive_repo(self, archive_path, mtime, file_info, kind):
144 if kind == "tgz":
146 if kind == "tgz":
145 archiver = archival.tarit(archive_path, mtime, "gz")
147 archiver = archival.tarit(archive_path, mtime, "gz")
146 elif kind == "tbz2":
148 elif kind == "tbz2":
147 archiver = archival.tarit(archive_path, mtime, "bz2")
149 archiver = archival.tarit(archive_path, mtime, "bz2")
148 elif kind == 'zip':
150 elif kind == 'zip':
149 archiver = archival.zipit(archive_path, mtime)
151 archiver = archival.zipit(archive_path, mtime)
150 else:
152 else:
151 raise exceptions.ArchiveException(
153 raise exceptions.ArchiveException()(
152 'Remote does not support: "%s".' % kind)
154 'Remote does not support: "%s".' % kind)
153
155
154 for f_path, f_mode, f_is_link, f_content in file_info:
156 for f_path, f_mode, f_is_link, f_content in file_info:
155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
156 archiver.done()
158 archiver.done()
157
159
158 @reraise_safe_exceptions
160 @reraise_safe_exceptions
159 def bookmarks(self, wire):
161 def bookmarks(self, wire):
160 repo = self._factory.repo(wire)
162 repo = self._factory.repo(wire)
161 return dict(repo._bookmarks)
163 return dict(repo._bookmarks)
162
164
163 @reraise_safe_exceptions
165 @reraise_safe_exceptions
164 def branches(self, wire, normal, closed):
166 def branches(self, wire, normal, closed):
165 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
166 iter_branches = repo.branchmap().iterbranches()
168 iter_branches = repo.branchmap().iterbranches()
167 bt = {}
169 bt = {}
168 for branch_name, _heads, tip, is_closed in iter_branches:
170 for branch_name, _heads, tip, is_closed in iter_branches:
169 if normal and not is_closed:
171 if normal and not is_closed:
170 bt[branch_name] = tip
172 bt[branch_name] = tip
171 if closed and is_closed:
173 if closed and is_closed:
172 bt[branch_name] = tip
174 bt[branch_name] = tip
173
175
174 return bt
176 return bt
175
177
176 @reraise_safe_exceptions
178 @reraise_safe_exceptions
177 def bulk_request(self, wire, rev, pre_load):
179 def bulk_request(self, wire, rev, pre_load):
178 result = {}
180 result = {}
179 for attr in pre_load:
181 for attr in pre_load:
180 try:
182 try:
181 method = self._bulk_methods[attr]
183 method = self._bulk_methods[attr]
182 result[attr] = method(wire, rev)
184 result[attr] = method(wire, rev)
183 except KeyError:
185 except KeyError as e:
184 raise exceptions.VcsException(
186 raise exceptions.VcsException(e)(
185 'Unknown bulk attribute: "%s"' % attr)
187 'Unknown bulk attribute: "%s"' % attr)
186 return result
188 return result
187
189
188 @reraise_safe_exceptions
190 @reraise_safe_exceptions
189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
191 clone(baseui, source, dest, noupdate=not update_after_clone)
193 clone(baseui, source, dest, noupdate=not update_after_clone)
192
194
193 @reraise_safe_exceptions
195 @reraise_safe_exceptions
194 def commitctx(
196 def commitctx(
195 self, wire, message, parents, commit_time, commit_timezone,
197 self, wire, message, parents, commit_time, commit_timezone,
196 user, files, extra, removed, updated):
198 user, files, extra, removed, updated):
197
199
198 def _filectxfn(_repo, memctx, path):
200 def _filectxfn(_repo, memctx, path):
199 """
201 """
200 Marks given path as added/changed/removed in a given _repo. This is
202 Marks given path as added/changed/removed in a given _repo. This is
201 for internal mercurial commit function.
203 for internal mercurial commit function.
202 """
204 """
203
205
204 # check if this path is removed
206 # check if this path is removed
205 if path in removed:
207 if path in removed:
206 # returning None is a way to mark node for removal
208 # returning None is a way to mark node for removal
207 return None
209 return None
208
210
209 # check if this path is added
211 # check if this path is added
210 for node in updated:
212 for node in updated:
211 if node['path'] == path:
213 if node['path'] == path:
212 return memfilectx(
214 return memfilectx(
213 _repo,
215 _repo,
216 changectx=memctx,
214 path=node['path'],
217 path=node['path'],
215 data=node['content'],
218 data=node['content'],
216 islink=False,
219 islink=False,
217 isexec=bool(node['mode'] & stat.S_IXUSR),
220 isexec=bool(node['mode'] & stat.S_IXUSR),
218 copied=False,
221 copied=False)
219 memctx=memctx)
220
222
221 raise exceptions.AbortException(
223 raise exceptions.AbortException()(
222 "Given path haven't been marked as added, "
224 "Given path haven't been marked as added, "
223 "changed or removed (%s)" % path)
225 "changed or removed (%s)" % path)
224
226
225 repo = self._factory.repo(wire)
227 repo = self._factory.repo(wire)
226
228
227 commit_ctx = memctx(
229 commit_ctx = memctx(
228 repo=repo,
230 repo=repo,
229 parents=parents,
231 parents=parents,
230 text=message,
232 text=message,
231 files=files,
233 files=files,
232 filectxfn=_filectxfn,
234 filectxfn=_filectxfn,
233 user=user,
235 user=user,
234 date=(commit_time, commit_timezone),
236 date=(commit_time, commit_timezone),
235 extra=extra)
237 extra=extra)
236
238
237 n = repo.commitctx(commit_ctx)
239 n = repo.commitctx(commit_ctx)
238 new_id = hex(n)
240 new_id = hex(n)
239
241
240 return new_id
242 return new_id
241
243
242 @reraise_safe_exceptions
244 @reraise_safe_exceptions
243 def ctx_branch(self, wire, revision):
245 def ctx_branch(self, wire, revision):
244 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
245 ctx = repo[revision]
247 ctx = repo[revision]
246 return ctx.branch()
248 return ctx.branch()
247
249
248 @reraise_safe_exceptions
250 @reraise_safe_exceptions
249 def ctx_children(self, wire, revision):
251 def ctx_children(self, wire, revision):
250 repo = self._factory.repo(wire)
252 repo = self._factory.repo(wire)
251 ctx = repo[revision]
253 ctx = repo[revision]
252 return [child.rev() for child in ctx.children()]
254 return [child.rev() for child in ctx.children()]
253
255
254 @reraise_safe_exceptions
256 @reraise_safe_exceptions
255 def ctx_date(self, wire, revision):
257 def ctx_date(self, wire, revision):
256 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
257 ctx = repo[revision]
259 ctx = repo[revision]
258 return ctx.date()
260 return ctx.date()
259
261
260 @reraise_safe_exceptions
262 @reraise_safe_exceptions
261 def ctx_description(self, wire, revision):
263 def ctx_description(self, wire, revision):
262 repo = self._factory.repo(wire)
264 repo = self._factory.repo(wire)
263 ctx = repo[revision]
265 ctx = repo[revision]
264 return ctx.description()
266 return ctx.description()
265
267
266 @reraise_safe_exceptions
268 @reraise_safe_exceptions
267 def ctx_diff(
269 def ctx_diff(
268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
269 repo = self._factory.repo(wire)
271 repo = self._factory.repo(wire)
270 ctx = repo[revision]
272 ctx = repo[revision]
271 result = ctx.diff(
273 result = ctx.diff(
272 git=git, ignore_whitespace=ignore_whitespace, context=context)
274 git=git, ignore_whitespace=ignore_whitespace, context=context)
273 return list(result)
275 return list(result)
274
276
275 @reraise_safe_exceptions
277 @reraise_safe_exceptions
276 def ctx_files(self, wire, revision):
278 def ctx_files(self, wire, revision):
277 repo = self._factory.repo(wire)
279 repo = self._factory.repo(wire)
278 ctx = repo[revision]
280 ctx = repo[revision]
279 return ctx.files()
281 return ctx.files()
280
282
281 @reraise_safe_exceptions
283 @reraise_safe_exceptions
282 def ctx_list(self, path, revision):
284 def ctx_list(self, path, revision):
283 repo = self._factory.repo(path)
285 repo = self._factory.repo(path)
284 ctx = repo[revision]
286 ctx = repo[revision]
285 return list(ctx)
287 return list(ctx)
286
288
287 @reraise_safe_exceptions
289 @reraise_safe_exceptions
288 def ctx_parents(self, wire, revision):
290 def ctx_parents(self, wire, revision):
289 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
290 ctx = repo[revision]
292 ctx = repo[revision]
291 return [parent.rev() for parent in ctx.parents()]
293 return [parent.rev() for parent in ctx.parents()]
292
294
293 @reraise_safe_exceptions
295 @reraise_safe_exceptions
294 def ctx_phase(self, wire, revision):
296 def ctx_phase(self, wire, revision):
295 repo = self._factory.repo(wire)
297 repo = self._factory.repo(wire)
296 ctx = repo[revision]
298 ctx = repo[revision]
297 # public=0, draft=1, secret=3
299 # public=0, draft=1, secret=3
298 return ctx.phase()
300 return ctx.phase()
299
301
300 @reraise_safe_exceptions
302 @reraise_safe_exceptions
301 def ctx_obsolete(self, wire, revision):
303 def ctx_obsolete(self, wire, revision):
302 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
303 ctx = repo[revision]
305 ctx = repo[revision]
304 return ctx.obsolete()
306 return ctx.obsolete()
305
307
306 @reraise_safe_exceptions
308 @reraise_safe_exceptions
307 def ctx_hidden(self, wire, revision):
309 def ctx_hidden(self, wire, revision):
308 repo = self._factory.repo(wire)
310 repo = self._factory.repo(wire)
309 ctx = repo[revision]
311 ctx = repo[revision]
310 return ctx.hidden()
312 return ctx.hidden()
311
313
312 @reraise_safe_exceptions
314 @reraise_safe_exceptions
313 def ctx_substate(self, wire, revision):
315 def ctx_substate(self, wire, revision):
314 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
315 ctx = repo[revision]
317 ctx = repo[revision]
316 return ctx.substate
318 return ctx.substate
317
319
318 @reraise_safe_exceptions
320 @reraise_safe_exceptions
319 def ctx_status(self, wire, revision):
321 def ctx_status(self, wire, revision):
320 repo = self._factory.repo(wire)
322 repo = self._factory.repo(wire)
321 ctx = repo[revision]
323 ctx = repo[revision]
322 status = repo[ctx.p1().node()].status(other=ctx.node())
324 status = repo[ctx.p1().node()].status(other=ctx.node())
323 # object of status (odd, custom named tuple in mercurial) is not
325 # object of status (odd, custom named tuple in mercurial) is not
324 # correctly serializable, we make it a list, as the underling
326 # correctly serializable, we make it a list, as the underling
325 # API expects this to be a list
327 # API expects this to be a list
326 return list(status)
328 return list(status)
327
329
328 @reraise_safe_exceptions
330 @reraise_safe_exceptions
329 def ctx_user(self, wire, revision):
331 def ctx_user(self, wire, revision):
330 repo = self._factory.repo(wire)
332 repo = self._factory.repo(wire)
331 ctx = repo[revision]
333 ctx = repo[revision]
332 return ctx.user()
334 return ctx.user()
333
335
334 @reraise_safe_exceptions
336 @reraise_safe_exceptions
335 def check_url(self, url, config):
337 def check_url(self, url, config):
336 _proto = None
338 _proto = None
337 if '+' in url[:url.find('://')]:
339 if '+' in url[:url.find('://')]:
338 _proto = url[0:url.find('+')]
340 _proto = url[0:url.find('+')]
339 url = url[url.find('+') + 1:]
341 url = url[url.find('+') + 1:]
340 handlers = []
342 handlers = []
341 url_obj = url_parser(url)
343 url_obj = url_parser(url)
342 test_uri, authinfo = url_obj.authinfo()
344 test_uri, authinfo = url_obj.authinfo()
343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
344 url_obj.query = obfuscate_qs(url_obj.query)
346 url_obj.query = obfuscate_qs(url_obj.query)
345
347
346 cleaned_uri = str(url_obj)
348 cleaned_uri = str(url_obj)
347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
348
350
349 if authinfo:
351 if authinfo:
350 # create a password manager
352 # create a password manager
351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
352 passmgr.add_password(*authinfo)
354 passmgr.add_password(*authinfo)
353
355
354 handlers.extend((httpbasicauthhandler(passmgr),
356 handlers.extend((httpbasicauthhandler(passmgr),
355 httpdigestauthhandler(passmgr)))
357 httpdigestauthhandler(passmgr)))
356
358
357 o = urllib2.build_opener(*handlers)
359 o = urllib2.build_opener(*handlers)
358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
359 ('Accept', 'application/mercurial-0.1')]
361 ('Accept', 'application/mercurial-0.1')]
360
362
361 q = {"cmd": 'between'}
363 q = {"cmd": 'between'}
362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
363 qs = '?%s' % urllib.urlencode(q)
365 qs = '?%s' % urllib.urlencode(q)
364 cu = "%s%s" % (test_uri, qs)
366 cu = "%s%s" % (test_uri, qs)
365 req = urllib2.Request(cu, None, {})
367 req = urllib2.Request(cu, None, {})
366
368
367 try:
369 try:
368 log.debug("Trying to open URL %s", cleaned_uri)
370 log.debug("Trying to open URL %s", cleaned_uri)
369 resp = o.open(req)
371 resp = o.open(req)
370 if resp.code != 200:
372 if resp.code != 200:
371 raise exceptions.URLError('Return Code is not 200')
373 raise exceptions.URLError()('Return Code is not 200')
372 except Exception as e:
374 except Exception as e:
373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 # means it cannot be cloned
376 # means it cannot be cloned
375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
376
378
377 # now check if it's a proper hg repo, but don't do it for svn
379 # now check if it's a proper hg repo, but don't do it for svn
378 try:
380 try:
379 if _proto == 'svn':
381 if _proto == 'svn':
380 pass
382 pass
381 else:
383 else:
382 # check for pure hg repos
384 # check for pure hg repos
383 log.debug(
385 log.debug(
384 "Verifying if URL is a Mercurial repository: %s",
386 "Verifying if URL is a Mercurial repository: %s",
385 cleaned_uri)
387 cleaned_uri)
386 httppeer(make_ui_from_config(config), url).lookup('tip')
388 ui = make_ui_from_config(config)
389 peer_checker = makepeer(ui, url)
390 peer_checker.lookup('tip')
387 except Exception as e:
391 except Exception as e:
388 log.warning("URL is not a valid Mercurial repository: %s",
392 log.warning("URL is not a valid Mercurial repository: %s",
389 cleaned_uri)
393 cleaned_uri)
390 raise exceptions.URLError(
394 raise exceptions.URLError(e)(
391 "url [%s] does not look like an hg repo org_exc: %s"
395 "url [%s] does not look like an hg repo org_exc: %s"
392 % (cleaned_uri, e))
396 % (cleaned_uri, e))
393
397
394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
395 return True
399 return True
396
400
397 @reraise_safe_exceptions
401 @reraise_safe_exceptions
398 def diff(
402 def diff(
399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
400 context):
404 context):
401 repo = self._factory.repo(wire)
405 repo = self._factory.repo(wire)
402
406
403 if file_filter:
407 if file_filter:
404 match_filter = match(file_filter[0], '', [file_filter[1]])
408 match_filter = match(file_filter[0], '', [file_filter[1]])
405 else:
409 else:
406 match_filter = file_filter
410 match_filter = file_filter
407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
408
412
409 try:
413 try:
410 return "".join(patch.diff(
414 return "".join(patch.diff(
411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
412 except RepoLookupError:
416 except RepoLookupError as e:
413 raise exceptions.LookupException()
417 raise exceptions.LookupException(e)()
414
418
415 @reraise_safe_exceptions
419 @reraise_safe_exceptions
416 def file_history(self, wire, revision, path, limit):
420 def file_history(self, wire, revision, path, limit):
417 repo = self._factory.repo(wire)
421 repo = self._factory.repo(wire)
418
422
419 ctx = repo[revision]
423 ctx = repo[revision]
420 fctx = ctx.filectx(path)
424 fctx = ctx.filectx(path)
421
425
422 def history_iter():
426 def history_iter():
423 limit_rev = fctx.rev()
427 limit_rev = fctx.rev()
424 for obj in reversed(list(fctx.filelog())):
428 for obj in reversed(list(fctx.filelog())):
425 obj = fctx.filectx(obj)
429 obj = fctx.filectx(obj)
426 if limit_rev >= obj.rev():
430 if limit_rev >= obj.rev():
427 yield obj
431 yield obj
428
432
429 history = []
433 history = []
430 for cnt, obj in enumerate(history_iter()):
434 for cnt, obj in enumerate(history_iter()):
431 if limit and cnt >= limit:
435 if limit and cnt >= limit:
432 break
436 break
433 history.append(hex(obj.node()))
437 history.append(hex(obj.node()))
434
438
435 return [x for x in history]
439 return [x for x in history]
436
440
437 @reraise_safe_exceptions
441 @reraise_safe_exceptions
438 def file_history_untill(self, wire, revision, path, limit):
442 def file_history_untill(self, wire, revision, path, limit):
439 repo = self._factory.repo(wire)
443 repo = self._factory.repo(wire)
440 ctx = repo[revision]
444 ctx = repo[revision]
441 fctx = ctx.filectx(path)
445 fctx = ctx.filectx(path)
442
446
443 file_log = list(fctx.filelog())
447 file_log = list(fctx.filelog())
444 if limit:
448 if limit:
445 # Limit to the last n items
449 # Limit to the last n items
446 file_log = file_log[-limit:]
450 file_log = file_log[-limit:]
447
451
448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
449
453
450 @reraise_safe_exceptions
454 @reraise_safe_exceptions
451 def fctx_annotate(self, wire, revision, path):
455 def fctx_annotate(self, wire, revision, path):
452 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
453 ctx = repo[revision]
457 ctx = repo[revision]
454 fctx = ctx.filectx(path)
458 fctx = ctx.filectx(path)
455
459
456 result = []
460 result = []
457 for i, (a_line, content) in enumerate(fctx.annotate()):
461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
458 ln_no = i + 1
462 ln_no = i
459 sha = hex(a_line.fctx.node())
463 sha = hex(annotate_obj.fctx.node())
464 content = annotate_obj.text
460 result.append((ln_no, sha, content))
465 result.append((ln_no, sha, content))
461 return result
466 return result
462
467
463 @reraise_safe_exceptions
468 @reraise_safe_exceptions
464 def fctx_data(self, wire, revision, path):
469 def fctx_data(self, wire, revision, path):
465 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
466 ctx = repo[revision]
471 ctx = repo[revision]
467 fctx = ctx.filectx(path)
472 fctx = ctx.filectx(path)
468 return fctx.data()
473 return fctx.data()
469
474
470 @reraise_safe_exceptions
475 @reraise_safe_exceptions
471 def fctx_flags(self, wire, revision, path):
476 def fctx_flags(self, wire, revision, path):
472 repo = self._factory.repo(wire)
477 repo = self._factory.repo(wire)
473 ctx = repo[revision]
478 ctx = repo[revision]
474 fctx = ctx.filectx(path)
479 fctx = ctx.filectx(path)
475 return fctx.flags()
480 return fctx.flags()
476
481
477 @reraise_safe_exceptions
482 @reraise_safe_exceptions
478 def fctx_size(self, wire, revision, path):
483 def fctx_size(self, wire, revision, path):
479 repo = self._factory.repo(wire)
484 repo = self._factory.repo(wire)
480 ctx = repo[revision]
485 ctx = repo[revision]
481 fctx = ctx.filectx(path)
486 fctx = ctx.filectx(path)
482 return fctx.size()
487 return fctx.size()
483
488
484 @reraise_safe_exceptions
489 @reraise_safe_exceptions
485 def get_all_commit_ids(self, wire, name):
490 def get_all_commit_ids(self, wire, name):
486 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
487 revs = repo.filtered(name).changelog.index
492 revs = repo.filtered(name).changelog.index
488 return map(lambda x: hex(x[7]), revs)[:-1]
493 return map(lambda x: hex(x[7]), revs)[:-1]
489
494
490 @reraise_safe_exceptions
495 @reraise_safe_exceptions
491 def get_config_value(self, wire, section, name, untrusted=False):
496 def get_config_value(self, wire, section, name, untrusted=False):
492 repo = self._factory.repo(wire)
497 repo = self._factory.repo(wire)
493 return repo.ui.config(section, name, untrusted=untrusted)
498 return repo.ui.config(section, name, untrusted=untrusted)
494
499
495 @reraise_safe_exceptions
500 @reraise_safe_exceptions
496 def get_config_bool(self, wire, section, name, untrusted=False):
501 def get_config_bool(self, wire, section, name, untrusted=False):
497 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
498 return repo.ui.configbool(section, name, untrusted=untrusted)
503 return repo.ui.configbool(section, name, untrusted=untrusted)
499
504
500 @reraise_safe_exceptions
505 @reraise_safe_exceptions
501 def get_config_list(self, wire, section, name, untrusted=False):
506 def get_config_list(self, wire, section, name, untrusted=False):
502 repo = self._factory.repo(wire)
507 repo = self._factory.repo(wire)
503 return repo.ui.configlist(section, name, untrusted=untrusted)
508 return repo.ui.configlist(section, name, untrusted=untrusted)
504
509
505 @reraise_safe_exceptions
510 @reraise_safe_exceptions
506 def is_large_file(self, wire, path):
511 def is_large_file(self, wire, path):
507 return largefiles.lfutil.isstandin(path)
512 return largefiles.lfutil.isstandin(path)
508
513
509 @reraise_safe_exceptions
514 @reraise_safe_exceptions
510 def in_largefiles_store(self, wire, sha):
515 def in_largefiles_store(self, wire, sha):
511 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
512 return largefiles.lfutil.instore(repo, sha)
517 return largefiles.lfutil.instore(repo, sha)
513
518
514 @reraise_safe_exceptions
519 @reraise_safe_exceptions
515 def in_user_cache(self, wire, sha):
520 def in_user_cache(self, wire, sha):
516 repo = self._factory.repo(wire)
521 repo = self._factory.repo(wire)
517 return largefiles.lfutil.inusercache(repo.ui, sha)
522 return largefiles.lfutil.inusercache(repo.ui, sha)
518
523
519 @reraise_safe_exceptions
524 @reraise_safe_exceptions
520 def store_path(self, wire, sha):
525 def store_path(self, wire, sha):
521 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
522 return largefiles.lfutil.storepath(repo, sha)
527 return largefiles.lfutil.storepath(repo, sha)
523
528
524 @reraise_safe_exceptions
529 @reraise_safe_exceptions
525 def link(self, wire, sha, path):
530 def link(self, wire, sha, path):
526 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
527 largefiles.lfutil.link(
532 largefiles.lfutil.link(
528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529
534
530 @reraise_safe_exceptions
535 @reraise_safe_exceptions
531 def localrepository(self, wire, create=False):
536 def localrepository(self, wire, create=False):
532 self._factory.repo(wire, create=create)
537 self._factory.repo(wire, create=create)
533
538
534 @reraise_safe_exceptions
539 @reraise_safe_exceptions
535 def lookup(self, wire, revision, both):
540 def lookup(self, wire, revision, both):
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
541
537 if isinstance(revision, float):
538 revision = long(revision)
539 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543
544 if isinstance(revision, int):
545 # NOTE(marcink):
546 # since Mercurial doesn't support indexes properly
547 # we need to shift accordingly by one to get proper index, e.g
548 # repo[-1] => repo[-2]
549 # repo[0] => repo[-1]
550 # repo[1] => repo[2] we also never call repo[0] because
551 # it's actually second commit
552 if revision <= 0:
553 revision = revision + -1
554 else:
555 revision = revision + 1
556
540 try:
557 try:
541 ctx = repo[revision]
558 ctx = repo[revision]
542 except RepoLookupError:
559 except RepoLookupError as e:
543 raise exceptions.LookupException(revision)
560 raise exceptions.LookupException(e)(revision)
544 except LookupError as e:
561 except LookupError as e:
545 raise exceptions.LookupException(e.name)
562 raise exceptions.LookupException(e)(e.name)
546
563
547 if not both:
564 if not both:
548 return ctx.hex()
565 return ctx.hex()
549
566
550 ctx = repo[ctx.hex()]
567 ctx = repo[ctx.hex()]
551 return ctx.hex(), ctx.rev()
568 return ctx.hex(), ctx.rev()
552
569
553 @reraise_safe_exceptions
570 @reraise_safe_exceptions
554 def pull(self, wire, url, commit_ids=None):
571 def pull(self, wire, url, commit_ids=None):
555 repo = self._factory.repo(wire)
572 repo = self._factory.repo(wire)
556 # Disable any prompts for this repo
573 # Disable any prompts for this repo
557 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
558
575
559 remote = peer(repo, {}, url)
576 remote = peer(repo, {}, url)
560 # Disable any prompts for this remote
577 # Disable any prompts for this remote
561 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
562
579
563 if commit_ids:
580 if commit_ids:
564 commit_ids = [bin(commit_id) for commit_id in commit_ids]
581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
565
582
566 return exchange.pull(
583 return exchange.pull(
567 repo, remote, heads=commit_ids, force=None).cgresult
584 repo, remote, heads=commit_ids, force=None).cgresult
568
585
569 @reraise_safe_exceptions
586 @reraise_safe_exceptions
570 def sync_push(self, wire, url):
587 def sync_push(self, wire, url):
571 if self.check_url(url, wire['config']):
588 if self.check_url(url, wire['config']):
572 repo = self._factory.repo(wire)
589 repo = self._factory.repo(wire)
573
590
574 # Disable any prompts for this repo
591 # Disable any prompts for this repo
575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
592 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576
593
577 bookmarks = dict(repo._bookmarks).keys()
594 bookmarks = dict(repo._bookmarks).keys()
578 remote = peer(repo, {}, url)
595 remote = peer(repo, {}, url)
579 # Disable any prompts for this remote
596 # Disable any prompts for this remote
580 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
597 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
581
598
582 return exchange.push(
599 return exchange.push(
583 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
600 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
584
601
585 @reraise_safe_exceptions
602 @reraise_safe_exceptions
586 def revision(self, wire, rev):
603 def revision(self, wire, rev):
587 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
588 ctx = repo[rev]
605 ctx = repo[rev]
589 return ctx.rev()
606 return ctx.rev()
590
607
591 @reraise_safe_exceptions
608 @reraise_safe_exceptions
592 def rev_range(self, wire, filter):
609 def rev_range(self, wire, filter):
593 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
594 revisions = [rev for rev in revrange(repo, filter)]
611 revisions = [rev for rev in revrange(repo, filter)]
595 return revisions
612 return revisions
596
613
597 @reraise_safe_exceptions
614 @reraise_safe_exceptions
598 def rev_range_hash(self, wire, node):
615 def rev_range_hash(self, wire, node):
599 repo = self._factory.repo(wire)
616 repo = self._factory.repo(wire)
600
617
601 def get_revs(repo, rev_opt):
618 def get_revs(repo, rev_opt):
602 if rev_opt:
619 if rev_opt:
603 revs = revrange(repo, rev_opt)
620 revs = revrange(repo, rev_opt)
604 if len(revs) == 0:
621 if len(revs) == 0:
605 return (nullrev, nullrev)
622 return (nullrev, nullrev)
606 return max(revs), min(revs)
623 return max(revs), min(revs)
607 else:
624 else:
608 return len(repo) - 1, 0
625 return len(repo) - 1, 0
609
626
610 stop, start = get_revs(repo, [node + ':'])
627 stop, start = get_revs(repo, [node + ':'])
611 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
628 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
612 return revs
629 return revs
613
630
614 @reraise_safe_exceptions
631 @reraise_safe_exceptions
615 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
632 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
616 other_path = kwargs.pop('other_path', None)
633 other_path = kwargs.pop('other_path', None)
617
634
618 # case when we want to compare two independent repositories
635 # case when we want to compare two independent repositories
619 if other_path and other_path != wire["path"]:
636 if other_path and other_path != wire["path"]:
620 baseui = self._factory._create_config(wire["config"])
637 baseui = self._factory._create_config(wire["config"])
621 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
638 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
622 else:
639 else:
623 repo = self._factory.repo(wire)
640 repo = self._factory.repo(wire)
624 return list(repo.revs(rev_spec, *args))
641 return list(repo.revs(rev_spec, *args))
625
642
626 @reraise_safe_exceptions
643 @reraise_safe_exceptions
627 def strip(self, wire, revision, update, backup):
644 def strip(self, wire, revision, update, backup):
628 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
629 ctx = repo[revision]
646 ctx = repo[revision]
630 hgext_strip(
647 hgext_strip(
631 repo.baseui, repo, ctx.node(), update=update, backup=backup)
648 repo.baseui, repo, ctx.node(), update=update, backup=backup)
632
649
633 @reraise_safe_exceptions
650 @reraise_safe_exceptions
634 def verify(self, wire,):
651 def verify(self, wire,):
635 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
636 baseui = self._factory._create_config(wire['config'])
653 baseui = self._factory._create_config(wire['config'])
637 baseui.setconfig('ui', 'quiet', 'false')
654 baseui.setconfig('ui', 'quiet', 'false')
638 output = io.BytesIO()
655 output = io.BytesIO()
639
656
640 def write(data, **unused_kwargs):
657 def write(data, **unused_kwargs):
641 output.write(data)
658 output.write(data)
642 baseui.write = write
659 baseui.write = write
643
660
644 repo.ui = baseui
661 repo.ui = baseui
645 verify.verify(repo)
662 verify.verify(repo)
646 return output.getvalue()
663 return output.getvalue()
647
664
648 @reraise_safe_exceptions
665 @reraise_safe_exceptions
649 def tag(self, wire, name, revision, message, local, user,
666 def tag(self, wire, name, revision, message, local, user,
650 tag_time, tag_timezone):
667 tag_time, tag_timezone):
651 repo = self._factory.repo(wire)
668 repo = self._factory.repo(wire)
652 ctx = repo[revision]
669 ctx = repo[revision]
653 node = ctx.node()
670 node = ctx.node()
654
671
655 date = (tag_time, tag_timezone)
672 date = (tag_time, tag_timezone)
656 try:
673 try:
657 hg_tag.tag(repo, name, node, message, local, user, date)
674 hg_tag.tag(repo, name, node, message, local, user, date)
658 except Abort as e:
675 except Abort as e:
659 log.exception("Tag operation aborted")
676 log.exception("Tag operation aborted")
660 # Exception can contain unicode which we convert
677 # Exception can contain unicode which we convert
661 raise exceptions.AbortException(repr(e))
678 raise exceptions.AbortException(e)(repr(e))
662
679
663 @reraise_safe_exceptions
680 @reraise_safe_exceptions
664 def tags(self, wire):
681 def tags(self, wire):
665 repo = self._factory.repo(wire)
682 repo = self._factory.repo(wire)
666 return repo.tags()
683 return repo.tags()
667
684
668 @reraise_safe_exceptions
685 @reraise_safe_exceptions
669 def update(self, wire, node=None, clean=False):
686 def update(self, wire, node=None, clean=False):
670 repo = self._factory.repo(wire)
687 repo = self._factory.repo(wire)
671 baseui = self._factory._create_config(wire['config'])
688 baseui = self._factory._create_config(wire['config'])
672 commands.update(baseui, repo, node=node, clean=clean)
689 commands.update(baseui, repo, node=node, clean=clean)
673
690
674 @reraise_safe_exceptions
691 @reraise_safe_exceptions
675 def identify(self, wire):
692 def identify(self, wire):
676 repo = self._factory.repo(wire)
693 repo = self._factory.repo(wire)
677 baseui = self._factory._create_config(wire['config'])
694 baseui = self._factory._create_config(wire['config'])
678 output = io.BytesIO()
695 output = io.BytesIO()
679 baseui.write = output.write
696 baseui.write = output.write
680 # This is required to get a full node id
697 # This is required to get a full node id
681 baseui.debugflag = True
698 baseui.debugflag = True
682 commands.identify(baseui, repo, id=True)
699 commands.identify(baseui, repo, id=True)
683
700
684 return output.getvalue()
701 return output.getvalue()
685
702
686 @reraise_safe_exceptions
703 @reraise_safe_exceptions
687 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
704 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
688 hooks=True):
705 hooks=True):
689 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
707 baseui = self._factory._create_config(wire['config'], hooks=hooks)
691
708
692 # Mercurial internally has a lot of logic that checks ONLY if
709 # Mercurial internally has a lot of logic that checks ONLY if
693 # option is defined, we just pass those if they are defined then
710 # option is defined, we just pass those if they are defined then
694 opts = {}
711 opts = {}
695 if bookmark:
712 if bookmark:
696 opts['bookmark'] = bookmark
713 opts['bookmark'] = bookmark
697 if branch:
714 if branch:
698 opts['branch'] = branch
715 opts['branch'] = branch
699 if revision:
716 if revision:
700 opts['rev'] = revision
717 opts['rev'] = revision
701
718
702 commands.pull(baseui, repo, source, **opts)
719 commands.pull(baseui, repo, source, **opts)
703
720
704 @reraise_safe_exceptions
721 @reraise_safe_exceptions
705 def heads(self, wire, branch=None):
722 def heads(self, wire, branch=None):
706 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
707 baseui = self._factory._create_config(wire['config'])
724 baseui = self._factory._create_config(wire['config'])
708 output = io.BytesIO()
725 output = io.BytesIO()
709
726
710 def write(data, **unused_kwargs):
727 def write(data, **unused_kwargs):
711 output.write(data)
728 output.write(data)
712
729
713 baseui.write = write
730 baseui.write = write
714 if branch:
731 if branch:
715 args = [branch]
732 args = [branch]
716 else:
733 else:
717 args = []
734 args = []
718 commands.heads(baseui, repo, template='{node} ', *args)
735 commands.heads(baseui, repo, template='{node} ', *args)
719
736
720 return output.getvalue()
737 return output.getvalue()
721
738
722 @reraise_safe_exceptions
739 @reraise_safe_exceptions
723 def ancestor(self, wire, revision1, revision2):
740 def ancestor(self, wire, revision1, revision2):
724 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
725 changelog = repo.changelog
742 changelog = repo.changelog
726 lookup = repo.lookup
743 lookup = repo.lookup
727 a = changelog.ancestor(lookup(revision1), lookup(revision2))
744 a = changelog.ancestor(lookup(revision1), lookup(revision2))
728 return hex(a)
745 return hex(a)
729
746
730 @reraise_safe_exceptions
747 @reraise_safe_exceptions
731 def push(self, wire, revisions, dest_path, hooks=True,
748 def push(self, wire, revisions, dest_path, hooks=True,
732 push_branches=False):
749 push_branches=False):
733 repo = self._factory.repo(wire)
750 repo = self._factory.repo(wire)
734 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751 baseui = self._factory._create_config(wire['config'], hooks=hooks)
735 commands.push(baseui, repo, dest=dest_path, rev=revisions,
752 commands.push(baseui, repo, dest=dest_path, rev=revisions,
736 new_branch=push_branches)
753 new_branch=push_branches)
737
754
738 @reraise_safe_exceptions
755 @reraise_safe_exceptions
739 def merge(self, wire, revision):
756 def merge(self, wire, revision):
740 repo = self._factory.repo(wire)
757 repo = self._factory.repo(wire)
741 baseui = self._factory._create_config(wire['config'])
758 baseui = self._factory._create_config(wire['config'])
742 repo.ui.setconfig('ui', 'merge', 'internal:dump')
759 repo.ui.setconfig('ui', 'merge', 'internal:dump')
743
760
744 # In case of sub repositories are used mercurial prompts the user in
761 # In case of sub repositories are used mercurial prompts the user in
745 # case of merge conflicts or different sub repository sources. By
762 # case of merge conflicts or different sub repository sources. By
746 # setting the interactive flag to `False` mercurial doesn't prompt the
763 # setting the interactive flag to `False` mercurial doesn't prompt the
747 # used but instead uses a default value.
764 # used but instead uses a default value.
748 repo.ui.setconfig('ui', 'interactive', False)
765 repo.ui.setconfig('ui', 'interactive', False)
749
766
750 commands.merge(baseui, repo, rev=revision)
767 commands.merge(baseui, repo, rev=revision)
751
768
752 @reraise_safe_exceptions
769 @reraise_safe_exceptions
753 def commit(self, wire, message, username, close_branch=False):
770 def commit(self, wire, message, username, close_branch=False):
754 repo = self._factory.repo(wire)
771 repo = self._factory.repo(wire)
755 baseui = self._factory._create_config(wire['config'])
772 baseui = self._factory._create_config(wire['config'])
756 repo.ui.setconfig('ui', 'username', username)
773 repo.ui.setconfig('ui', 'username', username)
757 commands.commit(baseui, repo, message=message, close_branch=close_branch)
774 commands.commit(baseui, repo, message=message, close_branch=close_branch)
758
775
759 @reraise_safe_exceptions
776 @reraise_safe_exceptions
760 def rebase(self, wire, source=None, dest=None, abort=False):
777 def rebase(self, wire, source=None, dest=None, abort=False):
761 repo = self._factory.repo(wire)
778 repo = self._factory.repo(wire)
762 baseui = self._factory._create_config(wire['config'])
779 baseui = self._factory._create_config(wire['config'])
763 repo.ui.setconfig('ui', 'merge', 'internal:dump')
780 repo.ui.setconfig('ui', 'merge', 'internal:dump')
764 rebase.rebase(
781 rebase.rebase(
765 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
782 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
766
783
767 @reraise_safe_exceptions
784 @reraise_safe_exceptions
768 def bookmark(self, wire, bookmark, revision=None):
785 def bookmark(self, wire, bookmark, revision=None):
769 repo = self._factory.repo(wire)
786 repo = self._factory.repo(wire)
770 baseui = self._factory._create_config(wire['config'])
787 baseui = self._factory._create_config(wire['config'])
771 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
788 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
772
789
773 @reraise_safe_exceptions
790 @reraise_safe_exceptions
774 def install_hooks(self, wire, force=False):
791 def install_hooks(self, wire, force=False):
775 # we don't need any special hooks for Mercurial
792 # we don't need any special hooks for Mercurial
776 pass
793 pass
@@ -1,63 +1,63 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24 # patch demandimport, due to bug in mercurial when it always triggers
24 # patch demandimport, due to bug in mercurial when it always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 demandimport.enable = lambda *args, **kwargs: 1
26 demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39 from mercurial import tags as hg_tag
39 from mercurial import tags as hg_tag
40
40
41 from mercurial.commands import clone, nullid, pull
41 from mercurial.commands import clone, nullid, pull
42 from mercurial.context import memctx, memfilectx
42 from mercurial.context import memctx, memfilectx
43 from mercurial.error import (
43 from mercurial.error import (
44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 RequirementError)
45 RequirementError)
46 from mercurial.hgweb import hgweb_mod
46 from mercurial.hgweb import hgweb_mod
47 from mercurial.localrepo import localrepository
47 from mercurial.localrepo import localrepository
48 from mercurial.match import match
48 from mercurial.match import match
49 from mercurial.mdiff import diffopts
49 from mercurial.mdiff import diffopts
50 from mercurial.node import bin, hex
50 from mercurial.node import bin, hex
51 from mercurial.encoding import tolocal
51 from mercurial.encoding import tolocal
52 from mercurial.discovery import findcommonoutgoing
52 from mercurial.discovery import findcommonoutgoing
53 from mercurial.hg import peer
53 from mercurial.hg import peer
54 from mercurial.httppeer import httppeer
54 from mercurial.httppeer import makepeer
55 from mercurial.util import url as hg_url
55 from mercurial.util import url as hg_url
56 from mercurial.scmutil import revrange
56 from mercurial.scmutil import revrange
57 from mercurial.node import nullrev
57 from mercurial.node import nullrev
58 from mercurial import exchange
58 from mercurial import exchange
59 from hgext import largefiles
59 from hgext import largefiles
60
60
61 # those authnadlers are patched for python 2.6.5 bug an
61 # those authnadlers are patched for python 2.6.5 bug an
62 # infinit looping when given invalid resources
62 # infinit looping when given invalid resources
63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto.capabilities = wrapper
39 lfproto._capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto.capabilities
44 wrapped_capabilities = lfproto._capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(repo, proto):
47 def _dynamic_capabilities(orig, repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 return calc_capabilities(orig, repo, proto)
55 else:
56 else:
56 logger.debug('Extension largefiles disabled')
57 logger.debug('Extension largefiles disabled')
57 calc_capabilities = lfproto.capabilitiesorig
58 return orig(repo, proto)
58 return calc_capabilities(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
62
62
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from hgcompat import subrepo
65 from hgcompat import subrepo
66 from exceptions import SubrepoMergeException
66 from exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
69
70 def __init__(self, ctx, path, *args, **kwargs):
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
71 """Initialize abstractsubrepo part
72
72
73 ``ctx`` is the context referring this subrepository in the
73 ``ctx`` is the context referring this subrepository in the
74 parent repository.
74 parent repository.
75
75
76 ``path`` is the path to this subrepository as seen from
76 ``path`` is the path to this subrepository as seen from
77 innermost repository.
77 innermost repository.
78 """
78 """
79 self.ui = ctx.repo().ui
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
80 self._ctx = ctx
81 self._path = path
81 self._path = path
82
82
83 def storeclean(self, path):
83 def storeclean(self, path):
84 """
84 """
85 returns true if the repository has not changed since it was last
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
86 cloned from or pushed to a given repository.
87 """
87 """
88 return True
88 return True
89
89
90 def dirty(self, ignoreupdate=False, missing=False):
90 def dirty(self, ignoreupdate=False, missing=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
94 """
95 return False
95 return False
96
96
97 def basestate(self):
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
99 state and working directory modifications"""
100 substate = subrepo.state(self._ctx, self.ui)
100 substate = subrepo.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
102 return rev
103
103
104 def remove(self):
104 def remove(self):
105 """remove the subrepo
105 """remove the subrepo
106
106
107 (should verify the dirstate is not dirty first)
107 (should verify the dirstate is not dirty first)
108 """
108 """
109 pass
109 pass
110
110
111 def get(self, state, overwrite=False):
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
112 """run whatever commands are needed to put the subrepo into
113 this state
113 this state
114 """
114 """
115 pass
115 pass
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
119 raise SubrepoMergeException()()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
123
123
124 This may be a no-op on some systems.
124 This may be a no-op on some systems.
125 """
125 """
126 pass
126 pass
127
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
130 subrepo.types = {
131 'hg': NoOpSubrepo,
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
133 'svn': NoOpSubrepo
134 }
134 }
@@ -1,570 +1,657 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55 return json.loads(response.read())
55 return json.loads(response.read())
56
56
57 def _serialize(self, hook_name, extras):
57 def _serialize(self, hook_name, extras):
58 data = {
58 data = {
59 'method': hook_name,
59 'method': hook_name,
60 'extras': extras
60 'extras': extras
61 }
61 }
62 return json.dumps(data)
62 return json.dumps(data)
63
63
64
64
65 class HooksDummyClient(object):
65 class HooksDummyClient(object):
66 def __init__(self, hooks_module):
66 def __init__(self, hooks_module):
67 self._hooks_module = importlib.import_module(hooks_module)
67 self._hooks_module = importlib.import_module(hooks_module)
68
68
69 def __call__(self, hook_name, extras):
69 def __call__(self, hook_name, extras):
70 with self._hooks_module.Hooks() as hooks:
70 with self._hooks_module.Hooks() as hooks:
71 return getattr(hooks, hook_name)(extras)
71 return getattr(hooks, hook_name)(extras)
72
72
73
73
74 class RemoteMessageWriter(object):
74 class RemoteMessageWriter(object):
75 """Writer base class."""
75 """Writer base class."""
76 def write(self, message):
76 def write(self, message):
77 raise NotImplementedError()
77 raise NotImplementedError()
78
78
79
79
80 class HgMessageWriter(RemoteMessageWriter):
80 class HgMessageWriter(RemoteMessageWriter):
81 """Writer that knows how to send messages to mercurial clients."""
81 """Writer that knows how to send messages to mercurial clients."""
82
82
83 def __init__(self, ui):
83 def __init__(self, ui):
84 self.ui = ui
84 self.ui = ui
85
85
86 def write(self, message):
86 def write(self, message):
87 # TODO: Check why the quiet flag is set by default.
87 # TODO: Check why the quiet flag is set by default.
88 old = self.ui.quiet
88 old = self.ui.quiet
89 self.ui.quiet = False
89 self.ui.quiet = False
90 self.ui.status(message.encode('utf-8'))
90 self.ui.status(message.encode('utf-8'))
91 self.ui.quiet = old
91 self.ui.quiet = old
92
92
93
93
94 class GitMessageWriter(RemoteMessageWriter):
94 class GitMessageWriter(RemoteMessageWriter):
95 """Writer that knows how to send messages to git clients."""
95 """Writer that knows how to send messages to git clients."""
96
96
97 def __init__(self, stdout=None):
97 def __init__(self, stdout=None):
98 self.stdout = stdout or sys.stdout
98 self.stdout = stdout or sys.stdout
99
99
100 def write(self, message):
100 def write(self, message):
101 self.stdout.write(message.encode('utf-8'))
101 self.stdout.write(message.encode('utf-8'))
102
102
103
103
104 class SvnMessageWriter(RemoteMessageWriter):
104 class SvnMessageWriter(RemoteMessageWriter):
105 """Writer that knows how to send messages to svn clients."""
105 """Writer that knows how to send messages to svn clients."""
106
106
107 def __init__(self, stderr=None):
107 def __init__(self, stderr=None):
108 # SVN needs data sent to stderr for back-to-client messaging
108 # SVN needs data sent to stderr for back-to-client messaging
109 self.stderr = stderr or sys.stderr
109 self.stderr = stderr or sys.stderr
110
110
111 def write(self, message):
111 def write(self, message):
112 self.stderr.write(message.encode('utf-8'))
112 self.stderr.write(message.encode('utf-8'))
113
113
114
114
115 def _handle_exception(result):
115 def _handle_exception(result):
116 exception_class = result.get('exception')
116 exception_class = result.get('exception')
117 exception_traceback = result.get('exception_traceback')
117 exception_traceback = result.get('exception_traceback')
118
118
119 if exception_traceback:
119 if exception_traceback:
120 log.error('Got traceback from remote call:%s', exception_traceback)
120 log.error('Got traceback from remote call:%s', exception_traceback)
121
121
122 if exception_class == 'HTTPLockedRC':
122 if exception_class == 'HTTPLockedRC':
123 raise exceptions.RepositoryLockedException(*result['exception_args'])
123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 elif exception_class == 'HTTPBranchProtected':
125 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
124 elif exception_class == 'RepositoryError':
126 elif exception_class == 'RepositoryError':
125 raise exceptions.VcsException(*result['exception_args'])
127 raise exceptions.VcsException()(*result['exception_args'])
126 elif exception_class:
128 elif exception_class:
127 raise Exception('Got remote exception "%s" with args "%s"' %
129 raise Exception('Got remote exception "%s" with args "%s"' %
128 (exception_class, result['exception_args']))
130 (exception_class, result['exception_args']))
129
131
130
132
131 def _get_hooks_client(extras):
133 def _get_hooks_client(extras):
132 if 'hooks_uri' in extras:
134 if 'hooks_uri' in extras:
133 protocol = extras.get('hooks_protocol')
135 protocol = extras.get('hooks_protocol')
134 return HooksHttpClient(extras['hooks_uri'])
136 return HooksHttpClient(extras['hooks_uri'])
135 else:
137 else:
136 return HooksDummyClient(extras['hooks_module'])
138 return HooksDummyClient(extras['hooks_module'])
137
139
138
140
139 def _call_hook(hook_name, extras, writer):
141 def _call_hook(hook_name, extras, writer):
140 hooks_client = _get_hooks_client(extras)
142 hooks_client = _get_hooks_client(extras)
141 log.debug('Hooks, using client:%s', hooks_client)
143 log.debug('Hooks, using client:%s', hooks_client)
142 result = hooks_client(hook_name, extras)
144 result = hooks_client(hook_name, extras)
143 log.debug('Hooks got result: %s', result)
145 log.debug('Hooks got result: %s', result)
144 writer.write(result['output'])
146 writer.write(result['output'])
145 _handle_exception(result)
147 _handle_exception(result)
146
148
147 return result['status']
149 return result['status']
148
150
149
151
150 def _extras_from_ui(ui):
152 def _extras_from_ui(ui):
151 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
153 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
152 if not hook_data:
154 if not hook_data:
153 # maybe it's inside environ ?
155 # maybe it's inside environ ?
154 env_hook_data = os.environ.get('RC_SCM_DATA')
156 env_hook_data = os.environ.get('RC_SCM_DATA')
155 if env_hook_data:
157 if env_hook_data:
156 hook_data = env_hook_data
158 hook_data = env_hook_data
157
159
158 extras = {}
160 extras = {}
159 if hook_data:
161 if hook_data:
160 extras = json.loads(hook_data)
162 extras = json.loads(hook_data)
161 return extras
163 return extras
162
164
163
165
164 def _rev_range_hash(repo, node):
166 def _rev_range_hash(repo, node, check_heads=False):
165
167
166 commits = []
168 commits = []
167 for rev in xrange(repo[node], len(repo)):
169 revs = []
170 start = repo[node].rev()
171 end = len(repo)
172 for rev in range(start, end):
173 revs.append(rev)
168 ctx = repo[rev]
174 ctx = repo[rev]
169 commit_id = mercurial.node.hex(ctx.node())
175 commit_id = mercurial.node.hex(ctx.node())
170 branch = ctx.branch()
176 branch = ctx.branch()
171 commits.append((commit_id, branch))
177 commits.append((commit_id, branch))
172
178
173 return commits
179 parent_heads = []
180 if check_heads:
181 parent_heads = _check_heads(repo, start, end, revs)
182 return commits, parent_heads
183
184
185 def _check_heads(repo, start, end, commits):
186 changelog = repo.changelog
187 parents = set()
188
189 for new_rev in commits:
190 for p in changelog.parentrevs(new_rev):
191 if p == mercurial.node.nullrev:
192 continue
193 if p < start:
194 parents.add(p)
195
196 for p in parents:
197 branch = repo[p].branch()
198 # The heads descending from that parent, on the same branch
199 parent_heads = set([p])
200 reachable = set([p])
201 for x in xrange(p + 1, end):
202 if repo[x].branch() != branch:
203 continue
204 for pp in changelog.parentrevs(x):
205 if pp in reachable:
206 reachable.add(x)
207 parent_heads.discard(pp)
208 parent_heads.add(x)
209 # More than one head? Suggest merging
210 if len(parent_heads) > 1:
211 return list(parent_heads)
212
213 return []
174
214
175
215
176 def repo_size(ui, repo, **kwargs):
216 def repo_size(ui, repo, **kwargs):
177 extras = _extras_from_ui(ui)
217 extras = _extras_from_ui(ui)
178 return _call_hook('repo_size', extras, HgMessageWriter(ui))
218 return _call_hook('repo_size', extras, HgMessageWriter(ui))
179
219
180
220
181 def pre_pull(ui, repo, **kwargs):
221 def pre_pull(ui, repo, **kwargs):
182 extras = _extras_from_ui(ui)
222 extras = _extras_from_ui(ui)
183 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
223 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
184
224
185
225
186 def pre_pull_ssh(ui, repo, **kwargs):
226 def pre_pull_ssh(ui, repo, **kwargs):
187 extras = _extras_from_ui(ui)
227 extras = _extras_from_ui(ui)
188 if extras and extras.get('SSH'):
228 if extras and extras.get('SSH'):
189 return pre_pull(ui, repo, **kwargs)
229 return pre_pull(ui, repo, **kwargs)
190 return 0
230 return 0
191
231
192
232
193 def post_pull(ui, repo, **kwargs):
233 def post_pull(ui, repo, **kwargs):
194 extras = _extras_from_ui(ui)
234 extras = _extras_from_ui(ui)
195 return _call_hook('post_pull', extras, HgMessageWriter(ui))
235 return _call_hook('post_pull', extras, HgMessageWriter(ui))
196
236
197
237
198 def post_pull_ssh(ui, repo, **kwargs):
238 def post_pull_ssh(ui, repo, **kwargs):
199 extras = _extras_from_ui(ui)
239 extras = _extras_from_ui(ui)
200 if extras and extras.get('SSH'):
240 if extras and extras.get('SSH'):
201 return post_pull(ui, repo, **kwargs)
241 return post_pull(ui, repo, **kwargs)
202 return 0
242 return 0
203
243
204
244
205 def pre_push(ui, repo, node=None, **kwargs):
245 def pre_push(ui, repo, node=None, **kwargs):
246 """
247 Mercurial pre_push hook
248 """
206 extras = _extras_from_ui(ui)
249 extras = _extras_from_ui(ui)
250 detect_force_push = extras.get('detect_force_push')
207
251
208 rev_data = []
252 rev_data = []
209 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
253 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
210 branches = collections.defaultdict(list)
254 branches = collections.defaultdict(list)
211 for commit_id, branch in _rev_range_hash(repo, node):
255 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
256 for commit_id, branch in commits:
212 branches[branch].append(commit_id)
257 branches[branch].append(commit_id)
213
258
214 for branch, commits in branches.iteritems():
259 for branch, commits in branches.items():
215 old_rev = kwargs.get('node_last') or commits[0]
260 old_rev = kwargs.get('node_last') or commits[0]
216 rev_data.append({
261 rev_data.append({
217 'old_rev': old_rev,
262 'old_rev': old_rev,
218 'new_rev': commits[-1],
263 'new_rev': commits[-1],
219 'ref': '',
264 'ref': '',
220 'type': 'branch',
265 'type': 'branch',
221 'name': branch,
266 'name': branch,
222 })
267 })
223
268
269 for push_ref in rev_data:
270 push_ref['multiple_heads'] = _heads
271
224 extras['commit_ids'] = rev_data
272 extras['commit_ids'] = rev_data
225 return _call_hook('pre_push', extras, HgMessageWriter(ui))
273 return _call_hook('pre_push', extras, HgMessageWriter(ui))
226
274
227
275
228 def pre_push_ssh(ui, repo, node=None, **kwargs):
276 def pre_push_ssh(ui, repo, node=None, **kwargs):
229 if _extras_from_ui(ui).get('SSH'):
277 extras = _extras_from_ui(ui)
278 if extras.get('SSH'):
230 return pre_push(ui, repo, node, **kwargs)
279 return pre_push(ui, repo, node, **kwargs)
231
280
232 return 0
281 return 0
233
282
234
283
235 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
284 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
285 """
286 Mercurial pre_push hook for SSH
287 """
236 extras = _extras_from_ui(ui)
288 extras = _extras_from_ui(ui)
237 if extras.get('SSH'):
289 if extras.get('SSH'):
238 permission = extras['SSH_PERMISSIONS']
290 permission = extras['SSH_PERMISSIONS']
239
291
240 if 'repository.write' == permission or 'repository.admin' == permission:
292 if 'repository.write' == permission or 'repository.admin' == permission:
241 return 0
293 return 0
242
294
243 # non-zero ret code
295 # non-zero ret code
244 return 1
296 return 1
245
297
246 return 0
298 return 0
247
299
248
300
249 def post_push(ui, repo, node, **kwargs):
301 def post_push(ui, repo, node, **kwargs):
302 """
303 Mercurial post_push hook
304 """
250 extras = _extras_from_ui(ui)
305 extras = _extras_from_ui(ui)
251
306
252 commit_ids = []
307 commit_ids = []
253 branches = []
308 branches = []
254 bookmarks = []
309 bookmarks = []
255 tags = []
310 tags = []
256
311
257 for commit_id, branch in _rev_range_hash(repo, node):
312 commits, _heads = _rev_range_hash(repo, node)
313 for commit_id, branch in commits:
258 commit_ids.append(commit_id)
314 commit_ids.append(commit_id)
259 if branch not in branches:
315 if branch not in branches:
260 branches.append(branch)
316 branches.append(branch)
261
317
262 if hasattr(ui, '_rc_pushkey_branches'):
318 if hasattr(ui, '_rc_pushkey_branches'):
263 bookmarks = ui._rc_pushkey_branches
319 bookmarks = ui._rc_pushkey_branches
264
320
265 extras['commit_ids'] = commit_ids
321 extras['commit_ids'] = commit_ids
266 extras['new_refs'] = {
322 extras['new_refs'] = {
267 'branches': branches,
323 'branches': branches,
268 'bookmarks': bookmarks,
324 'bookmarks': bookmarks,
269 'tags': tags
325 'tags': tags
270 }
326 }
271
327
272 return _call_hook('post_push', extras, HgMessageWriter(ui))
328 return _call_hook('post_push', extras, HgMessageWriter(ui))
273
329
274
330
275 def post_push_ssh(ui, repo, node, **kwargs):
331 def post_push_ssh(ui, repo, node, **kwargs):
332 """
333 Mercurial post_push hook for SSH
334 """
276 if _extras_from_ui(ui).get('SSH'):
335 if _extras_from_ui(ui).get('SSH'):
277 return post_push(ui, repo, node, **kwargs)
336 return post_push(ui, repo, node, **kwargs)
278 return 0
337 return 0
279
338
280
339
281 def key_push(ui, repo, **kwargs):
340 def key_push(ui, repo, **kwargs):
282 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
341 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
283 # store new bookmarks in our UI object propagated later to post_push
342 # store new bookmarks in our UI object propagated later to post_push
284 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
343 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
285 return
344 return
286
345
287
346
288 # backward compat
347 # backward compat
289 log_pull_action = post_pull
348 log_pull_action = post_pull
290
349
291 # backward compat
350 # backward compat
292 log_push_action = post_push
351 log_push_action = post_push
293
352
294
353
295 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
354 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
296 """
355 """
297 Old hook name: keep here for backward compatibility.
356 Old hook name: keep here for backward compatibility.
298
357
299 This is only required when the installed git hooks are not upgraded.
358 This is only required when the installed git hooks are not upgraded.
300 """
359 """
301 pass
360 pass
302
361
303
362
304 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
363 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
305 """
364 """
306 Old hook name: keep here for backward compatibility.
365 Old hook name: keep here for backward compatibility.
307
366
308 This is only required when the installed git hooks are not upgraded.
367 This is only required when the installed git hooks are not upgraded.
309 """
368 """
310 pass
369 pass
311
370
312
371
313 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
372 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
314
373
315
374
316 def git_pre_pull(extras):
375 def git_pre_pull(extras):
317 """
376 """
318 Pre pull hook.
377 Pre pull hook.
319
378
320 :param extras: dictionary containing the keys defined in simplevcs
379 :param extras: dictionary containing the keys defined in simplevcs
321 :type extras: dict
380 :type extras: dict
322
381
323 :return: status code of the hook. 0 for success.
382 :return: status code of the hook. 0 for success.
324 :rtype: int
383 :rtype: int
325 """
384 """
326 if 'pull' not in extras['hooks']:
385 if 'pull' not in extras['hooks']:
327 return HookResponse(0, '')
386 return HookResponse(0, '')
328
387
329 stdout = io.BytesIO()
388 stdout = io.BytesIO()
330 try:
389 try:
331 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
390 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
332 except Exception as error:
391 except Exception as error:
333 status = 128
392 status = 128
334 stdout.write('ERROR: %s\n' % str(error))
393 stdout.write('ERROR: %s\n' % str(error))
335
394
336 return HookResponse(status, stdout.getvalue())
395 return HookResponse(status, stdout.getvalue())
337
396
338
397
339 def git_post_pull(extras):
398 def git_post_pull(extras):
340 """
399 """
341 Post pull hook.
400 Post pull hook.
342
401
343 :param extras: dictionary containing the keys defined in simplevcs
402 :param extras: dictionary containing the keys defined in simplevcs
344 :type extras: dict
403 :type extras: dict
345
404
346 :return: status code of the hook. 0 for success.
405 :return: status code of the hook. 0 for success.
347 :rtype: int
406 :rtype: int
348 """
407 """
349 if 'pull' not in extras['hooks']:
408 if 'pull' not in extras['hooks']:
350 return HookResponse(0, '')
409 return HookResponse(0, '')
351
410
352 stdout = io.BytesIO()
411 stdout = io.BytesIO()
353 try:
412 try:
354 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
413 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
355 except Exception as error:
414 except Exception as error:
356 status = 128
415 status = 128
357 stdout.write('ERROR: %s\n' % error)
416 stdout.write('ERROR: %s\n' % error)
358
417
359 return HookResponse(status, stdout.getvalue())
418 return HookResponse(status, stdout.getvalue())
360
419
361
420
362 def _parse_git_ref_lines(revision_lines):
421 def _parse_git_ref_lines(revision_lines):
363 rev_data = []
422 rev_data = []
364 for revision_line in revision_lines or []:
423 for revision_line in revision_lines or []:
365 old_rev, new_rev, ref = revision_line.strip().split(' ')
424 old_rev, new_rev, ref = revision_line.strip().split(' ')
366 ref_data = ref.split('/', 2)
425 ref_data = ref.split('/', 2)
367 if ref_data[1] in ('tags', 'heads'):
426 if ref_data[1] in ('tags', 'heads'):
368 rev_data.append({
427 rev_data.append({
369 'old_rev': old_rev,
428 'old_rev': old_rev,
370 'new_rev': new_rev,
429 'new_rev': new_rev,
371 'ref': ref,
430 'ref': ref,
372 'type': ref_data[1],
431 'type': ref_data[1],
373 'name': ref_data[2],
432 'name': ref_data[2],
374 })
433 })
375 return rev_data
434 return rev_data
376
435
377
436
378 def git_pre_receive(unused_repo_path, revision_lines, env):
437 def git_pre_receive(unused_repo_path, revision_lines, env):
379 """
438 """
380 Pre push hook.
439 Pre push hook.
381
440
382 :param extras: dictionary containing the keys defined in simplevcs
441 :param extras: dictionary containing the keys defined in simplevcs
383 :type extras: dict
442 :type extras: dict
384
443
385 :return: status code of the hook. 0 for success.
444 :return: status code of the hook. 0 for success.
386 :rtype: int
445 :rtype: int
387 """
446 """
388 extras = json.loads(env['RC_SCM_DATA'])
447 extras = json.loads(env['RC_SCM_DATA'])
389 rev_data = _parse_git_ref_lines(revision_lines)
448 rev_data = _parse_git_ref_lines(revision_lines)
390 if 'push' not in extras['hooks']:
449 if 'push' not in extras['hooks']:
391 return 0
450 return 0
451 empty_commit_id = '0' * 40
452
453 detect_force_push = extras.get('detect_force_push')
454
455 for push_ref in rev_data:
456 # store our git-env which holds the temp store
457 push_ref['git_env'] = [
458 (k, v) for k, v in os.environ.items() if k.startswith('GIT')]
459 push_ref['pruned_sha'] = ''
460 if not detect_force_push:
461 # don't check for forced-push when we don't need to
462 continue
463
464 type_ = push_ref['type']
465 new_branch = push_ref['old_rev'] == empty_commit_id
466 if type_ == 'heads' and not new_branch:
467 old_rev = push_ref['old_rev']
468 new_rev = push_ref['new_rev']
469 cmd = [settings.GIT_EXECUTABLE, 'rev-list',
470 old_rev, '^{}'.format(new_rev)]
471 stdout, stderr = subprocessio.run_command(
472 cmd, env=os.environ.copy())
473 # means we're having some non-reachable objects, this forced push
474 # was used
475 if stdout:
476 push_ref['pruned_sha'] = stdout.splitlines()
477
392 extras['commit_ids'] = rev_data
478 extras['commit_ids'] = rev_data
393 return _call_hook('pre_push', extras, GitMessageWriter())
479 return _call_hook('pre_push', extras, GitMessageWriter())
394
480
395
481
396 def git_post_receive(unused_repo_path, revision_lines, env):
482 def git_post_receive(unused_repo_path, revision_lines, env):
397 """
483 """
398 Post push hook.
484 Post push hook.
399
485
400 :param extras: dictionary containing the keys defined in simplevcs
486 :param extras: dictionary containing the keys defined in simplevcs
401 :type extras: dict
487 :type extras: dict
402
488
403 :return: status code of the hook. 0 for success.
489 :return: status code of the hook. 0 for success.
404 :rtype: int
490 :rtype: int
405 """
491 """
406 extras = json.loads(env['RC_SCM_DATA'])
492 extras = json.loads(env['RC_SCM_DATA'])
407 if 'push' not in extras['hooks']:
493 if 'push' not in extras['hooks']:
408 return 0
494 return 0
409
495
410 rev_data = _parse_git_ref_lines(revision_lines)
496 rev_data = _parse_git_ref_lines(revision_lines)
411
497
412 git_revs = []
498 git_revs = []
413
499
414 # N.B.(skreft): it is ok to just call git, as git before calling a
500 # N.B.(skreft): it is ok to just call git, as git before calling a
415 # subcommand sets the PATH environment variable so that it point to the
501 # subcommand sets the PATH environment variable so that it point to the
416 # correct version of the git executable.
502 # correct version of the git executable.
417 empty_commit_id = '0' * 40
503 empty_commit_id = '0' * 40
418 branches = []
504 branches = []
419 tags = []
505 tags = []
420 for push_ref in rev_data:
506 for push_ref in rev_data:
421 type_ = push_ref['type']
507 type_ = push_ref['type']
422
508
423 if type_ == 'heads':
509 if type_ == 'heads':
424 if push_ref['old_rev'] == empty_commit_id:
510 if push_ref['old_rev'] == empty_commit_id:
425 # starting new branch case
511 # starting new branch case
426 if push_ref['name'] not in branches:
512 if push_ref['name'] not in branches:
427 branches.append(push_ref['name'])
513 branches.append(push_ref['name'])
428
514
429 # Fix up head revision if needed
515 # Fix up head revision if needed
430 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
516 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
431 try:
517 try:
432 subprocessio.run_command(cmd, env=os.environ.copy())
518 subprocessio.run_command(cmd, env=os.environ.copy())
433 except Exception:
519 except Exception:
434 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
520 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
435 'refs/heads/%s' % push_ref['name']]
521 'refs/heads/%s' % push_ref['name']]
436 print("Setting default branch to %s" % push_ref['name'])
522 print("Setting default branch to %s" % push_ref['name'])
437 subprocessio.run_command(cmd, env=os.environ.copy())
523 subprocessio.run_command(cmd, env=os.environ.copy())
438
524
439 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
525 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
440 '--format=%(refname)', 'refs/heads/*']
526 '--format=%(refname)', 'refs/heads/*']
441 stdout, stderr = subprocessio.run_command(
527 stdout, stderr = subprocessio.run_command(
442 cmd, env=os.environ.copy())
528 cmd, env=os.environ.copy())
443 heads = stdout
529 heads = stdout
444 heads = heads.replace(push_ref['ref'], '')
530 heads = heads.replace(push_ref['ref'], '')
445 heads = ' '.join(head for head in heads.splitlines() if head)
531 heads = ' '.join(head for head
532 in heads.splitlines() if head) or '.'
446 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
533 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
447 '--pretty=format:%H', '--', push_ref['new_rev'],
534 '--pretty=format:%H', '--', push_ref['new_rev'],
448 '--not', heads]
535 '--not', heads]
449 stdout, stderr = subprocessio.run_command(
536 stdout, stderr = subprocessio.run_command(
450 cmd, env=os.environ.copy())
537 cmd, env=os.environ.copy())
451 git_revs.extend(stdout.splitlines())
538 git_revs.extend(stdout.splitlines())
452 elif push_ref['new_rev'] == empty_commit_id:
539 elif push_ref['new_rev'] == empty_commit_id:
453 # delete branch case
540 # delete branch case
454 git_revs.append('delete_branch=>%s' % push_ref['name'])
541 git_revs.append('delete_branch=>%s' % push_ref['name'])
455 else:
542 else:
456 if push_ref['name'] not in branches:
543 if push_ref['name'] not in branches:
457 branches.append(push_ref['name'])
544 branches.append(push_ref['name'])
458
545
459 cmd = [settings.GIT_EXECUTABLE, 'log',
546 cmd = [settings.GIT_EXECUTABLE, 'log',
460 '{old_rev}..{new_rev}'.format(**push_ref),
547 '{old_rev}..{new_rev}'.format(**push_ref),
461 '--reverse', '--pretty=format:%H']
548 '--reverse', '--pretty=format:%H']
462 stdout, stderr = subprocessio.run_command(
549 stdout, stderr = subprocessio.run_command(
463 cmd, env=os.environ.copy())
550 cmd, env=os.environ.copy())
464 git_revs.extend(stdout.splitlines())
551 git_revs.extend(stdout.splitlines())
465 elif type_ == 'tags':
552 elif type_ == 'tags':
466 if push_ref['name'] not in tags:
553 if push_ref['name'] not in tags:
467 tags.append(push_ref['name'])
554 tags.append(push_ref['name'])
468 git_revs.append('tag=>%s' % push_ref['name'])
555 git_revs.append('tag=>%s' % push_ref['name'])
469
556
470 extras['commit_ids'] = git_revs
557 extras['commit_ids'] = git_revs
471 extras['new_refs'] = {
558 extras['new_refs'] = {
472 'branches': branches,
559 'branches': branches,
473 'bookmarks': [],
560 'bookmarks': [],
474 'tags': tags,
561 'tags': tags,
475 }
562 }
476
563
477 if 'repo_size' in extras['hooks']:
564 if 'repo_size' in extras['hooks']:
478 try:
565 try:
479 _call_hook('repo_size', extras, GitMessageWriter())
566 _call_hook('repo_size', extras, GitMessageWriter())
480 except:
567 except:
481 pass
568 pass
482
569
483 return _call_hook('post_push', extras, GitMessageWriter())
570 return _call_hook('post_push', extras, GitMessageWriter())
484
571
485
572
486 def _get_extras_from_txn_id(path, txn_id):
573 def _get_extras_from_txn_id(path, txn_id):
487 extras = {}
574 extras = {}
488 try:
575 try:
489 cmd = ['svnlook', 'pget',
576 cmd = ['svnlook', 'pget',
490 '-t', txn_id,
577 '-t', txn_id,
491 '--revprop', path, 'rc-scm-extras']
578 '--revprop', path, 'rc-scm-extras']
492 stdout, stderr = subprocessio.run_command(
579 stdout, stderr = subprocessio.run_command(
493 cmd, env=os.environ.copy())
580 cmd, env=os.environ.copy())
494 extras = json.loads(base64.urlsafe_b64decode(stdout))
581 extras = json.loads(base64.urlsafe_b64decode(stdout))
495 except Exception:
582 except Exception:
496 log.exception('Failed to extract extras info from txn_id')
583 log.exception('Failed to extract extras info from txn_id')
497
584
498 return extras
585 return extras
499
586
500
587
501 def svn_pre_commit(repo_path, commit_data, env):
588 def svn_pre_commit(repo_path, commit_data, env):
502 path, txn_id = commit_data
589 path, txn_id = commit_data
503 branches = []
590 branches = []
504 tags = []
591 tags = []
505
592
506 if env.get('RC_SCM_DATA'):
593 if env.get('RC_SCM_DATA'):
507 extras = json.loads(env['RC_SCM_DATA'])
594 extras = json.loads(env['RC_SCM_DATA'])
508 else:
595 else:
509 # fallback method to read from TXN-ID stored data
596 # fallback method to read from TXN-ID stored data
510 extras = _get_extras_from_txn_id(path, txn_id)
597 extras = _get_extras_from_txn_id(path, txn_id)
511 if not extras:
598 if not extras:
512 return 0
599 return 0
513
600
514 extras['commit_ids'] = []
601 extras['commit_ids'] = []
515 extras['txn_id'] = txn_id
602 extras['txn_id'] = txn_id
516 extras['new_refs'] = {
603 extras['new_refs'] = {
517 'branches': branches,
604 'branches': branches,
518 'bookmarks': [],
605 'bookmarks': [],
519 'tags': tags,
606 'tags': tags,
520 }
607 }
521
608
522 return _call_hook('pre_push', extras, SvnMessageWriter())
609 return _call_hook('pre_push', extras, SvnMessageWriter())
523
610
524
611
525 def _get_extras_from_commit_id(commit_id, path):
612 def _get_extras_from_commit_id(commit_id, path):
526 extras = {}
613 extras = {}
527 try:
614 try:
528 cmd = ['svnlook', 'pget',
615 cmd = ['svnlook', 'pget',
529 '-r', commit_id,
616 '-r', commit_id,
530 '--revprop', path, 'rc-scm-extras']
617 '--revprop', path, 'rc-scm-extras']
531 stdout, stderr = subprocessio.run_command(
618 stdout, stderr = subprocessio.run_command(
532 cmd, env=os.environ.copy())
619 cmd, env=os.environ.copy())
533 extras = json.loads(base64.urlsafe_b64decode(stdout))
620 extras = json.loads(base64.urlsafe_b64decode(stdout))
534 except Exception:
621 except Exception:
535 log.exception('Failed to extract extras info from commit_id')
622 log.exception('Failed to extract extras info from commit_id')
536
623
537 return extras
624 return extras
538
625
539
626
540 def svn_post_commit(repo_path, commit_data, env):
627 def svn_post_commit(repo_path, commit_data, env):
541 """
628 """
542 commit_data is path, rev, txn_id
629 commit_data is path, rev, txn_id
543 """
630 """
544 path, commit_id, txn_id = commit_data
631 path, commit_id, txn_id = commit_data
545 branches = []
632 branches = []
546 tags = []
633 tags = []
547
634
548 if env.get('RC_SCM_DATA'):
635 if env.get('RC_SCM_DATA'):
549 extras = json.loads(env['RC_SCM_DATA'])
636 extras = json.loads(env['RC_SCM_DATA'])
550 else:
637 else:
551 # fallback method to read from TXN-ID stored data
638 # fallback method to read from TXN-ID stored data
552 extras = _get_extras_from_commit_id(commit_id, path)
639 extras = _get_extras_from_commit_id(commit_id, path)
553 if not extras:
640 if not extras:
554 return 0
641 return 0
555
642
556 extras['commit_ids'] = [commit_id]
643 extras['commit_ids'] = [commit_id]
557 extras['txn_id'] = txn_id
644 extras['txn_id'] = txn_id
558 extras['new_refs'] = {
645 extras['new_refs'] = {
559 'branches': branches,
646 'branches': branches,
560 'bookmarks': [],
647 'bookmarks': [],
561 'tags': tags,
648 'tags': tags,
562 }
649 }
563
650
564 if 'repo_size' in extras['hooks']:
651 if 'repo_size' in extras['hooks']:
565 try:
652 try:
566 _call_hook('repo_size', extras, SvnMessageWriter())
653 _call_hook('repo_size', extras, SvnMessageWriter())
567 except Exception:
654 except Exception:
568 pass
655 pass
569
656
570 return _call_hook('post_push', extras, SvnMessageWriter())
657 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,487 +1,563 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import base64
20 import base64
20 import locale
21 import locale
21 import logging
22 import logging
22 import uuid
23 import uuid
23 import wsgiref.util
24 import wsgiref.util
24 import traceback
25 import traceback
25 from itertools import chain
26 from itertools import chain
26
27
27 import simplejson as json
28 import simplejson as json
28 import msgpack
29 import msgpack
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
31 from pyramid.config import Configurator
30 from pyramid.config import Configurator
31 from pyramid.settings import asbool, aslist
32 from pyramid.wsgi import wsgiapp
32 from pyramid.wsgi import wsgiapp
33 from pyramid.compat import configparser
33 from pyramid.compat import configparser
34
34
35
36 log = logging.getLogger(__name__)
37
38 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
39 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
40
41 try:
42 locale.setlocale(locale.LC_ALL, '')
43 except locale.Error as e:
44 log.error(
45 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
46 os.environ['LC_ALL'] = 'C'
47
48
35 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
49 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
36 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
50 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
37 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
51 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
38 from vcsserver.echo_stub.echo_app import EchoApp
52 from vcsserver.echo_stub.echo_app import EchoApp
39 from vcsserver.exceptions import HTTPRepoLocked
53 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
54 from vcsserver.lib.exc_tracking import store_exception
40 from vcsserver.server import VcsServer
55 from vcsserver.server import VcsServer
41
56
42 try:
57 try:
43 from vcsserver.git import GitFactory, GitRemote
58 from vcsserver.git import GitFactory, GitRemote
44 except ImportError:
59 except ImportError:
45 GitFactory = None
60 GitFactory = None
46 GitRemote = None
61 GitRemote = None
47
62
48 try:
63 try:
49 from vcsserver.hg import MercurialFactory, HgRemote
64 from vcsserver.hg import MercurialFactory, HgRemote
50 except ImportError:
65 except ImportError:
51 MercurialFactory = None
66 MercurialFactory = None
52 HgRemote = None
67 HgRemote = None
53
68
54 try:
69 try:
55 from vcsserver.svn import SubversionFactory, SvnRemote
70 from vcsserver.svn import SubversionFactory, SvnRemote
56 except ImportError:
71 except ImportError:
57 SubversionFactory = None
72 SubversionFactory = None
58 SvnRemote = None
73 SvnRemote = None
59
74
60 log = logging.getLogger(__name__)
75
61
76
62
77
63 def _is_request_chunked(environ):
78 def _is_request_chunked(environ):
64 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
65 return stream
80 return stream
66
81
67
82
83 def _int_setting(settings, name, default):
84 settings[name] = int(settings.get(name, default))
85
86
87 def _bool_setting(settings, name, default):
88 input_val = settings.get(name, default)
89 if isinstance(input_val, unicode):
90 input_val = input_val.encode('utf8')
91 settings[name] = asbool(input_val)
92
93
94 def _list_setting(settings, name, default):
95 raw_value = settings.get(name, default)
96
97 # Otherwise we assume it uses pyramids space/newline separation.
98 settings[name] = aslist(raw_value)
99
100
101 def _string_setting(settings, name, default, lower=True):
102 value = settings.get(name, default)
103 if lower:
104 value = value.lower()
105 settings[name] = value
106
107
68 class VCS(object):
108 class VCS(object):
69 def __init__(self, locale=None, cache_config=None):
109 def __init__(self, locale=None, cache_config=None):
70 self.locale = locale
110 self.locale = locale
71 self.cache_config = cache_config
111 self.cache_config = cache_config
72 self._configure_locale()
112 self._configure_locale()
73 self._initialize_cache()
74
113
75 if GitFactory and GitRemote:
114 if GitFactory and GitRemote:
76 git_repo_cache = self.cache.get_cache_region(
115 git_factory = GitFactory()
77 'git', region='repo_object')
78 git_factory = GitFactory(git_repo_cache)
79 self._git_remote = GitRemote(git_factory)
116 self._git_remote = GitRemote(git_factory)
80 else:
117 else:
81 log.info("Git client import failed")
118 log.info("Git client import failed")
82
119
83 if MercurialFactory and HgRemote:
120 if MercurialFactory and HgRemote:
84 hg_repo_cache = self.cache.get_cache_region(
121 hg_factory = MercurialFactory()
85 'hg', region='repo_object')
86 hg_factory = MercurialFactory(hg_repo_cache)
87 self._hg_remote = HgRemote(hg_factory)
122 self._hg_remote = HgRemote(hg_factory)
88 else:
123 else:
89 log.info("Mercurial client import failed")
124 log.info("Mercurial client import failed")
90
125
91 if SubversionFactory and SvnRemote:
126 if SubversionFactory and SvnRemote:
92 svn_repo_cache = self.cache.get_cache_region(
127 svn_factory = SubversionFactory()
93 'svn', region='repo_object')
128
94 svn_factory = SubversionFactory(svn_repo_cache)
95 # hg factory is used for svn url validation
129 # hg factory is used for svn url validation
96 hg_repo_cache = self.cache.get_cache_region(
130 hg_factory = MercurialFactory()
97 'hg', region='repo_object')
98 hg_factory = MercurialFactory(hg_repo_cache)
99 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
131 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
100 else:
132 else:
101 log.info("Subversion client import failed")
133 log.info("Subversion client import failed")
102
134
103 self._vcsserver = VcsServer()
135 self._vcsserver = VcsServer()
104
136
105 def _initialize_cache(self):
106 cache_config = parse_cache_config_options(self.cache_config)
107 log.info('Initializing beaker cache: %s' % cache_config)
108 self.cache = CacheManager(**cache_config)
109
110 def _configure_locale(self):
137 def _configure_locale(self):
111 if self.locale:
138 if self.locale:
112 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
139 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
113 else:
140 else:
114 log.info(
141 log.info(
115 'Configuring locale subsystem based on environment variables')
142 'Configuring locale subsystem based on environment variables')
116 try:
143 try:
117 # If self.locale is the empty string, then the locale
144 # If self.locale is the empty string, then the locale
118 # module will use the environment variables. See the
145 # module will use the environment variables. See the
119 # documentation of the package `locale`.
146 # documentation of the package `locale`.
120 locale.setlocale(locale.LC_ALL, self.locale)
147 locale.setlocale(locale.LC_ALL, self.locale)
121
148
122 language_code, encoding = locale.getlocale()
149 language_code, encoding = locale.getlocale()
123 log.info(
150 log.info(
124 'Locale set to language code "%s" with encoding "%s".',
151 'Locale set to language code "%s" with encoding "%s".',
125 language_code, encoding)
152 language_code, encoding)
126 except locale.Error:
153 except locale.Error:
127 log.exception(
154 log.exception(
128 'Cannot set locale, not configuring the locale system')
155 'Cannot set locale, not configuring the locale system')
129
156
130
157
131 class WsgiProxy(object):
158 class WsgiProxy(object):
132 def __init__(self, wsgi):
159 def __init__(self, wsgi):
133 self.wsgi = wsgi
160 self.wsgi = wsgi
134
161
135 def __call__(self, environ, start_response):
162 def __call__(self, environ, start_response):
136 input_data = environ['wsgi.input'].read()
163 input_data = environ['wsgi.input'].read()
137 input_data = msgpack.unpackb(input_data)
164 input_data = msgpack.unpackb(input_data)
138
165
139 error = None
166 error = None
140 try:
167 try:
141 data, status, headers = self.wsgi.handle(
168 data, status, headers = self.wsgi.handle(
142 input_data['environment'], input_data['input_data'],
169 input_data['environment'], input_data['input_data'],
143 *input_data['args'], **input_data['kwargs'])
170 *input_data['args'], **input_data['kwargs'])
144 except Exception as e:
171 except Exception as e:
145 data, status, headers = [], None, None
172 data, status, headers = [], None, None
146 error = {
173 error = {
147 'message': str(e),
174 'message': str(e),
148 '_vcs_kind': getattr(e, '_vcs_kind', None)
175 '_vcs_kind': getattr(e, '_vcs_kind', None)
149 }
176 }
150
177
151 start_response(200, {})
178 start_response(200, {})
152 return self._iterator(error, status, headers, data)
179 return self._iterator(error, status, headers, data)
153
180
154 def _iterator(self, error, status, headers, data):
181 def _iterator(self, error, status, headers, data):
155 initial_data = [
182 initial_data = [
156 error,
183 error,
157 status,
184 status,
158 headers,
185 headers,
159 ]
186 ]
160
187
161 for d in chain(initial_data, data):
188 for d in chain(initial_data, data):
162 yield msgpack.packb(d)
189 yield msgpack.packb(d)
163
190
164
191
165 class HTTPApplication(object):
192 class HTTPApplication(object):
166 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
193 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
167
194
168 remote_wsgi = remote_wsgi
195 remote_wsgi = remote_wsgi
169 _use_echo_app = False
196 _use_echo_app = False
170
197
171 def __init__(self, settings=None, global_config=None):
198 def __init__(self, settings=None, global_config=None):
199 self._sanitize_settings_and_apply_defaults(settings)
200
172 self.config = Configurator(settings=settings)
201 self.config = Configurator(settings=settings)
173 self.global_config = global_config
202 self.global_config = global_config
203 self.config.include('vcsserver.lib.rc_cache')
174
204
175 locale = settings.get('locale', '') or 'en_US.UTF-8'
205 locale = settings.get('locale', '') or 'en_US.UTF-8'
176 vcs = VCS(locale=locale, cache_config=settings)
206 vcs = VCS(locale=locale, cache_config=settings)
177 self._remotes = {
207 self._remotes = {
178 'hg': vcs._hg_remote,
208 'hg': vcs._hg_remote,
179 'git': vcs._git_remote,
209 'git': vcs._git_remote,
180 'svn': vcs._svn_remote,
210 'svn': vcs._svn_remote,
181 'server': vcs._vcsserver,
211 'server': vcs._vcsserver,
182 }
212 }
183 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
213 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
184 self._use_echo_app = True
214 self._use_echo_app = True
185 log.warning("Using EchoApp for VCS operations.")
215 log.warning("Using EchoApp for VCS operations.")
186 self.remote_wsgi = remote_wsgi_stub
216 self.remote_wsgi = remote_wsgi_stub
187 self._configure_settings(settings)
217 self._configure_settings(settings)
188 self._configure()
218 self._configure()
189
219
190 def _configure_settings(self, app_settings):
220 def _configure_settings(self, app_settings):
191 """
221 """
192 Configure the settings module.
222 Configure the settings module.
193 """
223 """
194 git_path = app_settings.get('git_path', None)
224 git_path = app_settings.get('git_path', None)
195 if git_path:
225 if git_path:
196 settings.GIT_EXECUTABLE = git_path
226 settings.GIT_EXECUTABLE = git_path
197 binary_dir = app_settings.get('core.binary_dir', None)
227 binary_dir = app_settings.get('core.binary_dir', None)
198 if binary_dir:
228 if binary_dir:
199 settings.BINARY_DIR = binary_dir
229 settings.BINARY_DIR = binary_dir
200
230
231 def _sanitize_settings_and_apply_defaults(self, settings):
232 # repo_object cache
233 _string_setting(
234 settings,
235 'rc_cache.repo_object.backend',
236 'dogpile.cache.rc.memory_lru')
237 _int_setting(
238 settings,
239 'rc_cache.repo_object.expiration_time',
240 300)
241 _int_setting(
242 settings,
243 'rc_cache.repo_object.max_size',
244 1024)
245
201 def _configure(self):
246 def _configure(self):
202 self.config.add_renderer(
247 self.config.add_renderer(
203 name='msgpack',
248 name='msgpack',
204 factory=self._msgpack_renderer_factory)
249 factory=self._msgpack_renderer_factory)
205
250
206 self.config.add_route('service', '/_service')
251 self.config.add_route('service', '/_service')
207 self.config.add_route('status', '/status')
252 self.config.add_route('status', '/status')
208 self.config.add_route('hg_proxy', '/proxy/hg')
253 self.config.add_route('hg_proxy', '/proxy/hg')
209 self.config.add_route('git_proxy', '/proxy/git')
254 self.config.add_route('git_proxy', '/proxy/git')
210 self.config.add_route('vcs', '/{backend}')
255 self.config.add_route('vcs', '/{backend}')
211 self.config.add_route('stream_git', '/stream/git/*repo_name')
256 self.config.add_route('stream_git', '/stream/git/*repo_name')
212 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
257 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
213
258
214 self.config.add_view(
259 self.config.add_view(
215 self.status_view, route_name='status', renderer='json')
260 self.status_view, route_name='status', renderer='json')
216 self.config.add_view(
261 self.config.add_view(
217 self.service_view, route_name='service', renderer='msgpack')
262 self.service_view, route_name='service', renderer='msgpack')
218
263
219 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
264 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
220 self.config.add_view(self.git_proxy(), route_name='git_proxy')
265 self.config.add_view(self.git_proxy(), route_name='git_proxy')
221 self.config.add_view(
266 self.config.add_view(
222 self.vcs_view, route_name='vcs', renderer='msgpack',
267 self.vcs_view, route_name='vcs', renderer='msgpack',
223 custom_predicates=[self.is_vcs_view])
268 custom_predicates=[self.is_vcs_view])
224
269
225 self.config.add_view(self.hg_stream(), route_name='stream_hg')
270 self.config.add_view(self.hg_stream(), route_name='stream_hg')
226 self.config.add_view(self.git_stream(), route_name='stream_git')
271 self.config.add_view(self.git_stream(), route_name='stream_git')
227
272
228 def notfound(request):
273 def notfound(request):
229 return {'status': '404 NOT FOUND'}
274 return {'status': '404 NOT FOUND'}
230 self.config.add_notfound_view(notfound, renderer='json')
275 self.config.add_notfound_view(notfound, renderer='json')
231
276
232 self.config.add_view(self.handle_vcs_exception, context=Exception)
277 self.config.add_view(self.handle_vcs_exception, context=Exception)
233
278
234 self.config.add_tween(
279 self.config.add_tween(
235 'vcsserver.tweens.RequestWrapperTween',
280 'vcsserver.tweens.RequestWrapperTween',
236 )
281 )
237
282
238 def wsgi_app(self):
283 def wsgi_app(self):
239 return self.config.make_wsgi_app()
284 return self.config.make_wsgi_app()
240
285
241 def vcs_view(self, request):
286 def vcs_view(self, request):
242 remote = self._remotes[request.matchdict['backend']]
287 remote = self._remotes[request.matchdict['backend']]
243 payload = msgpack.unpackb(request.body, use_list=True)
288 payload = msgpack.unpackb(request.body, use_list=True)
244 method = payload.get('method')
289 method = payload.get('method')
245 params = payload.get('params')
290 params = payload.get('params')
246 wire = params.get('wire')
291 wire = params.get('wire')
247 args = params.get('args')
292 args = params.get('args')
248 kwargs = params.get('kwargs')
293 kwargs = params.get('kwargs')
294 context_uid = None
295
249 if wire:
296 if wire:
250 try:
297 try:
251 wire['context'] = uuid.UUID(wire['context'])
298 wire['context'] = context_uid = uuid.UUID(wire['context'])
252 except KeyError:
299 except KeyError:
253 pass
300 pass
254 args.insert(0, wire)
301 args.insert(0, wire)
255
302
256 log.debug('method called:%s with kwargs:%s', method, kwargs)
303 log.debug('method called:%s with kwargs:%s context_uid: %s',
304 method, kwargs, context_uid)
257 try:
305 try:
258 resp = getattr(remote, method)(*args, **kwargs)
306 resp = getattr(remote, method)(*args, **kwargs)
259 except Exception as e:
307 except Exception as e:
260 tb_info = traceback.format_exc()
308 exc_info = list(sys.exc_info())
309 exc_type, exc_value, exc_traceback = exc_info
310
311 org_exc = getattr(e, '_org_exc', None)
312 org_exc_name = None
313 if org_exc:
314 org_exc_name = org_exc.__class__.__name__
315 # replace our "faked" exception with our org
316 exc_info[0] = org_exc.__class__
317 exc_info[1] = org_exc
318
319 store_exception(id(exc_info), exc_info)
320
321 tb_info = ''.join(
322 traceback.format_exception(exc_type, exc_value, exc_traceback))
261
323
262 type_ = e.__class__.__name__
324 type_ = e.__class__.__name__
263 if type_ not in self.ALLOWED_EXCEPTIONS:
325 if type_ not in self.ALLOWED_EXCEPTIONS:
264 type_ = None
326 type_ = None
265
327
266 resp = {
328 resp = {
267 'id': payload.get('id'),
329 'id': payload.get('id'),
268 'error': {
330 'error': {
269 'message': e.message,
331 'message': e.message,
270 'traceback': tb_info,
332 'traceback': tb_info,
333 'org_exc': org_exc_name,
271 'type': type_
334 'type': type_
272 }
335 }
273 }
336 }
274 try:
337 try:
275 resp['error']['_vcs_kind'] = e._vcs_kind
338 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
276 except AttributeError:
339 except AttributeError:
277 pass
340 pass
278 else:
341 else:
279 resp = {
342 resp = {
280 'id': payload.get('id'),
343 'id': payload.get('id'),
281 'result': resp
344 'result': resp
282 }
345 }
283
346
284 return resp
347 return resp
285
348
286 def status_view(self, request):
349 def status_view(self, request):
287 import vcsserver
350 import vcsserver
288 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
351 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
289 'pid': os.getpid()}
352 'pid': os.getpid()}
290
353
291 def service_view(self, request):
354 def service_view(self, request):
292 import vcsserver
355 import vcsserver
293
356
294 payload = msgpack.unpackb(request.body, use_list=True)
357 payload = msgpack.unpackb(request.body, use_list=True)
295
358
296 try:
359 try:
297 path = self.global_config['__file__']
360 path = self.global_config['__file__']
298 config = configparser.ConfigParser()
361 config = configparser.ConfigParser()
299 config.read(path)
362 config.read(path)
300 parsed_ini = config
363 parsed_ini = config
301 if parsed_ini.has_section('server:main'):
364 if parsed_ini.has_section('server:main'):
302 parsed_ini = dict(parsed_ini.items('server:main'))
365 parsed_ini = dict(parsed_ini.items('server:main'))
303 except Exception:
366 except Exception:
304 log.exception('Failed to read .ini file for display')
367 log.exception('Failed to read .ini file for display')
305 parsed_ini = {}
368 parsed_ini = {}
306
369
307 resp = {
370 resp = {
308 'id': payload.get('id'),
371 'id': payload.get('id'),
309 'result': dict(
372 'result': dict(
310 version=vcsserver.__version__,
373 version=vcsserver.__version__,
311 config=parsed_ini,
374 config=parsed_ini,
312 payload=payload,
375 payload=payload,
313 )
376 )
314 }
377 }
315 return resp
378 return resp
316
379
317 def _msgpack_renderer_factory(self, info):
380 def _msgpack_renderer_factory(self, info):
318 def _render(value, system):
381 def _render(value, system):
319 value = msgpack.packb(value)
382 value = msgpack.packb(value)
320 request = system.get('request')
383 request = system.get('request')
321 if request is not None:
384 if request is not None:
322 response = request.response
385 response = request.response
323 ct = response.content_type
386 ct = response.content_type
324 if ct == response.default_content_type:
387 if ct == response.default_content_type:
325 response.content_type = 'application/x-msgpack'
388 response.content_type = 'application/x-msgpack'
326 return value
389 return value
327 return _render
390 return _render
328
391
329 def set_env_from_config(self, environ, config):
392 def set_env_from_config(self, environ, config):
330 dict_conf = {}
393 dict_conf = {}
331 try:
394 try:
332 for elem in config:
395 for elem in config:
333 if elem[0] == 'rhodecode':
396 if elem[0] == 'rhodecode':
334 dict_conf = json.loads(elem[2])
397 dict_conf = json.loads(elem[2])
335 break
398 break
336 except Exception:
399 except Exception:
337 log.exception('Failed to fetch SCM CONFIG')
400 log.exception('Failed to fetch SCM CONFIG')
338 return
401 return
339
402
340 username = dict_conf.get('username')
403 username = dict_conf.get('username')
341 if username:
404 if username:
342 environ['REMOTE_USER'] = username
405 environ['REMOTE_USER'] = username
343 # mercurial specific, some extension api rely on this
406 # mercurial specific, some extension api rely on this
344 environ['HGUSER'] = username
407 environ['HGUSER'] = username
345
408
346 ip = dict_conf.get('ip')
409 ip = dict_conf.get('ip')
347 if ip:
410 if ip:
348 environ['REMOTE_HOST'] = ip
411 environ['REMOTE_HOST'] = ip
349
412
350 if _is_request_chunked(environ):
413 if _is_request_chunked(environ):
351 # set the compatibility flag for webob
414 # set the compatibility flag for webob
352 environ['wsgi.input_terminated'] = True
415 environ['wsgi.input_terminated'] = True
353
416
354 def hg_proxy(self):
417 def hg_proxy(self):
355 @wsgiapp
418 @wsgiapp
356 def _hg_proxy(environ, start_response):
419 def _hg_proxy(environ, start_response):
357 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
420 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
358 return app(environ, start_response)
421 return app(environ, start_response)
359 return _hg_proxy
422 return _hg_proxy
360
423
361 def git_proxy(self):
424 def git_proxy(self):
362 @wsgiapp
425 @wsgiapp
363 def _git_proxy(environ, start_response):
426 def _git_proxy(environ, start_response):
364 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
427 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
365 return app(environ, start_response)
428 return app(environ, start_response)
366 return _git_proxy
429 return _git_proxy
367
430
368 def hg_stream(self):
431 def hg_stream(self):
369 if self._use_echo_app:
432 if self._use_echo_app:
370 @wsgiapp
433 @wsgiapp
371 def _hg_stream(environ, start_response):
434 def _hg_stream(environ, start_response):
372 app = EchoApp('fake_path', 'fake_name', None)
435 app = EchoApp('fake_path', 'fake_name', None)
373 return app(environ, start_response)
436 return app(environ, start_response)
374 return _hg_stream
437 return _hg_stream
375 else:
438 else:
376 @wsgiapp
439 @wsgiapp
377 def _hg_stream(environ, start_response):
440 def _hg_stream(environ, start_response):
378 log.debug('http-app: handling hg stream')
441 log.debug('http-app: handling hg stream')
379 repo_path = environ['HTTP_X_RC_REPO_PATH']
442 repo_path = environ['HTTP_X_RC_REPO_PATH']
380 repo_name = environ['HTTP_X_RC_REPO_NAME']
443 repo_name = environ['HTTP_X_RC_REPO_NAME']
381 packed_config = base64.b64decode(
444 packed_config = base64.b64decode(
382 environ['HTTP_X_RC_REPO_CONFIG'])
445 environ['HTTP_X_RC_REPO_CONFIG'])
383 config = msgpack.unpackb(packed_config)
446 config = msgpack.unpackb(packed_config)
384 app = scm_app.create_hg_wsgi_app(
447 app = scm_app.create_hg_wsgi_app(
385 repo_path, repo_name, config)
448 repo_path, repo_name, config)
386
449
387 # Consistent path information for hgweb
450 # Consistent path information for hgweb
388 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
451 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
389 environ['REPO_NAME'] = repo_name
452 environ['REPO_NAME'] = repo_name
390 self.set_env_from_config(environ, config)
453 self.set_env_from_config(environ, config)
391
454
392 log.debug('http-app: starting app handler '
455 log.debug('http-app: starting app handler '
393 'with %s and process request', app)
456 'with %s and process request', app)
394 return app(environ, ResponseFilter(start_response))
457 return app(environ, ResponseFilter(start_response))
395 return _hg_stream
458 return _hg_stream
396
459
397 def git_stream(self):
460 def git_stream(self):
398 if self._use_echo_app:
461 if self._use_echo_app:
399 @wsgiapp
462 @wsgiapp
400 def _git_stream(environ, start_response):
463 def _git_stream(environ, start_response):
401 app = EchoApp('fake_path', 'fake_name', None)
464 app = EchoApp('fake_path', 'fake_name', None)
402 return app(environ, start_response)
465 return app(environ, start_response)
403 return _git_stream
466 return _git_stream
404 else:
467 else:
405 @wsgiapp
468 @wsgiapp
406 def _git_stream(environ, start_response):
469 def _git_stream(environ, start_response):
407 log.debug('http-app: handling git stream')
470 log.debug('http-app: handling git stream')
408 repo_path = environ['HTTP_X_RC_REPO_PATH']
471 repo_path = environ['HTTP_X_RC_REPO_PATH']
409 repo_name = environ['HTTP_X_RC_REPO_NAME']
472 repo_name = environ['HTTP_X_RC_REPO_NAME']
410 packed_config = base64.b64decode(
473 packed_config = base64.b64decode(
411 environ['HTTP_X_RC_REPO_CONFIG'])
474 environ['HTTP_X_RC_REPO_CONFIG'])
412 config = msgpack.unpackb(packed_config)
475 config = msgpack.unpackb(packed_config)
413
476
414 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
477 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
415 self.set_env_from_config(environ, config)
478 self.set_env_from_config(environ, config)
416
479
417 content_type = environ.get('CONTENT_TYPE', '')
480 content_type = environ.get('CONTENT_TYPE', '')
418
481
419 path = environ['PATH_INFO']
482 path = environ['PATH_INFO']
420 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
483 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
421 log.debug(
484 log.debug(
422 'LFS: Detecting if request `%s` is LFS server path based '
485 'LFS: Detecting if request `%s` is LFS server path based '
423 'on content type:`%s`, is_lfs:%s',
486 'on content type:`%s`, is_lfs:%s',
424 path, content_type, is_lfs_request)
487 path, content_type, is_lfs_request)
425
488
426 if not is_lfs_request:
489 if not is_lfs_request:
427 # fallback detection by path
490 # fallback detection by path
428 if GIT_LFS_PROTO_PAT.match(path):
491 if GIT_LFS_PROTO_PAT.match(path):
429 is_lfs_request = True
492 is_lfs_request = True
430 log.debug(
493 log.debug(
431 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
494 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
432 path, is_lfs_request)
495 path, is_lfs_request)
433
496
434 if is_lfs_request:
497 if is_lfs_request:
435 app = scm_app.create_git_lfs_wsgi_app(
498 app = scm_app.create_git_lfs_wsgi_app(
436 repo_path, repo_name, config)
499 repo_path, repo_name, config)
437 else:
500 else:
438 app = scm_app.create_git_wsgi_app(
501 app = scm_app.create_git_wsgi_app(
439 repo_path, repo_name, config)
502 repo_path, repo_name, config)
440
503
441 log.debug('http-app: starting app handler '
504 log.debug('http-app: starting app handler '
442 'with %s and process request', app)
505 'with %s and process request', app)
443
506
444 return app(environ, start_response)
507 return app(environ, start_response)
445
508
446 return _git_stream
509 return _git_stream
447
510
448 def is_vcs_view(self, context, request):
511 def is_vcs_view(self, context, request):
449 """
512 """
450 View predicate that returns true if given backend is supported by
513 View predicate that returns true if given backend is supported by
451 defined remotes.
514 defined remotes.
452 """
515 """
453 backend = request.matchdict.get('backend')
516 backend = request.matchdict.get('backend')
454 return backend in self._remotes
517 return backend in self._remotes
455
518
456 def handle_vcs_exception(self, exception, request):
519 def handle_vcs_exception(self, exception, request):
457 _vcs_kind = getattr(exception, '_vcs_kind', '')
520 _vcs_kind = getattr(exception, '_vcs_kind', '')
458 if _vcs_kind == 'repo_locked':
521 if _vcs_kind == 'repo_locked':
459 # Get custom repo-locked status code if present.
522 # Get custom repo-locked status code if present.
460 status_code = request.headers.get('X-RC-Locked-Status-Code')
523 status_code = request.headers.get('X-RC-Locked-Status-Code')
461 return HTTPRepoLocked(
524 return HTTPRepoLocked(
462 title=exception.message, status_code=status_code)
525 title=exception.message, status_code=status_code)
463
526
464 # Re-raise exception if we can not handle it.
527 elif _vcs_kind == 'repo_branch_protected':
465 log.exception(
528 # Get custom repo-branch-protected status code if present.
466 'error occurred handling this request for path: %s', request.path)
529 return HTTPRepoBranchProtected(title=exception.message)
530
531 exc_info = request.exc_info
532 store_exception(id(exc_info), exc_info)
533
534 traceback_info = 'unavailable'
535 if request.exc_info:
536 exc_type, exc_value, exc_tb = request.exc_info
537 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
538
539 log.error(
540 'error occurred handling this request for path: %s, \n tb: %s',
541 request.path, traceback_info)
467 raise exception
542 raise exception
468
543
469
544
470 class ResponseFilter(object):
545 class ResponseFilter(object):
471
546
472 def __init__(self, start_response):
547 def __init__(self, start_response):
473 self._start_response = start_response
548 self._start_response = start_response
474
549
475 def __call__(self, status, response_headers, exc_info=None):
550 def __call__(self, status, response_headers, exc_info=None):
476 headers = tuple(
551 headers = tuple(
477 (h, v) for h, v in response_headers
552 (h, v) for h, v in response_headers
478 if not wsgiref.util.is_hop_by_hop(h))
553 if not wsgiref.util.is_hop_by_hop(h))
479 return self._start_response(status, headers, exc_info)
554 return self._start_response(status, headers, exc_info)
480
555
481
556
482 def main(global_config, **settings):
557 def main(global_config, **settings):
483 if MercurialFactory:
558 if MercurialFactory:
484 hgpatches.patch_largefiles_capabilities()
559 hgpatches.patch_largefiles_capabilities()
485 hgpatches.patch_subrepo_type_mapping()
560 hgpatches.patch_subrepo_type_mapping()
561
486 app = HTTPApplication(settings=settings, global_config=global_config)
562 app = HTTPApplication(settings=settings, global_config=global_config)
487 return app.wsgi_app()
563 return app.wsgi_app()
@@ -1,229 +1,234 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import itertools
20 import itertools
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.wireprotoserver
24 import mercurial.hgweb.common
25 import mercurial.hgweb.common
25 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.protocol
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30
30
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 # propagated from mercurial documentation
35 # propagated from mercurial documentation
36 HG_UI_SECTIONS = [
36 HG_UI_SECTIONS = [
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 ]
40 ]
41
41
42
42
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 """Extension of hgweb that simplifies some functions."""
44 """Extension of hgweb that simplifies some functions."""
45
45
46 def _get_view(self, repo):
46 def _get_view(self, repo):
47 """Views are not supported."""
47 """Views are not supported."""
48 return repo
48 return repo
49
49
50 def loadsubweb(self):
50 def loadsubweb(self):
51 """The result is only used in the templater method which is not used."""
51 """The result is only used in the templater method which is not used."""
52 return None
52 return None
53
53
54 def run(self):
54 def run(self):
55 """Unused function so raise an exception if accidentally called."""
55 """Unused function so raise an exception if accidentally called."""
56 raise NotImplementedError
56 raise NotImplementedError
57
57
58 def templater(self, req):
58 def templater(self, req):
59 """Function used in an unreachable code path.
59 """Function used in an unreachable code path.
60
60
61 This code is unreachable because we guarantee that the HTTP request,
61 This code is unreachable because we guarantee that the HTTP request,
62 corresponds to a Mercurial command. See the is_hg method. So, we are
62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 never going to get a user-visible url.
63 never going to get a user-visible url.
64 """
64 """
65 raise NotImplementedError
65 raise NotImplementedError
66
66
67 def archivelist(self, nodeid):
67 def archivelist(self, nodeid):
68 """Unused function so raise an exception if accidentally called."""
68 """Unused function so raise an exception if accidentally called."""
69 raise NotImplementedError
69 raise NotImplementedError
70
70
71 def __call__(self, environ, start_response):
71 def __call__(self, environ, start_response):
72 """Run the WSGI application.
72 """Run the WSGI application.
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 req = mercurial.hgweb.request.wsgirequest(environ, start_response)
76 from mercurial.hgweb import request as requestmod
77 gen = self.run_wsgi(req)
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
78
80
79 first_chunk = None
81 first_chunk = None
80
82
81 try:
83 try:
82 data = gen.next()
84 data = gen.next()
83 def first_chunk(): yield data
85
86 def first_chunk():
87 yield data
84 except StopIteration:
88 except StopIteration:
85 pass
89 pass
86
90
87 if first_chunk:
91 if first_chunk:
88 return itertools.chain(first_chunk(), gen)
92 return itertools.chain(first_chunk(), gen)
89 return gen
93 return gen
90
94
91 def _runwsgi(self, req, repo):
95 def _runwsgi(self, req, res, repo):
92 cmd = req.form.get('cmd', [''])[0]
93 if not mercurial.hgweb.protocol.iscmd(cmd):
94 req.respond(
95 mercurial.hgweb.common.ErrorResponse(
96 mercurial.hgweb.common.HTTP_BAD_REQUEST),
97 mercurial.hgweb.protocol.HGTYPE
98 )
99 return ['']
100
96
101 return super(HgWeb, self)._runwsgi(req, repo)
97 cmd = req.qsparams.get('cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
101 from mercurial.hgweb.common import statusmessage
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.setbodybytes('')
104 return res.sendresponse()
105
106 return super(HgWeb, self)._runwsgi(req, res, repo)
102
107
103
108
104 def make_hg_ui_from_config(repo_config):
109 def make_hg_ui_from_config(repo_config):
105 baseui = mercurial.ui.ui()
110 baseui = mercurial.ui.ui()
106
111
107 # clean the baseui object
112 # clean the baseui object
108 baseui._ocfg = mercurial.config.config()
113 baseui._ocfg = mercurial.config.config()
109 baseui._ucfg = mercurial.config.config()
114 baseui._ucfg = mercurial.config.config()
110 baseui._tcfg = mercurial.config.config()
115 baseui._tcfg = mercurial.config.config()
111
116
112 for section, option, value in repo_config:
117 for section, option, value in repo_config:
113 baseui.setconfig(section, option, value)
118 baseui.setconfig(section, option, value)
114
119
115 # make our hgweb quiet so it doesn't print output
120 # make our hgweb quiet so it doesn't print output
116 baseui.setconfig('ui', 'quiet', 'true')
121 baseui.setconfig('ui', 'quiet', 'true')
117
122
118 return baseui
123 return baseui
119
124
120
125
121 def update_hg_ui_from_hgrc(baseui, repo_path):
126 def update_hg_ui_from_hgrc(baseui, repo_path):
122 path = os.path.join(repo_path, '.hg', 'hgrc')
127 path = os.path.join(repo_path, '.hg', 'hgrc')
123
128
124 if not os.path.isfile(path):
129 if not os.path.isfile(path):
125 log.debug('hgrc file is not present at %s, skipping...', path)
130 log.debug('hgrc file is not present at %s, skipping...', path)
126 return
131 return
127 log.debug('reading hgrc from %s', path)
132 log.debug('reading hgrc from %s', path)
128 cfg = mercurial.config.config()
133 cfg = mercurial.config.config()
129 cfg.read(path)
134 cfg.read(path)
130 for section in HG_UI_SECTIONS:
135 for section in HG_UI_SECTIONS:
131 for k, v in cfg.items(section):
136 for k, v in cfg.items(section):
132 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
133 baseui.setconfig(section, k, v)
138 baseui.setconfig(section, k, v)
134
139
135
140
136 def create_hg_wsgi_app(repo_path, repo_name, config):
141 def create_hg_wsgi_app(repo_path, repo_name, config):
137 """
142 """
138 Prepares a WSGI application to handle Mercurial requests.
143 Prepares a WSGI application to handle Mercurial requests.
139
144
140 :param config: is a list of 3-item tuples representing a ConfigObject
145 :param config: is a list of 3-item tuples representing a ConfigObject
141 (it is the serialized version of the config object).
146 (it is the serialized version of the config object).
142 """
147 """
143 log.debug("Creating Mercurial WSGI application")
148 log.debug("Creating Mercurial WSGI application")
144
149
145 baseui = make_hg_ui_from_config(config)
150 baseui = make_hg_ui_from_config(config)
146 update_hg_ui_from_hgrc(baseui, repo_path)
151 update_hg_ui_from_hgrc(baseui, repo_path)
147
152
148 try:
153 try:
149 return HgWeb(repo_path, name=repo_name, baseui=baseui)
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
150 except mercurial.error.RequirementError as exc:
155 except mercurial.error.RequirementError as e:
151 raise exceptions.RequirementException(exc)
156 raise exceptions.RequirementException(e)(e)
152
157
153
158
154 class GitHandler(object):
159 class GitHandler(object):
155 """
160 """
156 Handler for Git operations like push/pull etc
161 Handler for Git operations like push/pull etc
157 """
162 """
158 def __init__(self, repo_location, repo_name, git_path, update_server_info,
163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
159 extras):
164 extras):
160 if not os.path.isdir(repo_location):
165 if not os.path.isdir(repo_location):
161 raise OSError(repo_location)
166 raise OSError(repo_location)
162 self.content_path = repo_location
167 self.content_path = repo_location
163 self.repo_name = repo_name
168 self.repo_name = repo_name
164 self.repo_location = repo_location
169 self.repo_location = repo_location
165 self.extras = extras
170 self.extras = extras
166 self.git_path = git_path
171 self.git_path = git_path
167 self.update_server_info = update_server_info
172 self.update_server_info = update_server_info
168
173
169 def __call__(self, environ, start_response):
174 def __call__(self, environ, start_response):
170 app = webob.exc.HTTPNotFound()
175 app = webob.exc.HTTPNotFound()
171 candidate_paths = (
176 candidate_paths = (
172 self.content_path, os.path.join(self.content_path, '.git'))
177 self.content_path, os.path.join(self.content_path, '.git'))
173
178
174 for content_path in candidate_paths:
179 for content_path in candidate_paths:
175 try:
180 try:
176 app = pygrack.GitRepository(
181 app = pygrack.GitRepository(
177 self.repo_name, content_path, self.git_path,
182 self.repo_name, content_path, self.git_path,
178 self.update_server_info, self.extras)
183 self.update_server_info, self.extras)
179 break
184 break
180 except OSError:
185 except OSError:
181 continue
186 continue
182
187
183 return app(environ, start_response)
188 return app(environ, start_response)
184
189
185
190
186 def create_git_wsgi_app(repo_path, repo_name, config):
191 def create_git_wsgi_app(repo_path, repo_name, config):
187 """
192 """
188 Creates a WSGI application to handle Git requests.
193 Creates a WSGI application to handle Git requests.
189
194
190 :param config: is a dictionary holding the extras.
195 :param config: is a dictionary holding the extras.
191 """
196 """
192 git_path = settings.GIT_EXECUTABLE
197 git_path = settings.GIT_EXECUTABLE
193 update_server_info = config.pop('git_update_server_info')
198 update_server_info = config.pop('git_update_server_info')
194 app = GitHandler(
199 app = GitHandler(
195 repo_path, repo_name, git_path, update_server_info, config)
200 repo_path, repo_name, git_path, update_server_info, config)
196
201
197 return app
202 return app
198
203
199
204
200 class GitLFSHandler(object):
205 class GitLFSHandler(object):
201 """
206 """
202 Handler for Git LFS operations
207 Handler for Git LFS operations
203 """
208 """
204
209
205 def __init__(self, repo_location, repo_name, git_path, update_server_info,
210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
206 extras):
211 extras):
207 if not os.path.isdir(repo_location):
212 if not os.path.isdir(repo_location):
208 raise OSError(repo_location)
213 raise OSError(repo_location)
209 self.content_path = repo_location
214 self.content_path = repo_location
210 self.repo_name = repo_name
215 self.repo_name = repo_name
211 self.repo_location = repo_location
216 self.repo_location = repo_location
212 self.extras = extras
217 self.extras = extras
213 self.git_path = git_path
218 self.git_path = git_path
214 self.update_server_info = update_server_info
219 self.update_server_info = update_server_info
215
220
216 def get_app(self, git_lfs_enabled, git_lfs_store_path):
221 def get_app(self, git_lfs_enabled, git_lfs_store_path):
217 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
218 return app
223 return app
219
224
220
225
221 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
222 git_path = settings.GIT_EXECUTABLE
227 git_path = settings.GIT_EXECUTABLE
223 update_server_info = config.pop('git_update_server_info')
228 update_server_info = config.pop('git_update_server_info')
224 git_lfs_enabled = config.pop('git_lfs_enabled')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
225 git_lfs_store_path = config.pop('git_lfs_store_path')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
226 app = GitLFSHandler(
231 app = GitLFSHandler(
227 repo_path, repo_name, git_path, update_server_info, config)
232 repo_path, repo_name, git_path, update_server_info, config)
228
233
229 return app.get_app(git_lfs_enabled, git_lfs_store_path)
234 return app.get_app(git_lfs_enabled, git_lfs_store_path)
@@ -1,689 +1,705 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 from urllib2 import URLError
21 from urllib2 import URLError
22 import logging
22 import logging
23 import posixpath as vcspath
23 import posixpath as vcspath
24 import StringIO
24 import StringIO
25 import urllib
25 import urllib
26 import traceback
26 import traceback
27
27
28 import svn.client
28 import svn.client
29 import svn.core
29 import svn.core
30 import svn.delta
30 import svn.delta
31 import svn.diff
31 import svn.diff
32 import svn.fs
32 import svn.fs
33 import svn.repos
33 import svn.repos
34
34
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
35 from vcsserver import svn_diff, exceptions, subprocessio, settings
36 from vcsserver.base import RepoFactory, raise_from_original
36 from vcsserver.base import RepoFactory, raise_from_original
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40
40
41 # Set of svn compatible version flags.
41 # Set of svn compatible version flags.
42 # Compare with subversion/svnadmin/svnadmin.c
42 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = set([
43 svn_compatible_versions = {
44 'pre-1.4-compatible',
44 'pre-1.4-compatible',
45 'pre-1.5-compatible',
45 'pre-1.5-compatible',
46 'pre-1.6-compatible',
46 'pre-1.6-compatible',
47 'pre-1.8-compatible',
47 'pre-1.8-compatible',
48 'pre-1.9-compatible',
48 'pre-1.9-compatible'
49 ])
49 }
50
50
51 svn_compatible_versions_map = {
51 svn_compatible_versions_map = {
52 'pre-1.4-compatible': '1.3',
52 'pre-1.4-compatible': '1.3',
53 'pre-1.5-compatible': '1.4',
53 'pre-1.5-compatible': '1.4',
54 'pre-1.6-compatible': '1.5',
54 'pre-1.6-compatible': '1.5',
55 'pre-1.8-compatible': '1.7',
55 'pre-1.8-compatible': '1.7',
56 'pre-1.9-compatible': '1.8',
56 'pre-1.9-compatible': '1.8',
57 }
57 }
58
58
59
59
60 def reraise_safe_exceptions(func):
60 def reraise_safe_exceptions(func):
61 """Decorator for converting svn exceptions to something neutral."""
61 """Decorator for converting svn exceptions to something neutral."""
62 def wrapper(*args, **kwargs):
62 def wrapper(*args, **kwargs):
63 try:
63 try:
64 return func(*args, **kwargs)
64 return func(*args, **kwargs)
65 except Exception as e:
65 except Exception as e:
66 if not hasattr(e, '_vcs_kind'):
66 if not hasattr(e, '_vcs_kind'):
67 log.exception("Unhandled exception in hg remote call")
67 log.exception("Unhandled exception in svn remote call")
68 raise_from_original(exceptions.UnhandledException)
68 raise_from_original(exceptions.UnhandledException(e))
69 raise
69 raise
70 return wrapper
70 return wrapper
71
71
72
72
73 class SubversionFactory(RepoFactory):
73 class SubversionFactory(RepoFactory):
74 repo_type = 'svn'
74
75
75 def _create_repo(self, wire, create, compatible_version):
76 def _create_repo(self, wire, create, compatible_version):
76 path = svn.core.svn_path_canonicalize(wire['path'])
77 path = svn.core.svn_path_canonicalize(wire['path'])
77 if create:
78 if create:
78 fs_config = {'compatible-version': '1.9'}
79 fs_config = {'compatible-version': '1.9'}
79 if compatible_version:
80 if compatible_version:
80 if compatible_version not in svn_compatible_versions:
81 if compatible_version not in svn_compatible_versions:
81 raise Exception('Unknown SVN compatible version "{}"'
82 raise Exception('Unknown SVN compatible version "{}"'
82 .format(compatible_version))
83 .format(compatible_version))
83 fs_config['compatible-version'] = \
84 fs_config['compatible-version'] = \
84 svn_compatible_versions_map[compatible_version]
85 svn_compatible_versions_map[compatible_version]
85
86
86 log.debug('Create SVN repo with config "%s"', fs_config)
87 log.debug('Create SVN repo with config "%s"', fs_config)
87 repo = svn.repos.create(path, "", "", None, fs_config)
88 repo = svn.repos.create(path, "", "", None, fs_config)
88 else:
89 else:
89 repo = svn.repos.open(path)
90 repo = svn.repos.open(path)
90
91
91 log.debug('Got SVN object: %s', repo)
92 log.debug('Got SVN object: %s', repo)
92 return repo
93 return repo
93
94
94 def repo(self, wire, create=False, compatible_version=None):
95 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
96 """
97 Get a repository instance for the given path.
98
99 Uses internally the low level beaker API since the decorators introduce
100 significant overhead.
101 """
102 region = self._cache_region
103 context = wire.get('context', None)
104 repo_path = wire.get('path', '')
105 context_uid = '{}'.format(context)
106 cache = wire.get('cache', True)
107 cache_on = context and cache
108
109 @region.conditional_cache_on_arguments(condition=cache_on)
110 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
96 return self._create_repo(wire, create, compatible_version)
111 return self._create_repo(wire, create, compatible_version)
97
112
98 return self._repo(wire, create_new_repo)
113 return create_new_repo(self.repo_type, repo_path, context_uid,
114 compatible_version)
99
115
100
116
101 NODE_TYPE_MAPPING = {
117 NODE_TYPE_MAPPING = {
102 svn.core.svn_node_file: 'file',
118 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_dir: 'dir',
119 svn.core.svn_node_dir: 'dir',
104 }
120 }
105
121
106
122
107 class SvnRemote(object):
123 class SvnRemote(object):
108
124
109 def __init__(self, factory, hg_factory=None):
125 def __init__(self, factory, hg_factory=None):
110 self._factory = factory
126 self._factory = factory
111 # TODO: Remove once we do not use internal Mercurial objects anymore
127 # TODO: Remove once we do not use internal Mercurial objects anymore
112 # for subversion
128 # for subversion
113 self._hg_factory = hg_factory
129 self._hg_factory = hg_factory
114
130
115 @reraise_safe_exceptions
131 @reraise_safe_exceptions
116 def discover_svn_version(self):
132 def discover_svn_version(self):
117 try:
133 try:
118 import svn.core
134 import svn.core
119 svn_ver = svn.core.SVN_VERSION
135 svn_ver = svn.core.SVN_VERSION
120 except ImportError:
136 except ImportError:
121 svn_ver = None
137 svn_ver = None
122 return svn_ver
138 return svn_ver
123
139
124 def check_url(self, url, config_items):
140 def check_url(self, url, config_items):
125 # this can throw exception if not installed, but we detect this
141 # this can throw exception if not installed, but we detect this
126 from hgsubversion import svnrepo
142 from hgsubversion import svnrepo
127
143
128 baseui = self._hg_factory._create_config(config_items)
144 baseui = self._hg_factory._create_config(config_items)
129 # uuid function get's only valid UUID from proper repo, else
145 # uuid function get's only valid UUID from proper repo, else
130 # throws exception
146 # throws exception
131 try:
147 try:
132 svnrepo.svnremoterepo(baseui, url).svn.uuid
148 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 except Exception:
149 except Exception:
134 tb = traceback.format_exc()
150 tb = traceback.format_exc()
135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
151 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
136 raise URLError(
152 raise URLError(
137 '"%s" is not a valid Subversion source url.' % (url, ))
153 '"%s" is not a valid Subversion source url.' % (url, ))
138 return True
154 return True
139
155
140 def is_path_valid_repository(self, wire, path):
156 def is_path_valid_repository(self, wire, path):
141
157
142 # NOTE(marcink): short circuit the check for SVN repo
158 # NOTE(marcink): short circuit the check for SVN repo
143 # the repos.open might be expensive to check, but we have one cheap
159 # the repos.open might be expensive to check, but we have one cheap
144 # pre condition that we can use, to check for 'format' file
160 # pre condition that we can use, to check for 'format' file
145
161
146 if not os.path.isfile(os.path.join(path, 'format')):
162 if not os.path.isfile(os.path.join(path, 'format')):
147 return False
163 return False
148
164
149 try:
165 try:
150 svn.repos.open(path)
166 svn.repos.open(path)
151 except svn.core.SubversionException:
167 except svn.core.SubversionException:
152 tb = traceback.format_exc()
168 tb = traceback.format_exc()
153 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
169 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
154 return False
170 return False
155 return True
171 return True
156
172
157 @reraise_safe_exceptions
173 @reraise_safe_exceptions
158 def verify(self, wire,):
174 def verify(self, wire,):
159 repo_path = wire['path']
175 repo_path = wire['path']
160 if not self.is_path_valid_repository(wire, repo_path):
176 if not self.is_path_valid_repository(wire, repo_path):
161 raise Exception(
177 raise Exception(
162 "Path %s is not a valid Subversion repository." % repo_path)
178 "Path %s is not a valid Subversion repository." % repo_path)
163
179
164 cmd = ['svnadmin', 'info', repo_path]
180 cmd = ['svnadmin', 'info', repo_path]
165 stdout, stderr = subprocessio.run_command(cmd)
181 stdout, stderr = subprocessio.run_command(cmd)
166 return stdout
182 return stdout
167
183
168 def lookup(self, wire, revision):
184 def lookup(self, wire, revision):
169 if revision not in [-1, None, 'HEAD']:
185 if revision not in [-1, None, 'HEAD']:
170 raise NotImplementedError
186 raise NotImplementedError
171 repo = self._factory.repo(wire)
187 repo = self._factory.repo(wire)
172 fs_ptr = svn.repos.fs(repo)
188 fs_ptr = svn.repos.fs(repo)
173 head = svn.fs.youngest_rev(fs_ptr)
189 head = svn.fs.youngest_rev(fs_ptr)
174 return head
190 return head
175
191
176 def lookup_interval(self, wire, start_ts, end_ts):
192 def lookup_interval(self, wire, start_ts, end_ts):
177 repo = self._factory.repo(wire)
193 repo = self._factory.repo(wire)
178 fsobj = svn.repos.fs(repo)
194 fsobj = svn.repos.fs(repo)
179 start_rev = None
195 start_rev = None
180 end_rev = None
196 end_rev = None
181 if start_ts:
197 if start_ts:
182 start_ts_svn = apr_time_t(start_ts)
198 start_ts_svn = apr_time_t(start_ts)
183 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
199 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
184 else:
200 else:
185 start_rev = 1
201 start_rev = 1
186 if end_ts:
202 if end_ts:
187 end_ts_svn = apr_time_t(end_ts)
203 end_ts_svn = apr_time_t(end_ts)
188 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
204 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
189 else:
205 else:
190 end_rev = svn.fs.youngest_rev(fsobj)
206 end_rev = svn.fs.youngest_rev(fsobj)
191 return start_rev, end_rev
207 return start_rev, end_rev
192
208
193 def revision_properties(self, wire, revision):
209 def revision_properties(self, wire, revision):
194 repo = self._factory.repo(wire)
210 repo = self._factory.repo(wire)
195 fs_ptr = svn.repos.fs(repo)
211 fs_ptr = svn.repos.fs(repo)
196 return svn.fs.revision_proplist(fs_ptr, revision)
212 return svn.fs.revision_proplist(fs_ptr, revision)
197
213
198 def revision_changes(self, wire, revision):
214 def revision_changes(self, wire, revision):
199
215
200 repo = self._factory.repo(wire)
216 repo = self._factory.repo(wire)
201 fsobj = svn.repos.fs(repo)
217 fsobj = svn.repos.fs(repo)
202 rev_root = svn.fs.revision_root(fsobj, revision)
218 rev_root = svn.fs.revision_root(fsobj, revision)
203
219
204 editor = svn.repos.ChangeCollector(fsobj, rev_root)
220 editor = svn.repos.ChangeCollector(fsobj, rev_root)
205 editor_ptr, editor_baton = svn.delta.make_editor(editor)
221 editor_ptr, editor_baton = svn.delta.make_editor(editor)
206 base_dir = ""
222 base_dir = ""
207 send_deltas = False
223 send_deltas = False
208 svn.repos.replay2(
224 svn.repos.replay2(
209 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
225 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
210 editor_ptr, editor_baton, None)
226 editor_ptr, editor_baton, None)
211
227
212 added = []
228 added = []
213 changed = []
229 changed = []
214 removed = []
230 removed = []
215
231
216 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
232 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
217 for path, change in editor.changes.iteritems():
233 for path, change in editor.changes.iteritems():
218 # TODO: Decide what to do with directory nodes. Subversion can add
234 # TODO: Decide what to do with directory nodes. Subversion can add
219 # empty directories.
235 # empty directories.
220
236
221 if change.item_kind == svn.core.svn_node_dir:
237 if change.item_kind == svn.core.svn_node_dir:
222 continue
238 continue
223 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
239 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
224 added.append(path)
240 added.append(path)
225 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
241 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
226 svn.repos.CHANGE_ACTION_REPLACE]:
242 svn.repos.CHANGE_ACTION_REPLACE]:
227 changed.append(path)
243 changed.append(path)
228 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
244 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
229 removed.append(path)
245 removed.append(path)
230 else:
246 else:
231 raise NotImplementedError(
247 raise NotImplementedError(
232 "Action %s not supported on path %s" % (
248 "Action %s not supported on path %s" % (
233 change.action, path))
249 change.action, path))
234
250
235 changes = {
251 changes = {
236 'added': added,
252 'added': added,
237 'changed': changed,
253 'changed': changed,
238 'removed': removed,
254 'removed': removed,
239 }
255 }
240 return changes
256 return changes
241
257
242 def node_history(self, wire, path, revision, limit):
258 def node_history(self, wire, path, revision, limit):
243 cross_copies = False
259 cross_copies = False
244 repo = self._factory.repo(wire)
260 repo = self._factory.repo(wire)
245 fsobj = svn.repos.fs(repo)
261 fsobj = svn.repos.fs(repo)
246 rev_root = svn.fs.revision_root(fsobj, revision)
262 rev_root = svn.fs.revision_root(fsobj, revision)
247
263
248 history_revisions = []
264 history_revisions = []
249 history = svn.fs.node_history(rev_root, path)
265 history = svn.fs.node_history(rev_root, path)
250 history = svn.fs.history_prev(history, cross_copies)
266 history = svn.fs.history_prev(history, cross_copies)
251 while history:
267 while history:
252 __, node_revision = svn.fs.history_location(history)
268 __, node_revision = svn.fs.history_location(history)
253 history_revisions.append(node_revision)
269 history_revisions.append(node_revision)
254 if limit and len(history_revisions) >= limit:
270 if limit and len(history_revisions) >= limit:
255 break
271 break
256 history = svn.fs.history_prev(history, cross_copies)
272 history = svn.fs.history_prev(history, cross_copies)
257 return history_revisions
273 return history_revisions
258
274
259 def node_properties(self, wire, path, revision):
275 def node_properties(self, wire, path, revision):
260 repo = self._factory.repo(wire)
276 repo = self._factory.repo(wire)
261 fsobj = svn.repos.fs(repo)
277 fsobj = svn.repos.fs(repo)
262 rev_root = svn.fs.revision_root(fsobj, revision)
278 rev_root = svn.fs.revision_root(fsobj, revision)
263 return svn.fs.node_proplist(rev_root, path)
279 return svn.fs.node_proplist(rev_root, path)
264
280
265 def file_annotate(self, wire, path, revision):
281 def file_annotate(self, wire, path, revision):
266 abs_path = 'file://' + urllib.pathname2url(
282 abs_path = 'file://' + urllib.pathname2url(
267 vcspath.join(wire['path'], path))
283 vcspath.join(wire['path'], path))
268 file_uri = svn.core.svn_path_canonicalize(abs_path)
284 file_uri = svn.core.svn_path_canonicalize(abs_path)
269
285
270 start_rev = svn_opt_revision_value_t(0)
286 start_rev = svn_opt_revision_value_t(0)
271 peg_rev = svn_opt_revision_value_t(revision)
287 peg_rev = svn_opt_revision_value_t(revision)
272 end_rev = peg_rev
288 end_rev = peg_rev
273
289
274 annotations = []
290 annotations = []
275
291
276 def receiver(line_no, revision, author, date, line, pool):
292 def receiver(line_no, revision, author, date, line, pool):
277 annotations.append((line_no, revision, line))
293 annotations.append((line_no, revision, line))
278
294
279 # TODO: Cannot use blame5, missing typemap function in the swig code
295 # TODO: Cannot use blame5, missing typemap function in the swig code
280 try:
296 try:
281 svn.client.blame2(
297 svn.client.blame2(
282 file_uri, peg_rev, start_rev, end_rev,
298 file_uri, peg_rev, start_rev, end_rev,
283 receiver, svn.client.create_context())
299 receiver, svn.client.create_context())
284 except svn.core.SubversionException as exc:
300 except svn.core.SubversionException as exc:
285 log.exception("Error during blame operation.")
301 log.exception("Error during blame operation.")
286 raise Exception(
302 raise Exception(
287 "Blame not supported or file does not exist at path %s. "
303 "Blame not supported or file does not exist at path %s. "
288 "Error %s." % (path, exc))
304 "Error %s." % (path, exc))
289
305
290 return annotations
306 return annotations
291
307
292 def get_node_type(self, wire, path, rev=None):
308 def get_node_type(self, wire, path, rev=None):
293 repo = self._factory.repo(wire)
309 repo = self._factory.repo(wire)
294 fs_ptr = svn.repos.fs(repo)
310 fs_ptr = svn.repos.fs(repo)
295 if rev is None:
311 if rev is None:
296 rev = svn.fs.youngest_rev(fs_ptr)
312 rev = svn.fs.youngest_rev(fs_ptr)
297 root = svn.fs.revision_root(fs_ptr, rev)
313 root = svn.fs.revision_root(fs_ptr, rev)
298 node = svn.fs.check_path(root, path)
314 node = svn.fs.check_path(root, path)
299 return NODE_TYPE_MAPPING.get(node, None)
315 return NODE_TYPE_MAPPING.get(node, None)
300
316
301 def get_nodes(self, wire, path, revision=None):
317 def get_nodes(self, wire, path, revision=None):
302 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
303 fsobj = svn.repos.fs(repo)
319 fsobj = svn.repos.fs(repo)
304 if revision is None:
320 if revision is None:
305 revision = svn.fs.youngest_rev(fsobj)
321 revision = svn.fs.youngest_rev(fsobj)
306 root = svn.fs.revision_root(fsobj, revision)
322 root = svn.fs.revision_root(fsobj, revision)
307 entries = svn.fs.dir_entries(root, path)
323 entries = svn.fs.dir_entries(root, path)
308 result = []
324 result = []
309 for entry_path, entry_info in entries.iteritems():
325 for entry_path, entry_info in entries.iteritems():
310 result.append(
326 result.append(
311 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
327 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
312 return result
328 return result
313
329
314 def get_file_content(self, wire, path, rev=None):
330 def get_file_content(self, wire, path, rev=None):
315 repo = self._factory.repo(wire)
331 repo = self._factory.repo(wire)
316 fsobj = svn.repos.fs(repo)
332 fsobj = svn.repos.fs(repo)
317 if rev is None:
333 if rev is None:
318 rev = svn.fs.youngest_revision(fsobj)
334 rev = svn.fs.youngest_revision(fsobj)
319 root = svn.fs.revision_root(fsobj, rev)
335 root = svn.fs.revision_root(fsobj, rev)
320 content = svn.core.Stream(svn.fs.file_contents(root, path))
336 content = svn.core.Stream(svn.fs.file_contents(root, path))
321 return content.read()
337 return content.read()
322
338
323 def get_file_size(self, wire, path, revision=None):
339 def get_file_size(self, wire, path, revision=None):
324 repo = self._factory.repo(wire)
340 repo = self._factory.repo(wire)
325 fsobj = svn.repos.fs(repo)
341 fsobj = svn.repos.fs(repo)
326 if revision is None:
342 if revision is None:
327 revision = svn.fs.youngest_revision(fsobj)
343 revision = svn.fs.youngest_revision(fsobj)
328 root = svn.fs.revision_root(fsobj, revision)
344 root = svn.fs.revision_root(fsobj, revision)
329 size = svn.fs.file_length(root, path)
345 size = svn.fs.file_length(root, path)
330 return size
346 return size
331
347
332 def create_repository(self, wire, compatible_version=None):
348 def create_repository(self, wire, compatible_version=None):
333 log.info('Creating Subversion repository in path "%s"', wire['path'])
349 log.info('Creating Subversion repository in path "%s"', wire['path'])
334 self._factory.repo(wire, create=True,
350 self._factory.repo(wire, create=True,
335 compatible_version=compatible_version)
351 compatible_version=compatible_version)
336
352
337 def import_remote_repository(self, wire, src_url):
353 def import_remote_repository(self, wire, src_url):
338 repo_path = wire['path']
354 repo_path = wire['path']
339 if not self.is_path_valid_repository(wire, repo_path):
355 if not self.is_path_valid_repository(wire, repo_path):
340 raise Exception(
356 raise Exception(
341 "Path %s is not a valid Subversion repository." % repo_path)
357 "Path %s is not a valid Subversion repository." % repo_path)
342
358
343 # TODO: johbo: URL checks ?
359 # TODO: johbo: URL checks ?
344 import subprocess
360 import subprocess
345 rdump = subprocess.Popen(
361 rdump = subprocess.Popen(
346 ['svnrdump', 'dump', '--non-interactive', src_url],
362 ['svnrdump', 'dump', '--non-interactive', src_url],
347 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
363 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
348 load = subprocess.Popen(
364 load = subprocess.Popen(
349 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
365 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
350
366
351 # TODO: johbo: This can be a very long operation, might be better
367 # TODO: johbo: This can be a very long operation, might be better
352 # to track some kind of status and provide an api to check if the
368 # to track some kind of status and provide an api to check if the
353 # import is done.
369 # import is done.
354 rdump.wait()
370 rdump.wait()
355 load.wait()
371 load.wait()
356
372
357 if rdump.returncode != 0:
373 if rdump.returncode != 0:
358 errors = rdump.stderr.read()
374 errors = rdump.stderr.read()
359 log.error('svnrdump dump failed: statuscode %s: message: %s',
375 log.error('svnrdump dump failed: statuscode %s: message: %s',
360 rdump.returncode, errors)
376 rdump.returncode, errors)
361 reason = 'UNKNOWN'
377 reason = 'UNKNOWN'
362 if 'svnrdump: E230001:' in errors:
378 if 'svnrdump: E230001:' in errors:
363 reason = 'INVALID_CERTIFICATE'
379 reason = 'INVALID_CERTIFICATE'
364 raise Exception(
380 raise Exception(
365 'Failed to dump the remote repository from %s.' % src_url,
381 'Failed to dump the remote repository from %s.' % src_url,
366 reason)
382 reason)
367 if load.returncode != 0:
383 if load.returncode != 0:
368 raise Exception(
384 raise Exception(
369 'Failed to load the dump of remote repository from %s.' %
385 'Failed to load the dump of remote repository from %s.' %
370 (src_url, ))
386 (src_url, ))
371
387
372 def commit(self, wire, message, author, timestamp, updated, removed):
388 def commit(self, wire, message, author, timestamp, updated, removed):
373 assert isinstance(message, str)
389 assert isinstance(message, str)
374 assert isinstance(author, str)
390 assert isinstance(author, str)
375
391
376 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
377 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
378
394
379 rev = svn.fs.youngest_rev(fsobj)
395 rev = svn.fs.youngest_rev(fsobj)
380 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
396 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
381 txn_root = svn.fs.txn_root(txn)
397 txn_root = svn.fs.txn_root(txn)
382
398
383 for node in updated:
399 for node in updated:
384 TxnNodeProcessor(node, txn_root).update()
400 TxnNodeProcessor(node, txn_root).update()
385 for node in removed:
401 for node in removed:
386 TxnNodeProcessor(node, txn_root).remove()
402 TxnNodeProcessor(node, txn_root).remove()
387
403
388 commit_id = svn.repos.fs_commit_txn(repo, txn)
404 commit_id = svn.repos.fs_commit_txn(repo, txn)
389
405
390 if timestamp:
406 if timestamp:
391 apr_time = apr_time_t(timestamp)
407 apr_time = apr_time_t(timestamp)
392 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
408 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
393 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
409 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
394
410
395 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
411 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
396 return commit_id
412 return commit_id
397
413
398 def diff(self, wire, rev1, rev2, path1=None, path2=None,
414 def diff(self, wire, rev1, rev2, path1=None, path2=None,
399 ignore_whitespace=False, context=3):
415 ignore_whitespace=False, context=3):
400
416
401 wire.update(cache=False)
417 wire.update(cache=False)
402 repo = self._factory.repo(wire)
418 repo = self._factory.repo(wire)
403 diff_creator = SvnDiffer(
419 diff_creator = SvnDiffer(
404 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
420 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
405 try:
421 try:
406 return diff_creator.generate_diff()
422 return diff_creator.generate_diff()
407 except svn.core.SubversionException as e:
423 except svn.core.SubversionException as e:
408 log.exception(
424 log.exception(
409 "Error during diff operation operation. "
425 "Error during diff operation operation. "
410 "Path might not exist %s, %s" % (path1, path2))
426 "Path might not exist %s, %s" % (path1, path2))
411 return ""
427 return ""
412
428
413 @reraise_safe_exceptions
429 @reraise_safe_exceptions
414 def is_large_file(self, wire, path):
430 def is_large_file(self, wire, path):
415 return False
431 return False
416
432
417 @reraise_safe_exceptions
433 @reraise_safe_exceptions
418 def install_hooks(self, wire, force=False):
434 def install_hooks(self, wire, force=False):
419 from vcsserver.hook_utils import install_svn_hooks
435 from vcsserver.hook_utils import install_svn_hooks
420 repo_path = wire['path']
436 repo_path = wire['path']
421 binary_dir = settings.BINARY_DIR
437 binary_dir = settings.BINARY_DIR
422 executable = None
438 executable = None
423 if binary_dir:
439 if binary_dir:
424 executable = os.path.join(binary_dir, 'python')
440 executable = os.path.join(binary_dir, 'python')
425 return install_svn_hooks(
441 return install_svn_hooks(
426 repo_path, executable=executable, force_create=force)
442 repo_path, executable=executable, force_create=force)
427
443
428
444
429 class SvnDiffer(object):
445 class SvnDiffer(object):
430 """
446 """
431 Utility to create diffs based on difflib and the Subversion api
447 Utility to create diffs based on difflib and the Subversion api
432 """
448 """
433
449
434 binary_content = False
450 binary_content = False
435
451
436 def __init__(
452 def __init__(
437 self, repo, src_rev, src_path, tgt_rev, tgt_path,
453 self, repo, src_rev, src_path, tgt_rev, tgt_path,
438 ignore_whitespace, context):
454 ignore_whitespace, context):
439 self.repo = repo
455 self.repo = repo
440 self.ignore_whitespace = ignore_whitespace
456 self.ignore_whitespace = ignore_whitespace
441 self.context = context
457 self.context = context
442
458
443 fsobj = svn.repos.fs(repo)
459 fsobj = svn.repos.fs(repo)
444
460
445 self.tgt_rev = tgt_rev
461 self.tgt_rev = tgt_rev
446 self.tgt_path = tgt_path or ''
462 self.tgt_path = tgt_path or ''
447 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
463 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
448 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
464 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
449
465
450 self.src_rev = src_rev
466 self.src_rev = src_rev
451 self.src_path = src_path or self.tgt_path
467 self.src_path = src_path or self.tgt_path
452 self.src_root = svn.fs.revision_root(fsobj, src_rev)
468 self.src_root = svn.fs.revision_root(fsobj, src_rev)
453 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
469 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
454
470
455 self._validate()
471 self._validate()
456
472
457 def _validate(self):
473 def _validate(self):
458 if (self.tgt_kind != svn.core.svn_node_none and
474 if (self.tgt_kind != svn.core.svn_node_none and
459 self.src_kind != svn.core.svn_node_none and
475 self.src_kind != svn.core.svn_node_none and
460 self.src_kind != self.tgt_kind):
476 self.src_kind != self.tgt_kind):
461 # TODO: johbo: proper error handling
477 # TODO: johbo: proper error handling
462 raise Exception(
478 raise Exception(
463 "Source and target are not compatible for diff generation. "
479 "Source and target are not compatible for diff generation. "
464 "Source type: %s, target type: %s" %
480 "Source type: %s, target type: %s" %
465 (self.src_kind, self.tgt_kind))
481 (self.src_kind, self.tgt_kind))
466
482
467 def generate_diff(self):
483 def generate_diff(self):
468 buf = StringIO.StringIO()
484 buf = StringIO.StringIO()
469 if self.tgt_kind == svn.core.svn_node_dir:
485 if self.tgt_kind == svn.core.svn_node_dir:
470 self._generate_dir_diff(buf)
486 self._generate_dir_diff(buf)
471 else:
487 else:
472 self._generate_file_diff(buf)
488 self._generate_file_diff(buf)
473 return buf.getvalue()
489 return buf.getvalue()
474
490
475 def _generate_dir_diff(self, buf):
491 def _generate_dir_diff(self, buf):
476 editor = DiffChangeEditor()
492 editor = DiffChangeEditor()
477 editor_ptr, editor_baton = svn.delta.make_editor(editor)
493 editor_ptr, editor_baton = svn.delta.make_editor(editor)
478 svn.repos.dir_delta2(
494 svn.repos.dir_delta2(
479 self.src_root,
495 self.src_root,
480 self.src_path,
496 self.src_path,
481 '', # src_entry
497 '', # src_entry
482 self.tgt_root,
498 self.tgt_root,
483 self.tgt_path,
499 self.tgt_path,
484 editor_ptr, editor_baton,
500 editor_ptr, editor_baton,
485 authorization_callback_allow_all,
501 authorization_callback_allow_all,
486 False, # text_deltas
502 False, # text_deltas
487 svn.core.svn_depth_infinity, # depth
503 svn.core.svn_depth_infinity, # depth
488 False, # entry_props
504 False, # entry_props
489 False, # ignore_ancestry
505 False, # ignore_ancestry
490 )
506 )
491
507
492 for path, __, change in sorted(editor.changes):
508 for path, __, change in sorted(editor.changes):
493 self._generate_node_diff(
509 self._generate_node_diff(
494 buf, change, path, self.tgt_path, path, self.src_path)
510 buf, change, path, self.tgt_path, path, self.src_path)
495
511
496 def _generate_file_diff(self, buf):
512 def _generate_file_diff(self, buf):
497 change = None
513 change = None
498 if self.src_kind == svn.core.svn_node_none:
514 if self.src_kind == svn.core.svn_node_none:
499 change = "add"
515 change = "add"
500 elif self.tgt_kind == svn.core.svn_node_none:
516 elif self.tgt_kind == svn.core.svn_node_none:
501 change = "delete"
517 change = "delete"
502 tgt_base, tgt_path = vcspath.split(self.tgt_path)
518 tgt_base, tgt_path = vcspath.split(self.tgt_path)
503 src_base, src_path = vcspath.split(self.src_path)
519 src_base, src_path = vcspath.split(self.src_path)
504 self._generate_node_diff(
520 self._generate_node_diff(
505 buf, change, tgt_path, tgt_base, src_path, src_base)
521 buf, change, tgt_path, tgt_base, src_path, src_base)
506
522
507 def _generate_node_diff(
523 def _generate_node_diff(
508 self, buf, change, tgt_path, tgt_base, src_path, src_base):
524 self, buf, change, tgt_path, tgt_base, src_path, src_base):
509
525
510 if self.src_rev == self.tgt_rev and tgt_base == src_base:
526 if self.src_rev == self.tgt_rev and tgt_base == src_base:
511 # makes consistent behaviour with git/hg to return empty diff if
527 # makes consistent behaviour with git/hg to return empty diff if
512 # we compare same revisions
528 # we compare same revisions
513 return
529 return
514
530
515 tgt_full_path = vcspath.join(tgt_base, tgt_path)
531 tgt_full_path = vcspath.join(tgt_base, tgt_path)
516 src_full_path = vcspath.join(src_base, src_path)
532 src_full_path = vcspath.join(src_base, src_path)
517
533
518 self.binary_content = False
534 self.binary_content = False
519 mime_type = self._get_mime_type(tgt_full_path)
535 mime_type = self._get_mime_type(tgt_full_path)
520
536
521 if mime_type and not mime_type.startswith('text'):
537 if mime_type and not mime_type.startswith('text'):
522 self.binary_content = True
538 self.binary_content = True
523 buf.write("=" * 67 + '\n')
539 buf.write("=" * 67 + '\n')
524 buf.write("Cannot display: file marked as a binary type.\n")
540 buf.write("Cannot display: file marked as a binary type.\n")
525 buf.write("svn:mime-type = %s\n" % mime_type)
541 buf.write("svn:mime-type = %s\n" % mime_type)
526 buf.write("Index: %s\n" % (tgt_path, ))
542 buf.write("Index: %s\n" % (tgt_path, ))
527 buf.write("=" * 67 + '\n')
543 buf.write("=" * 67 + '\n')
528 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
544 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
529 'tgt_path': tgt_path})
545 'tgt_path': tgt_path})
530
546
531 if change == 'add':
547 if change == 'add':
532 # TODO: johbo: SVN is missing a zero here compared to git
548 # TODO: johbo: SVN is missing a zero here compared to git
533 buf.write("new file mode 10644\n")
549 buf.write("new file mode 10644\n")
534
550
535 #TODO(marcink): intro to binary detection of svn patches
551 #TODO(marcink): intro to binary detection of svn patches
536 # if self.binary_content:
552 # if self.binary_content:
537 # buf.write('GIT binary patch\n')
553 # buf.write('GIT binary patch\n')
538
554
539 buf.write("--- /dev/null\t(revision 0)\n")
555 buf.write("--- /dev/null\t(revision 0)\n")
540 src_lines = []
556 src_lines = []
541 else:
557 else:
542 if change == 'delete':
558 if change == 'delete':
543 buf.write("deleted file mode 10644\n")
559 buf.write("deleted file mode 10644\n")
544
560
545 #TODO(marcink): intro to binary detection of svn patches
561 #TODO(marcink): intro to binary detection of svn patches
546 # if self.binary_content:
562 # if self.binary_content:
547 # buf.write('GIT binary patch\n')
563 # buf.write('GIT binary patch\n')
548
564
549 buf.write("--- a/%s\t(revision %s)\n" % (
565 buf.write("--- a/%s\t(revision %s)\n" % (
550 src_path, self.src_rev))
566 src_path, self.src_rev))
551 src_lines = self._svn_readlines(self.src_root, src_full_path)
567 src_lines = self._svn_readlines(self.src_root, src_full_path)
552
568
553 if change == 'delete':
569 if change == 'delete':
554 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
570 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
555 tgt_lines = []
571 tgt_lines = []
556 else:
572 else:
557 buf.write("+++ b/%s\t(revision %s)\n" % (
573 buf.write("+++ b/%s\t(revision %s)\n" % (
558 tgt_path, self.tgt_rev))
574 tgt_path, self.tgt_rev))
559 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
575 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
560
576
561 if not self.binary_content:
577 if not self.binary_content:
562 udiff = svn_diff.unified_diff(
578 udiff = svn_diff.unified_diff(
563 src_lines, tgt_lines, context=self.context,
579 src_lines, tgt_lines, context=self.context,
564 ignore_blank_lines=self.ignore_whitespace,
580 ignore_blank_lines=self.ignore_whitespace,
565 ignore_case=False,
581 ignore_case=False,
566 ignore_space_changes=self.ignore_whitespace)
582 ignore_space_changes=self.ignore_whitespace)
567 buf.writelines(udiff)
583 buf.writelines(udiff)
568
584
569 def _get_mime_type(self, path):
585 def _get_mime_type(self, path):
570 try:
586 try:
571 mime_type = svn.fs.node_prop(
587 mime_type = svn.fs.node_prop(
572 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
588 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
573 except svn.core.SubversionException:
589 except svn.core.SubversionException:
574 mime_type = svn.fs.node_prop(
590 mime_type = svn.fs.node_prop(
575 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
591 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
576 return mime_type
592 return mime_type
577
593
578 def _svn_readlines(self, fs_root, node_path):
594 def _svn_readlines(self, fs_root, node_path):
579 if self.binary_content:
595 if self.binary_content:
580 return []
596 return []
581 node_kind = svn.fs.check_path(fs_root, node_path)
597 node_kind = svn.fs.check_path(fs_root, node_path)
582 if node_kind not in (
598 if node_kind not in (
583 svn.core.svn_node_file, svn.core.svn_node_symlink):
599 svn.core.svn_node_file, svn.core.svn_node_symlink):
584 return []
600 return []
585 content = svn.core.Stream(
601 content = svn.core.Stream(
586 svn.fs.file_contents(fs_root, node_path)).read()
602 svn.fs.file_contents(fs_root, node_path)).read()
587 return content.splitlines(True)
603 return content.splitlines(True)
588
604
589
605
590
606
591 class DiffChangeEditor(svn.delta.Editor):
607 class DiffChangeEditor(svn.delta.Editor):
592 """
608 """
593 Records changes between two given revisions
609 Records changes between two given revisions
594 """
610 """
595
611
596 def __init__(self):
612 def __init__(self):
597 self.changes = []
613 self.changes = []
598
614
599 def delete_entry(self, path, revision, parent_baton, pool=None):
615 def delete_entry(self, path, revision, parent_baton, pool=None):
600 self.changes.append((path, None, 'delete'))
616 self.changes.append((path, None, 'delete'))
601
617
602 def add_file(
618 def add_file(
603 self, path, parent_baton, copyfrom_path, copyfrom_revision,
619 self, path, parent_baton, copyfrom_path, copyfrom_revision,
604 file_pool=None):
620 file_pool=None):
605 self.changes.append((path, 'file', 'add'))
621 self.changes.append((path, 'file', 'add'))
606
622
607 def open_file(self, path, parent_baton, base_revision, file_pool=None):
623 def open_file(self, path, parent_baton, base_revision, file_pool=None):
608 self.changes.append((path, 'file', 'change'))
624 self.changes.append((path, 'file', 'change'))
609
625
610
626
611 def authorization_callback_allow_all(root, path, pool):
627 def authorization_callback_allow_all(root, path, pool):
612 return True
628 return True
613
629
614
630
615 class TxnNodeProcessor(object):
631 class TxnNodeProcessor(object):
616 """
632 """
617 Utility to process the change of one node within a transaction root.
633 Utility to process the change of one node within a transaction root.
618
634
619 It encapsulates the knowledge of how to add, update or remove
635 It encapsulates the knowledge of how to add, update or remove
620 a node for a given transaction root. The purpose is to support the method
636 a node for a given transaction root. The purpose is to support the method
621 `SvnRemote.commit`.
637 `SvnRemote.commit`.
622 """
638 """
623
639
624 def __init__(self, node, txn_root):
640 def __init__(self, node, txn_root):
625 assert isinstance(node['path'], str)
641 assert isinstance(node['path'], str)
626
642
627 self.node = node
643 self.node = node
628 self.txn_root = txn_root
644 self.txn_root = txn_root
629
645
630 def update(self):
646 def update(self):
631 self._ensure_parent_dirs()
647 self._ensure_parent_dirs()
632 self._add_file_if_node_does_not_exist()
648 self._add_file_if_node_does_not_exist()
633 self._update_file_content()
649 self._update_file_content()
634 self._update_file_properties()
650 self._update_file_properties()
635
651
636 def remove(self):
652 def remove(self):
637 svn.fs.delete(self.txn_root, self.node['path'])
653 svn.fs.delete(self.txn_root, self.node['path'])
638 # TODO: Clean up directory if empty
654 # TODO: Clean up directory if empty
639
655
640 def _ensure_parent_dirs(self):
656 def _ensure_parent_dirs(self):
641 curdir = vcspath.dirname(self.node['path'])
657 curdir = vcspath.dirname(self.node['path'])
642 dirs_to_create = []
658 dirs_to_create = []
643 while not self._svn_path_exists(curdir):
659 while not self._svn_path_exists(curdir):
644 dirs_to_create.append(curdir)
660 dirs_to_create.append(curdir)
645 curdir = vcspath.dirname(curdir)
661 curdir = vcspath.dirname(curdir)
646
662
647 for curdir in reversed(dirs_to_create):
663 for curdir in reversed(dirs_to_create):
648 log.debug('Creating missing directory "%s"', curdir)
664 log.debug('Creating missing directory "%s"', curdir)
649 svn.fs.make_dir(self.txn_root, curdir)
665 svn.fs.make_dir(self.txn_root, curdir)
650
666
651 def _svn_path_exists(self, path):
667 def _svn_path_exists(self, path):
652 path_status = svn.fs.check_path(self.txn_root, path)
668 path_status = svn.fs.check_path(self.txn_root, path)
653 return path_status != svn.core.svn_node_none
669 return path_status != svn.core.svn_node_none
654
670
655 def _add_file_if_node_does_not_exist(self):
671 def _add_file_if_node_does_not_exist(self):
656 kind = svn.fs.check_path(self.txn_root, self.node['path'])
672 kind = svn.fs.check_path(self.txn_root, self.node['path'])
657 if kind == svn.core.svn_node_none:
673 if kind == svn.core.svn_node_none:
658 svn.fs.make_file(self.txn_root, self.node['path'])
674 svn.fs.make_file(self.txn_root, self.node['path'])
659
675
660 def _update_file_content(self):
676 def _update_file_content(self):
661 assert isinstance(self.node['content'], str)
677 assert isinstance(self.node['content'], str)
662 handler, baton = svn.fs.apply_textdelta(
678 handler, baton = svn.fs.apply_textdelta(
663 self.txn_root, self.node['path'], None, None)
679 self.txn_root, self.node['path'], None, None)
664 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
680 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
665
681
666 def _update_file_properties(self):
682 def _update_file_properties(self):
667 properties = self.node.get('properties', {})
683 properties = self.node.get('properties', {})
668 for key, value in properties.iteritems():
684 for key, value in properties.iteritems():
669 svn.fs.change_node_prop(
685 svn.fs.change_node_prop(
670 self.txn_root, self.node['path'], key, value)
686 self.txn_root, self.node['path'], key, value)
671
687
672
688
673 def apr_time_t(timestamp):
689 def apr_time_t(timestamp):
674 """
690 """
675 Convert a Python timestamp into APR timestamp type apr_time_t
691 Convert a Python timestamp into APR timestamp type apr_time_t
676 """
692 """
677 return timestamp * 1E6
693 return timestamp * 1E6
678
694
679
695
680 def svn_opt_revision_value_t(num):
696 def svn_opt_revision_value_t(num):
681 """
697 """
682 Put `num` into a `svn_opt_revision_value_t` structure.
698 Put `num` into a `svn_opt_revision_value_t` structure.
683 """
699 """
684 value = svn.core.svn_opt_revision_value_t()
700 value = svn.core.svn_opt_revision_value_t()
685 value.number = num
701 value.number = num
686 revision = svn.core.svn_opt_revision_t()
702 revision = svn.core.svn_opt_revision_t()
687 revision.kind = svn.core.svn_opt_revision_number
703 revision.kind = svn.core.svn_opt_revision_number
688 revision.value = value
704 revision.value = value
689 return revision
705 return revision
@@ -1,58 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19
19
20 import pytest
20 import pytest
21
21
22
22
23 def pytest_addoption(parser):
23 def pytest_addoption(parser):
24 parser.addoption(
24 parser.addoption(
25 '--repeat', type=int, default=100,
25 '--repeat', type=int, default=100,
26 help="Number of repetitions in performance tests.")
26 help="Number of repetitions in performance tests.")
27
27
28
28
29 @pytest.fixture(scope='session')
29 @pytest.fixture(scope='session')
30 def repeat(request):
30 def repeat(request):
31 """
31 """
32 The number of repetitions is based on this fixture.
32 The number of repetitions is based on this fixture.
33
33
34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
35 tests are not too slow in our default test suite.
35 tests are not too slow in our default test suite.
36 """
36 """
37 return request.config.getoption('--repeat')
37 return request.config.getoption('--repeat')
38
38
39
39
40 @pytest.fixture(scope='session')
40 @pytest.fixture(scope='session')
41 def vcsserver_port(request):
41 def vcsserver_port(request):
42 port = get_available_port()
42 port = get_available_port()
43 print 'Using vcsserver port %s' % (port, )
43 print('Using vcsserver port %s' % (port, ))
44 return port
44 return port
45
45
46
46
47 def get_available_port():
47 def get_available_port():
48 family = socket.AF_INET
48 family = socket.AF_INET
49 socktype = socket.SOCK_STREAM
49 socktype = socket.SOCK_STREAM
50 host = '127.0.0.1'
50 host = '127.0.0.1'
51
51
52 mysocket = socket.socket(family, socktype)
52 mysocket = socket.socket(family, socktype)
53 mysocket.bind((host, 0))
53 mysocket.bind((host, 0))
54 port = mysocket.getsockname()[1]
54 port = mysocket.getsockname()[1]
55 mysocket.close()
55 mysocket.close()
56 del mysocket
56 del mysocket
57 return port
57 return port
58
@@ -1,162 +1,165 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.fetch(
81 self.remote_git.fetch(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire=None, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102 def test_remove_ref(self):
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
106 assert ref_to_remove not in self.mock_repo.refs
107
107
108
108
109 class TestReraiseSafeExceptions(object):
109 class TestReraiseSafeExceptions(object):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
111 factory = Mock()
111 factory = Mock()
112 git_remote = git.GitRemote(factory)
112 git_remote = git.GitRemote(factory)
113
113
114 def fake_function():
114 def fake_function():
115 return None
115 return None
116
116
117 decorator = git.reraise_safe_exceptions(fake_function)
117 decorator = git.reraise_safe_exceptions(fake_function)
118
118
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 for method_name, method in methods:
120 for method_name, method in methods:
121 if not method_name.startswith('_'):
121 if not method_name.startswith('_'):
122 assert method.im_func.__code__ == decorator.__code__
122 assert method.im_func.__code__ == decorator.__code__
123
123
124 @pytest.mark.parametrize('side_effect, expected_type', [
124 @pytest.mark.parametrize('side_effect, expected_type', [
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 (dulwich.errors.HangupException(), 'error'),
129 (dulwich.errors.HangupException(), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 ])
131 ])
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 @git.reraise_safe_exceptions
133 @git.reraise_safe_exceptions
134 def fake_method():
134 def fake_method():
135 raise side_effect
135 raise side_effect
136
136
137 with pytest.raises(Exception) as exc_info:
137 with pytest.raises(Exception) as exc_info:
138 fake_method()
138 fake_method()
139 assert type(exc_info.value) == Exception
139 assert type(exc_info.value) == Exception
140 assert exc_info.value._vcs_kind == expected_type
140 assert exc_info.value._vcs_kind == expected_type
141
141
142
142
143 class TestDulwichRepoWrapper(object):
143 class TestDulwichRepoWrapper(object):
144 def test_calls_close_on_delete(self):
144 def test_calls_close_on_delete(self):
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 with isdir_patcher:
146 with isdir_patcher:
147 repo = git.Repo('/tmp/abcde')
147 repo = git.Repo('/tmp/abcde')
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 del repo
149 del repo
150 close_mock.assert_called_once_with()
150 close_mock.assert_called_once_with()
151
151
152
152
153 class TestGitFactory(object):
153 class TestGitFactory(object):
154 def test_create_repo_returns_dulwich_wrapper(self):
154 def test_create_repo_returns_dulwich_wrapper(self):
155 factory = git.GitFactory(repo_cache=Mock())
155
156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 mock.side_effect = {'repo_objects': ''}
158 factory = git.GitFactory()
156 wire = {
159 wire = {
157 'path': '/tmp/abcde'
160 'path': '/tmp/abcde'
158 }
161 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
163 with isdir_patcher:
161 result = factory._create_repo(wire, True)
164 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
165 assert isinstance(result, git.Repo)
@@ -1,127 +1,127 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, MagicMock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hg, hgcompat
27
27
28
28
29 class TestHGLookup(object):
29 class TestHGLookup(object):
30 def setup(self):
30 def setup(self):
31 self.mock_repo = MagicMock()
31 self.mock_repo = MagicMock()
32 self.mock_repo.__getitem__.side_effect = LookupError(
32 self.mock_repo.__getitem__.side_effect = LookupError(
33 'revision_or_commit_id', 'index', 'message')
33 'revision_or_commit_id', 'index', 'message')
34 factory = Mock()
34 factory = Mock()
35 factory.repo = Mock(return_value=self.mock_repo)
35 factory.repo = Mock(return_value=self.mock_repo)
36 self.remote_hg = hg.HgRemote(factory)
36 self.remote_hg = hg.HgRemote(factory)
37
37
38 def test_fail_lookup_hg(self):
38 def test_fail_lookup_hg(self):
39 with pytest.raises(Exception) as exc_info:
39 with pytest.raises(Exception) as exc_info:
40 self.remote_hg.lookup(
40 self.remote_hg.lookup(
41 wire=None, revision='revision_or_commit_id', both=True)
41 wire=None, revision='revision_or_commit_id', both=True)
42
42
43 assert exc_info.value._vcs_kind == 'lookup'
43 assert exc_info.value._vcs_kind == 'lookup'
44 assert 'revision_or_commit_id' in exc_info.value.args
44 assert 'revision_or_commit_id' in exc_info.value.args
45
45
46
46
47 class TestDiff(object):
47 class TestDiff(object):
48 def test_raising_safe_exception_when_lookup_failed(self):
48 def test_raising_safe_exception_when_lookup_failed(self):
49 repo = Mock()
49 repo = Mock()
50 factory = Mock()
50 factory = Mock()
51 factory.repo = Mock(return_value=repo)
51 factory.repo = Mock(return_value=repo)
52 hg_remote = hg.HgRemote(factory)
52 hg_remote = hg.HgRemote(factory)
53 with patch('mercurial.patch.diff') as diff_mock:
53 with patch('mercurial.patch.diff') as diff_mock:
54 diff_mock.side_effect = LookupError(
54 diff_mock.side_effect = LookupError(
55 'deadbeef', 'index', 'message')
55 'deadbeef', 'index', 'message')
56 with pytest.raises(Exception) as exc_info:
56 with pytest.raises(Exception) as exc_info:
57 hg_remote.diff(
57 hg_remote.diff(
58 wire=None, rev1='deadbeef', rev2='deadbee1',
58 wire=None, rev1='deadbeef', rev2='deadbee1',
59 file_filter=None, opt_git=True, opt_ignorews=True,
59 file_filter=None, opt_git=True, opt_ignorews=True,
60 context=3)
60 context=3)
61 assert type(exc_info.value) == Exception
61 assert type(exc_info.value) == Exception
62 assert exc_info.value._vcs_kind == 'lookup'
62 assert exc_info.value._vcs_kind == 'lookup'
63
63
64
64
65 class TestReraiseSafeExceptions(object):
65 class TestReraiseSafeExceptions(object):
66 def test_method_decorated_with_reraise_safe_exceptions(self):
66 def test_method_decorated_with_reraise_safe_exceptions(self):
67 factory = Mock()
67 factory = Mock()
68 hg_remote = hg.HgRemote(factory)
68 hg_remote = hg.HgRemote(factory)
69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
70 decorator = hg.reraise_safe_exceptions(None)
70 decorator = hg.reraise_safe_exceptions(None)
71 for method_name, method in methods:
71 for method_name, method in methods:
72 if not method_name.startswith('_'):
72 if not method_name.startswith('_'):
73 assert method.im_func.__code__ == decorator.__code__
73 assert method.im_func.__code__ == decorator.__code__
74
74
75 @pytest.mark.parametrize('side_effect, expected_type', [
75 @pytest.mark.parametrize('side_effect, expected_type', [
76 (hgcompat.Abort(), 'abort'),
76 (hgcompat.Abort(), 'abort'),
77 (hgcompat.InterventionRequired(), 'abort'),
77 (hgcompat.InterventionRequired(), 'abort'),
78 (hgcompat.RepoLookupError(), 'lookup'),
78 (hgcompat.RepoLookupError(), 'lookup'),
79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
80 (hgcompat.RepoError(), 'error'),
80 (hgcompat.RepoError(), 'error'),
81 (hgcompat.RequirementError(), 'requirement'),
81 (hgcompat.RequirementError(), 'requirement'),
82 ])
82 ])
83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
84 @hg.reraise_safe_exceptions
84 @hg.reraise_safe_exceptions
85 def fake_method():
85 def fake_method():
86 raise side_effect
86 raise side_effect
87
87
88 with pytest.raises(Exception) as exc_info:
88 with pytest.raises(Exception) as exc_info:
89 fake_method()
89 fake_method()
90 assert type(exc_info.value) == Exception
90 assert type(exc_info.value) == Exception
91 assert exc_info.value._vcs_kind == expected_type
91 assert exc_info.value._vcs_kind == expected_type
92
92
93 def test_keeps_original_traceback(self):
93 def test_keeps_original_traceback(self):
94 @hg.reraise_safe_exceptions
94 @hg.reraise_safe_exceptions
95 def fake_method():
95 def fake_method():
96 try:
96 try:
97 raise hgcompat.Abort()
97 raise hgcompat.Abort()
98 except:
98 except:
99 self.original_traceback = traceback.format_tb(
99 self.original_traceback = traceback.format_tb(
100 sys.exc_info()[2])
100 sys.exc_info()[2])
101 raise
101 raise
102
102
103 try:
103 try:
104 fake_method()
104 fake_method()
105 except Exception:
105 except Exception:
106 new_traceback = traceback.format_tb(sys.exc_info()[2])
106 new_traceback = traceback.format_tb(sys.exc_info()[2])
107
107
108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
109 assert new_traceback_tail == self.original_traceback
109 assert new_traceback_tail == self.original_traceback
110
110
111 def test_maps_unknow_exceptions_to_unhandled(self):
111 def test_maps_unknow_exceptions_to_unhandled(self):
112 @hg.reraise_safe_exceptions
112 @hg.reraise_safe_exceptions
113 def stub_method():
113 def stub_method():
114 raise ValueError('stub')
114 raise ValueError('stub')
115
115
116 with pytest.raises(Exception) as exc_info:
116 with pytest.raises(Exception) as exc_info:
117 stub_method()
117 stub_method()
118 assert exc_info.value._vcs_kind == 'unhandled'
118 assert exc_info.value._vcs_kind == 'unhandled'
119
119
120 def test_does_not_map_known_exceptions(self):
120 def test_does_not_map_known_exceptions(self):
121 @hg.reraise_safe_exceptions
121 @hg.reraise_safe_exceptions
122 def stub_method():
122 def stub_method():
123 raise exceptions.LookupException('stub')
123 raise exceptions.LookupException()('stub')
124
124
125 with pytest.raises(Exception) as exc_info:
125 with pytest.raises(Exception) as exc_info:
126 stub_method()
126 stub_method()
127 assert exc_info.value._vcs_kind == 'lookup'
127 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,130 +1,124 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import hgcompat, hgpatches
21 from vcsserver import hgcompat, hgpatches
22
22
23
23
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25
25
26
26
27 def test_patch_largefiles_capabilities_applies_patch(
27 def test_patch_largefiles_capabilities_applies_patch(
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
31 assert lfproto._capabilities.func_name == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
36 orig_capabilities):
36 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 hgcompat.largefiles.proto, stub_extensions)
38 hgcompat.largefiles.proto, stub_extensions)
38
39
39 caps = dynamic_capabilities(stub_repo, stub_proto)
40 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
40
41
41 stub_extensions.assert_called_once_with(stub_ui)
42 stub_extensions.assert_called_once_with(stub_ui)
42 assert LARGEFILES_CAPABILITY not in caps
43 assert LARGEFILES_CAPABILITY not in caps
43
44
44
45
45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
48 hgcompat.largefiles.proto, stub_extensions)
49
50 # This happens when the extension is loaded for the first time, important
51 # to ensure that an updated function is correctly picked up.
52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
53 return_value='REPLACED')
54
55 caps = dynamic_capabilities(stub_repo, stub_proto)
56 assert 'REPLACED' == caps
57
58
59 def test_dynamic_capabilities_ignores_updated_capabilities(
46 def test_dynamic_capabilities_ignores_updated_capabilities(
60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
48 orig_capabilities):
61 stub_extensions.return_value = [('largefiles', mock.Mock())]
49 stub_extensions.return_value = [('largefiles', mock.Mock())]
62 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
50 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
63 hgcompat.largefiles.proto, stub_extensions)
51 hgcompat.largefiles.proto, stub_extensions)
64
52
65 # This happens when the extension is loaded for the first time, important
53 # This happens when the extension is loaded for the first time, important
66 # to ensure that an updated function is correctly picked up.
54 # to ensure that an updated function is correctly picked up.
67 hgcompat.largefiles.proto.capabilities = mock.Mock(
55 hgcompat.largefiles.proto._capabilities = mock.Mock(
68 side_effect=Exception('Must not be called'))
56 side_effect=Exception('Must not be called'))
69
57
70 dynamic_capabilities(stub_repo, stub_proto)
58 dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
71
59
72
60
73 def test_dynamic_capabilities_uses_largefiles_if_enabled(
61 def test_dynamic_capabilities_uses_largefiles_if_enabled(
74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
62 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
63 orig_capabilities):
75 stub_extensions.return_value = [('largefiles', mock.Mock())]
64 stub_extensions.return_value = [('largefiles', mock.Mock())]
76
65
77 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
66 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
78 hgcompat.largefiles.proto, stub_extensions)
67 hgcompat.largefiles.proto, stub_extensions)
79
68
80 caps = dynamic_capabilities(stub_repo, stub_proto)
69 caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
81
70
82 stub_extensions.assert_called_once_with(stub_ui)
71 stub_extensions.assert_called_once_with(stub_ui)
83 assert LARGEFILES_CAPABILITY in caps
72 assert LARGEFILES_CAPABILITY in caps
84
73
85
74
86 def test_hgsubversion_import():
75 def test_hgsubversion_import():
87 from hgsubversion import svnrepo
76 from hgsubversion import svnrepo
88 assert svnrepo
77 assert svnrepo
89
78
90
79
91 @pytest.fixture
80 @pytest.fixture
92 def patched_capabilities(request):
81 def patched_capabilities(request):
93 """
82 """
94 Patch in `capabilitiesorig` and restore both capability functions.
83 Patch in `capabilitiesorig` and restore both capability functions.
95 """
84 """
96 lfproto = hgcompat.largefiles.proto
85 lfproto = hgcompat.largefiles.proto
97 orig_capabilities = lfproto.capabilities
86 orig_capabilities = lfproto._capabilities
98 orig_capabilitiesorig = lfproto.capabilitiesorig
99
100 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
101
87
102 @request.addfinalizer
88 @request.addfinalizer
103 def restore():
89 def restore():
104 lfproto.capabilities = orig_capabilities
90 lfproto._capabilities = orig_capabilities
105 lfproto.capabilitiesorig = orig_capabilitiesorig
106
91
107
92
108 @pytest.fixture
93 @pytest.fixture
109 def stub_repo(stub_ui):
94 def stub_repo(stub_ui):
110 repo = mock.Mock()
95 repo = mock.Mock()
111 repo.ui = stub_ui
96 repo.ui = stub_ui
112 return repo
97 return repo
113
98
114
99
115 @pytest.fixture
100 @pytest.fixture
116 def stub_proto(stub_ui):
101 def stub_proto(stub_ui):
117 proto = mock.Mock()
102 proto = mock.Mock()
118 proto.ui = stub_ui
103 proto.ui = stub_ui
119 return proto
104 return proto
120
105
121
106
122 @pytest.fixture
107 @pytest.fixture
108 def orig_capabilities():
109 from mercurial.wireprotov1server import wireprotocaps
110
111 def _capabilities(repo, proto):
112 return wireprotocaps
113 return _capabilities
114
115
116 @pytest.fixture
123 def stub_ui():
117 def stub_ui():
124 return hgcompat.ui.ui()
118 return hgcompat.ui.ui()
125
119
126
120
127 @pytest.fixture
121 @pytest.fixture
128 def stub_extensions():
122 def stub_extensions():
129 extensions = mock.Mock(return_value=tuple())
123 extensions = mock.Mock(return_value=tuple())
130 return extensions
124 return extensions
@@ -1,44 +1,39 b''
1 """
1 """
2 Tests used to profile the HTTP based implementation.
2 Tests used to profile the HTTP based implementation.
3 """
3 """
4
4
5 import pytest
5 import pytest
6 import webtest
6 import webtest
7
7
8 from vcsserver.http_main import main
8 from vcsserver.http_main import main
9
9
10
10
11 @pytest.fixture
11 @pytest.fixture
12 def vcs_app():
12 def vcs_app():
13 stub_settings = {
13 stub_settings = {
14 'dev.use_echo_app': 'true',
14 'dev.use_echo_app': 'true',
15 'beaker.cache.regions': 'repo_object',
16 'beaker.cache.repo_object.type': 'memorylru',
17 'beaker.cache.repo_object.max_items': '100',
18 'beaker.cache.repo_object.expire': '300',
19 'beaker.cache.repo_object.enabled': 'true',
20 'locale': 'en_US.UTF-8',
15 'locale': 'en_US.UTF-8',
21 }
16 }
22 vcs_app = main({}, **stub_settings)
17 vcs_app = main({}, **stub_settings)
23 app = webtest.TestApp(vcs_app)
18 app = webtest.TestApp(vcs_app)
24 return app
19 return app
25
20
26
21
27 @pytest.fixture(scope='module')
22 @pytest.fixture(scope='module')
28 def data():
23 def data():
29 one_kb = 'x' * 1024
24 one_kb = 'x' * 1024
30 return one_kb * 1024 * 10
25 return one_kb * 1024 * 10
31
26
32
27
33 def test_http_app_streaming_with_data(data, repeat, vcs_app):
28 def test_http_app_streaming_with_data(data, repeat, vcs_app):
34 app = vcs_app
29 app = vcs_app
35 for x in xrange(repeat / 10):
30 for x in xrange(repeat / 10):
36 response = app.post('/stream/git/', params=data)
31 response = app.post('/stream/git/', params=data)
37 assert response.status_code == 200
32 assert response.status_code == 200
38
33
39
34
40 def test_http_app_streaming_no_data(repeat, vcs_app):
35 def test_http_app_streaming_no_data(repeat, vcs_app):
41 app = vcs_app
36 app = vcs_app
42 for x in xrange(repeat / 10):
37 for x in xrange(repeat / 10):
43 response = app.post('/stream/git/')
38 response = app.post('/stream/git/')
44 assert response.status_code == 200
39 assert response.status_code == 200
@@ -1,82 +1,89 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
17 import logging
18 import hashlib
18
19
19 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
20
21
21
22
22 def safe_int(val, default=None):
23 def safe_int(val, default=None):
23 """
24 """
24 Returns int() of val if val is not convertable to int use default
25 Returns int() of val if val is not convertable to int use default
25 instead
26 instead
26
27
27 :param val:
28 :param val:
28 :param default:
29 :param default:
29 """
30 """
30
31
31 try:
32 try:
32 val = int(val)
33 val = int(val)
33 except (ValueError, TypeError):
34 except (ValueError, TypeError):
34 val = default
35 val = default
35
36
36 return val
37 return val
37
38
38
39
39 def safe_str(unicode_, to_encoding=['utf8']):
40 def safe_str(unicode_, to_encoding=['utf8']):
40 """
41 """
41 safe str function. Does few trick to turn unicode_ into string
42 safe str function. Does few trick to turn unicode_ into string
42
43
43 In case of UnicodeEncodeError, we try to return it with encoding detected
44 In case of UnicodeEncodeError, we try to return it with encoding detected
44 by chardet library if it fails fallback to string with errors replaced
45 by chardet library if it fails fallback to string with errors replaced
45
46
46 :param unicode_: unicode to encode
47 :param unicode_: unicode to encode
47 :rtype: str
48 :rtype: str
48 :returns: str object
49 :returns: str object
49 """
50 """
50
51
51 # if it's not basestr cast to str
52 # if it's not basestr cast to str
52 if not isinstance(unicode_, basestring):
53 if not isinstance(unicode_, basestring):
53 return str(unicode_)
54 return str(unicode_)
54
55
55 if isinstance(unicode_, str):
56 if isinstance(unicode_, str):
56 return unicode_
57 return unicode_
57
58
58 if not isinstance(to_encoding, (list, tuple)):
59 if not isinstance(to_encoding, (list, tuple)):
59 to_encoding = [to_encoding]
60 to_encoding = [to_encoding]
60
61
61 for enc in to_encoding:
62 for enc in to_encoding:
62 try:
63 try:
63 return unicode_.encode(enc)
64 return unicode_.encode(enc)
64 except UnicodeEncodeError:
65 except UnicodeEncodeError:
65 pass
66 pass
66
67
67 try:
68 try:
68 import chardet
69 import chardet
69 encoding = chardet.detect(unicode_)['encoding']
70 encoding = chardet.detect(unicode_)['encoding']
70 if encoding is None:
71 if encoding is None:
71 raise UnicodeEncodeError()
72 raise UnicodeEncodeError()
72
73
73 return unicode_.encode(encoding)
74 return unicode_.encode(encoding)
74 except (ImportError, UnicodeEncodeError):
75 except (ImportError, UnicodeEncodeError):
75 return unicode_.encode(to_encoding[0], 'replace')
76 return unicode_.encode(to_encoding[0], 'replace')
76
77
77
78
78 class AttributeDict(dict):
79 class AttributeDict(dict):
79 def __getattr__(self, attr):
80 def __getattr__(self, attr):
80 return self.get(attr, None)
81 return self.get(attr, None)
81 __setattr__ = dict.__setitem__
82 __setattr__ = dict.__setitem__
82 __delattr__ = dict.__delitem__
83 __delattr__ = dict.__delitem__
84
85
86 def sha1(val):
87 return hashlib.sha1(val).hexdigest()
88
89
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now