release: Merge default into stable for release preparation
marcink -
r810:8ec05ec7 merge stable
@@ -0,0 +1,8 b''
1 ## special libraries we could extend the requirements.txt file with to add some
2 ## custom libraries useful for debug and memory tracing
3
4 ## uncomment inclusion of this file in requirements.txt run make generate-pkgs and nix-shell
5
6 objgraph
7 memory-profiler
8 pympler
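
These optional packages are only useful for debugging and memory tracing. A minimal sketch of a profiling session with them (the function name leaky_func is hypothetical; the library calls are the documented entry points of objgraph, memory-profiler and pympler):

import objgraph
from memory_profiler import profile
from pympler import asizeof

@profile  # prints a line-by-line memory report when the function runs
def leaky_func():
    return [object() for _ in range(100000)]

objects = leaky_func()
objgraph.show_most_common_types(limit=10)  # most common types on the heap
print(asizeof.asizeof(objects))  # deep size in bytes, including referents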
@@ -0,0 +1,27 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 counter = 0
22
23
24 def get_request_counter(request):
25 global counter
26 counter += 1
27 return counter
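
A sketch of how this counter might be wired into the Pyramid application; add_request_method is standard Pyramid API, but the attribute name request_count and the wiring location are assumptions, not part of this diff:

from pyramid.config import Configurator

def make_app(global_config, **settings):  # hypothetical app factory
    config = Configurator(settings=settings)
    # expose the per-process counter as request.request_count,
    # computed once per request and cached on it (reify=True)
    config.add_request_method(get_request_counter, 'request_count', reify=True)
    return config.make_wsgi_app()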
@@ -0,0 +1,19 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,32 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19 class RemoteBase(object):
20 EMPTY_COMMIT = '0' * 40
21
22 @property
23 def region(self):
24 return self._factory._cache_region
25
26 def _cache_on(self, wire):
27 context = wire.get('context', '')
28 context_uid = '{}'.format(context)
29 repo_id = wire.get('repo_id', '')
30 cache = wire.get('cache', True)
31 cache_on = context and cache
32 return cache_on, context_uid, repo_id
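
A sketch of how a remote backend might use _cache_on, assuming the rc_cache region exposes a dogpile-style conditional_cache_on_arguments decorator; the GitRemote class and its method body below are hypothetical:

class GitRemote(RemoteBase):
    def is_empty(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _is_empty(_context_uid, _repo_id):
            # the uid/id arguments only shape the cache key; the
            # expensive repository call happens inside
            repo = self._factory.repo(wire)
            return len(repo.get_refs()) == 0  # hypothetical emptiness check

        return _is_empty(context_uid, repo_id)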
@@ -1,6 +1,6 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.17.4
2 current_version = 4.18.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
6
6
@@ -1,16 +1,14 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.17.4
13 version = 4.18.0
16
14
@@ -1,87 +1,237 b''
1 ################################################################################
1 ## -*- coding: utf-8 -*-
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 ################################################################################
4
2
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5
6
6 [server:main]
7 [server:main]
7 ## COMMON ##
8 ; COMMON HOST/IP CONFIG
8 host = 0.0.0.0
9 host = 0.0.0.0
9 port = 9900
10 port = 9900
10
11
12 ; ##################################################
13 ; WAITRESS WSGI SERVER - Recommended for Development
14 ; ##################################################
15
16 ; use server type
11 use = egg:waitress#main
17 use = egg:waitress#main
12
18
19 ; number of worker threads
20 threads = 5
21
22 ; MAX BODY SIZE 100GB
23 max_request_body_size = 107374182400
24
25 ; Use poll instead of select, fixes file descriptors limits problems.
26 ; May not work on old windows systems.
27 asyncore_use_poll = true
28
29
30 ; ###########################
31 ; GUNICORN APPLICATION SERVER
32 ; ###########################
33
34 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
35
36 ; Module to use, this setting shouldn't be changed
37 #use = egg:gunicorn#main
38
39 ; Sets the number of process workers. More workers means more concurrent connections
40 ; RhodeCode can handle at the same time. Each additional worker also increases
41 ; memory usage, as each has its own set of caches.
42 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers, but no more
43 ; than 8-10 unless for really big deployments, e.g. 700-1000 users.
44 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
45 ; when using more than 1 worker.
46 #workers = 2
47
48 ; Gunicorn access log level
49 #loglevel = info
50
51 ; Process name visible in process list
52 #proc_name = rhodecode_vcsserver
53
54 ; Type of worker class, one of `sync`, `gevent`
55 ; currently `sync` is the only option allowed.
56 #worker_class = sync
57
58 ; The maximum number of simultaneous clients. Valid only for gevent
59 #worker_connections = 10
60
61 ; Max number of requests that worker will handle before being gracefully restarted.
62 ; Prevents memory leaks; jitter adds variability so not all workers are restarted at once.
63 #max_requests = 1000
64 #max_requests_jitter = 30
65
66 ; Amount of time a worker can spend with handling a request before it
67 ; gets killed and restarted. By default set to 21600 (6hrs)
68 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
69 #timeout = 21600
70
71 ; The maximum size of HTTP request line in bytes.
72 ; 0 for unlimited
73 #limit_request_line = 0
74
75 ; Limit the number of HTTP headers fields in a request.
76 ; By default this value is 100 and can't be larger than 32768.
77 #limit_request_fields = 32768
78
79 ; Limit the allowed size of an HTTP request header field.
80 ; Value is a positive number or 0.
81 ; Setting it to 0 will allow unlimited header field sizes.
82 #limit_request_field_size = 0
83
84 ; Timeout for graceful workers restart.
85 ; After receiving a restart signal, workers have this much time to finish
86 ; serving requests. Workers still alive after the timeout (starting from the
87 ; receipt of the restart signal) are force killed.
88 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
89 #graceful_timeout = 3600
90
91 # The number of seconds to wait for requests on a Keep-Alive connection.
92 # Generally set in the 1-5 seconds range.
93 #keepalive = 2
94
95 ; Maximum memory usage that each worker can use before it will receive a
96 ; graceful restart signal. 0 = memory monitoring is disabled
97 ; Examples: 268435456 (256MB), 536870912 (512MB)
98 ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
99 #memory_max_usage = 0
100
101 ; How often in seconds to check for memory usage for each gunicorn worker
102 #memory_usage_check_interval = 60
103
104 ; Threshold under which we don't recycle a worker if garbage collection
105 ; frees up enough resources. Before each restart we try to run GC on the worker;
106 ; if that frees enough memory, the restart will not happen.
107 #memory_usage_recovery_threshold = 0.8
108
13
109
14 [app:main]
110 [app:main]
111 ; The %(here)s variable will be replaced with the absolute path of the parent directory
112 ; of this file
15 use = egg:rhodecode-vcsserver
113 use = egg:rhodecode-vcsserver
16
114
17 pyramid.default_locale_name = en
115
116 ; #############
117 ; DEBUG OPTIONS
118 ; #############
119
120 # During development we want to have the debug toolbar enabled
18 pyramid.includes =
121 pyramid.includes =
122 pyramid_debugtoolbar
19
123
20 ## default locale used by VCS systems
124 debugtoolbar.hosts = 0.0.0.0/0
125 debugtoolbar.exclude_prefixes =
126 /css
127 /fonts
128 /images
129 /js
130
131 ; #################
132 ; END DEBUG OPTIONS
133 ; #################
134
135 ; Pyramid default locales, we need this to be set
136 pyramid.default_locale_name = en
137
138 ; default locale used by VCS systems
21 locale = en_US.UTF-8
139 locale = en_US.UTF-8
22
140
23
141 ; path to binaries for vcsserver, it should be set by the installer
24 ## path to binaries for vcsserver, it should be set by the installer
142 ; at installation time, e.g /home/user/vcsserver-1/profile/bin
25 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
143 ; it can also be a path to nix-build output in case of development
26 core.binary_dir = ""
144 core.binary_dir = ""
27
145
28 ## Custom exception store path, defaults to TMPDIR
146 ; Custom exception store path, defaults to TMPDIR
29 ## This is used to store exception from RhodeCode in shared directory
147 ; This is used to store exceptions from RhodeCode in a shared directory
30 #exception_tracker.store_path =
148 #exception_tracker.store_path =
31
149
32 ## Default cache dir for caches. Putting this into a ramdisk
150 ; #############
33 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
151 ; DOGPILE CACHE
34 ## large amount of space
152 ; #############
35 cache_dir = %(here)s/rcdev/data
153
154 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
155 ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
156 cache_dir = %(here)s/data
157
158 ; ***************************************
159 ; `repo_object` cache, default file based
160 ; ***************************************
161
162 ; `repo_object` cache settings for vcs methods for repositories
163 rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
164
165 ; cache auto-expires after N seconds
166 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
167 rc_cache.repo_object.expiration_time = 2592000
168
169 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
170 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
36
171
37 ## cache region for storing repo_objects cache
172 ; ***********************************************************
38 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
173 ; `repo_object` cache with redis backend
39 ## cache auto-expires after N seconds
174 ; recommended for larger instances and for better performance
40 rc_cache.repo_object.expiration_time = 300
175 ; ***********************************************************
41 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
176
42 rc_cache.repo_object.max_size = 100
177 ; `repo_object` cache settings for vcs methods for repositories
178 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
179
180 ; cache auto-expires after N seconds
181 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
182 #rc_cache.repo_object.expiration_time = 2592000
183
184 ; redis_expiration_time needs to be greater than expiration_time
185 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
186
187 #rc_cache.repo_object.arguments.host = localhost
188 #rc_cache.repo_object.arguments.port = 6379
189 #rc_cache.repo_object.arguments.db = 5
190 #rc_cache.repo_object.arguments.socket_timeout = 30
191 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
192 #rc_cache.repo_object.arguments.distributed_lock = true
43
193
44
194
45 ################################
195 ; #####################
46 ### LOGGING CONFIGURATION ####
196 ; LOGGING CONFIGURATION
47 ################################
197 ; #####################
48 [loggers]
198 [loggers]
49 keys = root, vcsserver
199 keys = root, vcsserver
50
200
51 [handlers]
201 [handlers]
52 keys = console
202 keys = console
53
203
54 [formatters]
204 [formatters]
55 keys = generic
205 keys = generic
56
206
57 #############
207 ; #######
58 ## LOGGERS ##
208 ; LOGGERS
59 #############
209 ; #######
60 [logger_root]
210 [logger_root]
61 level = NOTSET
211 level = NOTSET
62 handlers = console
212 handlers = console
63
213
64 [logger_vcsserver]
214 [logger_vcsserver]
65 level = DEBUG
215 level = DEBUG
66 handlers =
216 handlers =
67 qualname = vcsserver
217 qualname = vcsserver
68 propagate = 1
218 propagate = 1
69
219
70
220
71 ##############
221 ; ########
72 ## HANDLERS ##
222 ; HANDLERS
73 ##############
223 ; ########
74
224
75 [handler_console]
225 [handler_console]
76 class = StreamHandler
226 class = StreamHandler
77 args = (sys.stderr,)
227 args = (sys.stderr, )
78 level = DEBUG
228 level = DEBUG
79 formatter = generic
229 formatter = generic
80
230
81 ################
231 ; ##########
82 ## FORMATTERS ##
232 ; FORMATTERS
83 ################
233 ; ##########
84
234
85 [formatter_generic]
235 [formatter_generic]
86 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
236 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
87 datefmt = %Y-%m-%d %H:%M:%S
237 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,154 +1,265 b''
1 """
1 """
2 gunicorn config extension and hooks. Sets additional configuration that is
2 Gunicorn config extension and hooks. This config file adds some extra settings and memory management.
3 available post the .ini config.
3 Gunicorn configuration should be managed by the .ini file entries of RhodeCode or VCSServer
4
5 - workers = ${cpu_number}
6 - threads = 1
7 - proc_name = ${gunicorn_proc_name}
8 - worker_class = sync
9 - worker_connections = 10
10 - max_requests = 1000
11 - max_requests_jitter = 30
12 - timeout = 21600
13
14 """
4 """
15
5
16 import multiprocessing
6 import gc
7 import os
17 import sys
8 import sys
9 import math
18 import time
10 import time
19 import datetime
20 import threading
11 import threading
21 import traceback
12 import traceback
13 import random
22 from gunicorn.glogging import Logger
14 from gunicorn.glogging import Logger
23
15
24
16
17 def get_workers():
18 import multiprocessing
19 return multiprocessing.cpu_count() * 2 + 1
20
25 # GLOBAL
21 # GLOBAL
26 errorlog = '-'
22 errorlog = '-'
27 accesslog = '-'
23 accesslog = '-'
28 loglevel = 'debug'
29
30 # SECURITY
31
32 # The maximum size of HTTP request line in bytes.
33 # 0 for unlimited
34 limit_request_line = 0
35
36 # Limit the number of HTTP headers fields in a request.
37 # By default this value is 100 and can't be larger than 32768.
38 limit_request_fields = 10240
39
40 # Limit the allowed size of an HTTP request header field.
41 # Value is a positive number or 0.
42 # Setting it to 0 will allow unlimited header field sizes.
43 limit_request_field_size = 0
44
45
46 # Timeout for graceful workers restart.
47 # After receiving a restart signal, workers have this much time to finish
48 # serving requests. Workers still alive after the timeout (starting from the
49 # receipt of the restart signal) are force killed.
50 graceful_timeout = 30
51
52
53 # The number of seconds to wait for requests on a Keep-Alive connection.
54 # Generally set in the 1-5 seconds range.
55 keepalive = 2
56
24
57
25
58 # SERVER MECHANICS
26 # SERVER MECHANICS
59 # None == system temp dir
27 # None == system temp dir
60 # worker_tmp_dir is recommended to be set to some tmpfs
28 # worker_tmp_dir is recommended to be set to some tmpfs
61 worker_tmp_dir = None
29 worker_tmp_dir = None
62 tmp_upload_dir = None
30 tmp_upload_dir = None
63
31
64 # Custom log format
32 # Custom log format
65 access_log_format = (
33 access_log_format = (
66 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
34 '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
67
35
68 # self adjust workers based on CPU count
36 # self adjust workers based on CPU count
69 # workers = multiprocessing.cpu_count() * 2 + 1
37 # workers = get_workers()
38
39
40 def _get_process_rss(pid=None):
41 try:
42 import psutil
43 if pid:
44 proc = psutil.Process(pid)
45 else:
46 proc = psutil.Process()
47 return proc.memory_info().rss
48 except Exception:
49 return None
70
50
71
51
72 def post_fork(server, worker):
52 def _get_config(ini_path):
73 server.log.info("[<%-10s>] WORKER spawned", worker.pid)
53
54 try:
55 import configparser
56 except ImportError:
57 import ConfigParser as configparser
58 try:
59 config = configparser.RawConfigParser()
60 config.read(ini_path)
61 return config
62 except Exception:
63 return None
64
65
66 def _time_with_offset(memory_usage_check_interval):
67 return time.time() - random.randint(0, memory_usage_check_interval/2.0)
74
68
75
69
76 def pre_fork(server, worker):
70 def pre_fork(server, worker):
77 pass
71 pass
78
72
79
73
74 def post_fork(server, worker):
75
76 # memory spec defaults
77 _memory_max_usage = 0
78 _memory_usage_check_interval = 60
79 _memory_usage_recovery_threshold = 0.8
80
81 ini_path = os.path.abspath(server.cfg.paste)
82 conf = _get_config(ini_path)
83
84 section = 'server:main'
85 if conf and conf.has_section(section):
86
87 if conf.has_option(section, 'memory_max_usage'):
88 _memory_max_usage = conf.getint(section, 'memory_max_usage')
89
90 if conf.has_option(section, 'memory_usage_check_interval'):
91 _memory_usage_check_interval = conf.getint(section, 'memory_usage_check_interval')
92
93 if conf.has_option(section, 'memory_usage_recovery_threshold'):
94 _memory_usage_recovery_threshold = conf.getfloat(section, 'memory_usage_recovery_threshold')
95
96 worker._memory_max_usage = _memory_max_usage
97 worker._memory_usage_check_interval = _memory_usage_check_interval
98 worker._memory_usage_recovery_threshold = _memory_usage_recovery_threshold
99
100 # register memory last check time, with some random offset so we don't recycle all
101 # at once
102 worker._last_memory_check_time = _time_with_offset(_memory_usage_check_interval)
103
104 if _memory_max_usage:
105 server.log.info("[%-10s] WORKER spawned with max memory set at %s", worker.pid,
106 _format_data_size(_memory_max_usage))
107 else:
108 server.log.info("[%-10s] WORKER spawned", worker.pid)
109
110
80 def pre_exec(server):
111 def pre_exec(server):
81 server.log.info("Forked child, re-executing.")
112 server.log.info("Forked child, re-executing.")
82
113
83
114
84 def on_starting(server):
115 def on_starting(server):
85 server.log.info("Server is starting.")
116 server_lbl = '{} {}'.format(server.proc_name, server.address)
117 server.log.info("Server %s is starting.", server_lbl)
86
118
87
119
88 def when_ready(server):
120 def when_ready(server):
89 server.log.info("Server is ready. Spawning workers")
121 server.log.info("Server %s is ready. Spawning workers", server)
90
122
91
123
92 def on_reload(server):
124 def on_reload(server):
93 pass
125 pass
94
126
95
127
128 def _format_data_size(size, unit="B", precision=1, binary=True):
129 """Format a number using SI units (kilo, mega, etc.).
130
131 ``size``: The number as a float or int.
132
133 ``unit``: The unit name in plural form. Examples: "bytes", "B".
134
135 ``precision``: How many digits to the right of the decimal point. Default
136 is 1. 0 suppresses the decimal point.
137
138 ``binary``: If false, use base-10 decimal prefixes (kilo = K = 1000).
139 If true, use base-2 binary prefixes (kibi = Ki = 1024).
140
141 ``full_name``: If false (default), use the prefix abbreviation ("k" or
142 "Ki"). If true, use the full prefix ("kilo" or "kibi").
144
145 """
146
147 if not binary:
148 base = 1000
149 multiples = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
150 else:
151 base = 1024
152 multiples = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')
153
154 sign = ""
155 if size > 0:
156 m = int(math.log(size, base))
157 elif size < 0:
158 sign = "-"
159 size = -size
160 m = int(math.log(size, base))
161 else:
162 m = 0
163 if m > 8:
164 m = 8
165
166 if m == 0:
167 precision = '%.0f'
168 else:
169 precision = '%%.%df' % precision
170
171 size = precision % (size / math.pow(base, m))
172
173 return '%s%s %s%s' % (sign, size.strip(), multiples[m], unit)
174
175
176 def _check_memory_usage(worker):
177 memory_max_usage = worker._memory_max_usage
178 if not memory_max_usage:
179 return
180
181 memory_usage_check_interval = worker._memory_usage_check_interval
182 memory_usage_recovery_threshold = memory_max_usage * worker._memory_usage_recovery_threshold
183
184 elapsed = time.time() - worker._last_memory_check_time
185 if elapsed > memory_usage_check_interval:
186 mem_usage = _get_process_rss()
187 if mem_usage and mem_usage > memory_max_usage:
188 worker.log.info(
189 "memory usage %s > %s, forcing gc",
190 _format_data_size(mem_usage), _format_data_size(memory_max_usage))
191 # Try to clean it up by forcing a full collection.
192 gc.collect()
193 mem_usage = _get_process_rss()
194 if mem_usage > memory_usage_recovery_threshold:
195 # Didn't clean up enough, we'll have to terminate.
196 worker.log.warning(
197 "memory usage %s > %s after gc, quitting",
198 _format_data_size(mem_usage), _format_data_size(memory_max_usage))
199 # This will cause worker to auto-restart itself
200 worker.alive = False
201 worker._last_memory_check_time = time.time()
202
203
96 def worker_int(worker):
204 def worker_int(worker):
97 worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)
205 worker.log.info("[%-10s] worker received INT or QUIT signal", worker.pid)
98
206
99 # get traceback info, on worker crash
207 # get traceback info, on worker crash
100 id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
208 id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
101 code = []
209 code = []
102 for thread_id, stack in sys._current_frames().items():
210 for thread_id, stack in sys._current_frames().items():
103 code.append(
211 code.append(
104 "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
212 "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
105 for fname, lineno, name, line in traceback.extract_stack(stack):
213 for fname, lineno, name, line in traceback.extract_stack(stack):
106 code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
214 code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
107 if line:
215 if line:
108 code.append(" %s" % (line.strip()))
216 code.append(" %s" % (line.strip()))
109 worker.log.debug("\n".join(code))
217 worker.log.debug("\n".join(code))
110
218
111
219
112 def worker_abort(worker):
220 def worker_abort(worker):
113 worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid)
221 worker.log.info("[%-10s] worker received SIGABRT signal", worker.pid)
114
222
115
223
116 def worker_exit(server, worker):
224 def worker_exit(server, worker):
117 worker.log.info("[<%-10s>] worker exit", worker.pid)
225 worker.log.info("[%-10s] worker exit", worker.pid)
118
226
119
227
120 def child_exit(server, worker):
228 def child_exit(server, worker):
121 worker.log.info("[<%-10s>] worker child exit", worker.pid)
229 worker.log.info("[%-10s] worker child exit", worker.pid)
122
230
123
231
124 def pre_request(worker, req):
232 def pre_request(worker, req):
125 worker.start_time = time.time()
233 worker.start_time = time.time()
126 worker.log.debug(
234 worker.log.debug(
127 "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
235 "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
128
236
129
237
130 def post_request(worker, req, environ, resp):
238 def post_request(worker, req, environ, resp):
131 total_time = time.time() - worker.start_time
239 total_time = time.time() - worker.start_time
240 # Gunicorn sometimes has problems with reading the status_code
241 status_code = getattr(resp, 'status_code', '')
132 worker.log.debug(
242 worker.log.debug(
133 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
243 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.4fs",
134 worker.nr, req.method, req.path, resp.status_code, total_time)
244 worker.nr, req.method, req.path, status_code, total_time)
245 _check_memory_usage(worker)
135
246
136
247
137 class RhodeCodeLogger(Logger):
248 class RhodeCodeLogger(Logger):
138 """
249 """
139 Custom Logger that allows some customization that gunicorn doesn't allow
250 Custom Logger that allows some customization that gunicorn doesn't allow
140 """
251 """
141
252
142 datefmt = r"%Y-%m-%d %H:%M:%S"
253 datefmt = r"%Y-%m-%d %H:%M:%S"
143
254
144 def __init__(self, cfg):
255 def __init__(self, cfg):
145 Logger.__init__(self, cfg)
256 Logger.__init__(self, cfg)
146
257
147 def now(self):
258 def now(self):
148 """ return date in RhodeCode Log format """
259 """ return date in RhodeCode Log format """
149 now = time.time()
260 now = time.time()
150 msecs = int((now - long(now)) * 1000)
261 msecs = int((now - long(now)) * 1000)
151 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
262 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
152
263
153
264
154 logger_class = RhodeCodeLogger
265 logger_class = RhodeCodeLogger
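
For reference, a few worked values from _format_data_size as defined above:

# _format_data_size(268435456)               -> '256.0 MiB'  (base 1024)
# _format_data_size(268435456, binary=False) -> '268.4 MB'   (base 1000)
# _format_data_size(0)                       -> '0 B'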
@@ -1,108 +1,200 b''
1 ################################################################################
1 ## -*- coding: utf-8 -*-
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 ################################################################################
4
2
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5
6
6 [server:main]
7 [server:main]
7 ## COMMON ##
8 ; COMMON HOST/IP CONFIG
8 host = 127.0.0.1
9 host = 127.0.0.1
9 port = 9900
10 port = 9900
10
11
11
12
12 ##########################
13 ; ###########################
13 ## GUNICORN WSGI SERVER ##
14 ; GUNICORN APPLICATION SERVER
14 ##########################
15 ; ###########################
15 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
16
17 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
18
19 ; Module to use, this setting shouldn't be changed
16 use = egg:gunicorn#main
20 use = egg:gunicorn#main
17 ## Sets the number of process workers. Recommended
21
18 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
22 ; Sets the number of process workers. More workers means more concurrent connections
23 ; RhodeCode can handle at the same time. Each additional worker also increases
24 ; memory usage, as each has its own set of caches.
25 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs = 5 workers, but no more
26 ; than 8-10 unless for really big deployments, e.g. 700-1000 users.
27 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
28 ; when using more than 1 worker.
19 workers = 2
29 workers = 2
20 ## process name
30
31 ; Gunicorn access log level
32 loglevel = info
33
34 ; Process name visible in process list
21 proc_name = rhodecode_vcsserver
35 proc_name = rhodecode_vcsserver
22 ## type of worker class, currently `sync` is the only option allowed.
36
37 ; Type of worker class, one of `sync`, `gevent`
38 ; currently `sync` is the only option allowed.
23 worker_class = sync
39 worker_class = sync
24 ## The maximum number of simultaneous clients. Valid only for Gevent
40
25 #worker_connections = 10
41 ; The maximum number of simultaneous clients. Valid only for gevent
26 ## max number of requests that worker will handle before being gracefully
42 worker_connections = 10
27 ## restarted, could prevent memory leaks
43
44 ; Max number of requests that worker will handle before being gracefully restarted.
45 ; Prevents memory leaks; jitter adds variability so not all workers are restarted at once.
28 max_requests = 1000
46 max_requests = 1000
29 max_requests_jitter = 30
47 max_requests_jitter = 30
30 ## amount of time a worker can spend with handling a request before it
48
31 ## gets killed and restarted. Set to 6hrs
49 ; Amount of time a worker can spend with handling a request before it
50 ; gets killed and restarted. By default set to 21600 (6hrs)
51 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
32 timeout = 21600
52 timeout = 21600
33
53
54 ; The maximum size of HTTP request line in bytes.
55 ; 0 for unlimited
56 limit_request_line = 0
57
58 ; Limit the number of HTTP headers fields in a request.
59 ; By default this value is 100 and can't be larger than 32768.
60 limit_request_fields = 32768
61
62 ; Limit the allowed size of an HTTP request header field.
63 ; Value is a positive number or 0.
64 ; Setting it to 0 will allow unlimited header field sizes.
65 limit_request_field_size = 0
66
67 ; Timeout for graceful workers restart.
68 ; After receiving a restart signal, workers have this much time to finish
69 ; serving requests. Workers still alive after the timeout (starting from the
70 ; receipt of the restart signal) are force killed.
71 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
72 graceful_timeout = 3600
73
74 # The number of seconds to wait for requests on a Keep-Alive connection.
75 # Generally set in the 1-5 seconds range.
76 keepalive = 2
77
78 ; Maximum memory usage that each worker can use before it will receive a
79 ; graceful restart signal. 0 = memory monitoring is disabled
80 ; Examples: 268435456 (256MB), 536870912 (512MB)
81 ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
82 memory_max_usage = 0
83
84 ; How often in seconds to check for memory usage for each gunicorn worker
85 memory_usage_check_interval = 60
86
87 ; Threshold under which we don't recycle a worker if garbage collection
88 ; frees up enough resources. Before each restart we try to run GC on the worker;
89 ; if that frees enough memory, the restart will not happen.
90 memory_usage_recovery_threshold = 0.8
91
34
92
35 [app:main]
93 [app:main]
94 ; The %(here)s variable will be replaced with the absolute path of the parent directory
95 ; of this file
36 use = egg:rhodecode-vcsserver
96 use = egg:rhodecode-vcsserver
37
97
98 ; Pyramid default locales, we need this to be set
38 pyramid.default_locale_name = en
99 pyramid.default_locale_name = en
39 pyramid.includes =
40
100
41 ## default locale used by VCS systems
101 ; default locale used by VCS systems
42 locale = en_US.UTF-8
102 locale = en_US.UTF-8
43
103
44
104 ; path to binaries for vcsserver, it should be set by the installer
45 ## path to binaries for vcsserver, it should be set by the installer
105 ; at installation time, e.g /home/user/vcsserver-1/profile/bin
46 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
106 ; it can also be a path to nix-build output in case of development
47 core.binary_dir = ""
107 core.binary_dir = ""
48
108
49 ## Custom exception store path, defaults to TMPDIR
109 ; Custom exception store path, defaults to TMPDIR
50 ## This is used to store exception from RhodeCode in shared directory
110 ; This is used to store exceptions from RhodeCode in a shared directory
51 #exception_tracker.store_path =
111 #exception_tracker.store_path =
52
112
53 ## Default cache dir for caches. Putting this into a ramdisk
113 ; #############
54 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
114 ; DOGPILE CACHE
55 ## large amount of space
115 ; #############
56 cache_dir = %(here)s/rcdev/data
116
117 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
118 ; e.g. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
119 cache_dir = %(here)s/data
120
121 ; ***************************************
122 ; `repo_object` cache, default file based
123 ; ***************************************
124
125 ; `repo_object` cache settings for vcs methods for repositories
126 rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
127
128 ; cache auto-expires after N seconds
129 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
130 rc_cache.repo_object.expiration_time = 2592000
131
132 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
133 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
57
134
58 ## cache region for storing repo_objects cache
135 ; ***********************************************************
59 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
136 ; `repo_object` cache with redis backend
60 ## cache auto-expires after N seconds
137 ; recommended for larger instances and for better performance
61 rc_cache.repo_object.expiration_time = 300
138 ; ***********************************************************
62 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
139
63 rc_cache.repo_object.max_size = 100
140 ; `repo_object` cache settings for vcs methods for repositories
141 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
142
143 ; cache auto-expires after N seconds
144 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
145 #rc_cache.repo_object.expiration_time = 2592000
146
147 ; redis_expiration_time needs to be greater than expiration_time
148 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
149
150 #rc_cache.repo_object.arguments.host = localhost
151 #rc_cache.repo_object.arguments.port = 6379
152 #rc_cache.repo_object.arguments.db = 5
153 #rc_cache.repo_object.arguments.socket_timeout = 30
154 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
155 #rc_cache.repo_object.arguments.distributed_lock = true
64
156
65
157
66 ################################
158 ; #####################
67 ### LOGGING CONFIGURATION ####
159 ; LOGGING CONFIGURATION
68 ################################
160 ; #####################
69 [loggers]
161 [loggers]
70 keys = root, vcsserver
162 keys = root, vcsserver
71
163
72 [handlers]
164 [handlers]
73 keys = console
165 keys = console
74
166
75 [formatters]
167 [formatters]
76 keys = generic
168 keys = generic
77
169
78 #############
170 ; #######
79 ## LOGGERS ##
171 ; LOGGERS
80 #############
172 ; #######
81 [logger_root]
173 [logger_root]
82 level = NOTSET
174 level = NOTSET
83 handlers = console
175 handlers = console
84
176
85 [logger_vcsserver]
177 [logger_vcsserver]
86 level = DEBUG
178 level = DEBUG
87 handlers =
179 handlers =
88 qualname = vcsserver
180 qualname = vcsserver
89 propagate = 1
181 propagate = 1
90
182
91
183
92 ##############
184 ; ########
93 ## HANDLERS ##
185 ; HANDLERS
94 ##############
186 ; ########
95
187
96 [handler_console]
188 [handler_console]
97 class = StreamHandler
189 class = StreamHandler
98 args = (sys.stderr,)
190 args = (sys.stderr, )
99 level = DEBUG
191 level = INFO
100 formatter = generic
192 formatter = generic
101
193
102 ################
194 ; ##########
103 ## FORMATTERS ##
195 ; FORMATTERS
104 ################
196 ; ##########
105
197
106 [formatter_generic]
198 [formatter_generic]
107 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
199 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
108 datefmt = %Y-%m-%d %H:%M:%S
200 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,47 +1,71 b''
1 self: super: {
1 self: super: {
2
2 # bump GIT version
3 # bump GIT version
3 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 name = "git-2.19.2";
5 name = "git-2.24.1";
5 src = self.fetchurl {
6 src = self.fetchurl {
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.19.2.tar.xz";
7 url = "https://www.kernel.org/pub/software/scm/git/git-2.24.1.tar.xz";
7 sha256 = "1scbggzghkzzfqg4ky3qh7h9w87c3zya4ls5disz7dbx56is7sgw";
8 sha256 = "0ql5z31vgl7b785gwrf00m129mg7zi9pa65n12ij3mpxx3f28gvj";
8 };
9 };
9
10
10 # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
11 # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
11 patches = [
12 patches = [
12 ./patches/git/docbook2texi.patch
13 ./patches/git/docbook2texi.patch
13 ./patches/git/git-sh-i18n.patch
14 ./patches/git/git-sh-i18n.patch
14 ./patches/git/ssh-path.patch
15 ./patches/git/ssh-path.patch
15 ./patches/git/git-send-email-honor-PATH.patch
16 ./patches/git/git-send-email-honor-PATH.patch
16 ./patches/git/installCheck-path.patch
17 ./patches/git/installCheck-path.patch
17 ];
18 ];
18
19
19 });
20 });
20
21
22 libgit2rc = super.lib.overrideDerivation super.libgit2 (oldAttrs: {
23 name = "libgit2-0.28.2";
24 version = "0.28.2";
25
26 src = self.fetchFromGitHub {
27 owner = "libgit2";
28 repo = "libgit2";
29 rev = "v0.28.2";
30 sha256 = "0cm8fvs05rj0baigs2133q5a0sm3pa234y8h6hmwhl2bz9xq3k4b";
31 };
32
33 cmakeFlags = [ "-DTHREADSAFE=ON" "-DUSE_HTTPS=no"];
34
35 buildInputs = [
36 super.zlib
37 super.libssh2
38 super.openssl
39 super.curl
40 ];
41
42
43 });
44
21 # Override subversion derivation to
45 # Override subversion derivation to
22 # - activate python bindings
46 # - activate python bindings
23 subversion =
47 subversion =
24 let
48 let
25 subversionWithPython = super.subversion.override {
49 subversionWithPython = super.subversion.override {
26 httpSupport = true;
50 httpSupport = true;
27 pythonBindings = true;
51 pythonBindings = true;
28 python = self.python27Packages.python;
52 python = self.python27Packages.python;
29 };
53 };
30 in
54 in
31 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
55 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
32 name = "subversion-1.10.2";
56 name = "subversion-1.12.2";
33 src = self.fetchurl {
57 src = self.fetchurl {
34 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
58 url = "https://archive.apache.org/dist/subversion/subversion-1.12.2.tar.gz";
35 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
59 sha256 = "1wr1pklnq67xdzmf237zj6l1hg43yshfkbxvpvd5sv6r0dk7v4pl";
36 };
60 };
37
61
38 ## use internal lz4/utf8proc because it is stable and shipped with SVN
62 ## use internal lz4/utf8proc because it is stable and shipped with SVN
39 configureFlags = oldAttrs.configureFlags ++ [
63 configureFlags = oldAttrs.configureFlags ++ [
40 " --with-lz4=internal"
64 " --with-lz4=internal"
41 " --with-utf8proc=internal"
65 " --with-utf8proc=internal"
42 ];
66 ];
43
67
44
45 });
68 });
46
69
70
47 }
71 }
@@ -1,37 +1,38 b''
1 This patch does two things: (1) use the right name for `docbook2texi',
1 This patch does two things: (1) use the right name for `docbook2texi',
2 and (2) make sure `gitman.info' isn't produced since it's broken (duplicate
2 and (2) make sure `gitman.info' isn't produced since it's broken (duplicate
3 node names).
3 node names).
4
4
5 diff --git a/Documentation/Makefile b/Documentation/Makefile
5 diff --git a/Documentation/Makefile b/Documentation/Makefile
6 index 26a2342bea..ceccd67ebb 100644
6 --- a/Documentation/Makefile
7 --- a/Documentation/Makefile
7 +++ b/Documentation/Makefile
8 +++ b/Documentation/Makefile
8 @@ -122,7 +122,7 @@
9 @@ -132,7 +132,7 @@ HTML_REPO = ../../git-htmldocs
9
10
10 MAKEINFO = makeinfo
11 MAKEINFO = makeinfo
11 INSTALL_INFO = install-info
12 INSTALL_INFO = install-info
12 -DOCBOOK2X_TEXI = docbook2x-texi
13 -DOCBOOK2X_TEXI = docbook2x-texi
13 +DOCBOOK2X_TEXI = docbook2texi
14 +DOCBOOK2X_TEXI = docbook2texi
14 DBLATEX = dblatex
15 DBLATEX = dblatex
15 ASCIIDOC_DBLATEX_DIR = /etc/asciidoc/dblatex
16 ASCIIDOC_DBLATEX_DIR = /etc/asciidoc/dblatex
16 DBLATEX_COMMON = -p $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.xsl -s $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.sty
17 DBLATEX_COMMON = -p $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.xsl -s $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.sty
17 @@ -240,7 +240,7 @@
18 @@ -250,7 +250,7 @@ man1: $(DOC_MAN1)
18 man5: $(DOC_MAN5)
19 man5: $(DOC_MAN5)
19 man7: $(DOC_MAN7)
20 man7: $(DOC_MAN7)
20
21
21 -info: git.info gitman.info
22 -info: git.info gitman.info
22 +info: git.info
23 +info: git.info
23
24
24 pdf: user-manual.pdf
25 pdf: user-manual.pdf
25
26
26 @@ -256,10 +256,9 @@
27 @@ -266,10 +266,9 @@ install-man: man
27
28
28 install-info: info
29 install-info: info
29 $(INSTALL) -d -m 755 $(DESTDIR)$(infodir)
30 $(INSTALL) -d -m 755 $(DESTDIR)$(infodir)
30 - $(INSTALL) -m 644 git.info gitman.info $(DESTDIR)$(infodir)
31 - $(INSTALL) -m 644 git.info gitman.info $(DESTDIR)$(infodir)
31 + $(INSTALL) -m 644 git.info $(DESTDIR)$(infodir)
32 + $(INSTALL) -m 644 git.info $(DESTDIR)$(infodir)
32 if test -r $(DESTDIR)$(infodir)/dir; then \
33 if test -r $(DESTDIR)$(infodir)/dir; then \
33 $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) git.info ;\
34 $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) git.info ;\
34 - $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) gitman.info ;\
35 - $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) gitman.info ;\
35 else \
36 else \
36 echo "No directory found in $(DESTDIR)$(infodir)" >&2 ; \
37 echo "No directory found in $(DESTDIR)$(infodir)" >&2 ; \
37 fi
38 fi
@@ -1,26 +1,28 b''
1 diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt
1 diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt
2 index 1afe9fc858..05dd7c3a90 100644
2 --- a/Documentation/git-send-email.txt
3 --- a/Documentation/git-send-email.txt
3 +++ b/Documentation/git-send-email.txt
4 +++ b/Documentation/git-send-email.txt
4 @@ -208,8 +208,7 @@ a password is obtained using 'git-credential'.
5 @@ -215,8 +215,7 @@ a password is obtained using 'git-credential'.
5 specify a full pathname of a sendmail-like program instead;
6 specify a full pathname of a sendmail-like program instead;
6 the program must support the `-i` option. Default value can
7 the program must support the `-i` option. Default value can
7 be specified by the `sendemail.smtpServer` configuration
8 be specified by the `sendemail.smtpServer` configuration
8 - option; the built-in default is to search for `sendmail` in
9 - option; the built-in default is to search for `sendmail` in
9 - `/usr/sbin`, `/usr/lib` and $PATH if such program is
10 - `/usr/sbin`, `/usr/lib` and $PATH if such program is
10 + option; the built-in default is to search in $PATH if such program is
11 + option; the built-in default is to search in $PATH if such program is
11 available, falling back to `localhost` otherwise.
12 available, falling back to `localhost` otherwise.
12
13
13 --smtp-server-port=<port>::
14 --smtp-server-port=<port>::
14 diff --git a/git-send-email.perl b/git-send-email.perl
15 diff --git a/git-send-email.perl b/git-send-email.perl
16 index 8eb63b5a2f..74a61d8213 100755
15 --- a/git-send-email.perl
17 --- a/git-send-email.perl
16 +++ b/git-send-email.perl
18 +++ b/git-send-email.perl
17 @@ -944,8 +944,7 @@ if (defined $reply_to) {
19 @@ -956,8 +956,7 @@ sub expand_one_alias {
18 }
20 }
19
21
20 if (!defined $smtp_server) {
22 if (!defined $smtp_server) {
21 - my @sendmail_paths = qw( /usr/sbin/sendmail /usr/lib/sendmail );
23 - my @sendmail_paths = qw( /usr/sbin/sendmail /usr/lib/sendmail );
22 - push @sendmail_paths, map {"$_/sendmail"} split /:/, $ENV{PATH};
24 - push @sendmail_paths, map {"$_/sendmail"} split /:/, $ENV{PATH};
23 + my @sendmail_paths = map {"$_/sendmail"} split /:/, $ENV{PATH};
25 + my @sendmail_paths = map {"$_/sendmail"} split /:/, $ENV{PATH};
24 foreach (@sendmail_paths) {
26 foreach (@sendmail_paths) {
25 if (-x $_) {
27 if (-x $_) {
26 $smtp_server = $_;
28 $smtp_server = $_;
@@ -1,94 +1,23 b''
1 diff --git a/git-sh-i18n.sh b/git-sh-i18n.sh
2 index e1d917fd27..e90f8e1414 100644
1 --- a/git-sh-i18n.sh
3 --- a/git-sh-i18n.sh
2 +++ b/git-sh-i18n.sh
4 +++ b/git-sh-i18n.sh
3 @@ -15,87 +15,11 @@
5 @@ -26,7 +26,7 @@ then
4 fi
6 elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS"
5 export TEXTDOMAINDIR
7 then
6
8 : no probing necessary
7 -# First decide what scheme to use...
8 -GIT_INTERNAL_GETTEXT_SH_SCHEME=fallthrough
9 -if test -n "$GIT_GETTEXT_POISON"
10 -then
11 - GIT_INTERNAL_GETTEXT_SH_SCHEME=poison
12 -elif test -n "@@USE_GETTEXT_SCHEME@@"
13 -then
14 - GIT_INTERNAL_GETTEXT_SH_SCHEME="@@USE_GETTEXT_SCHEME@@"
15 -elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS"
16 -then
17 - : no probing necessary
18 -elif type gettext.sh >/dev/null 2>&1
9 -elif type gettext.sh >/dev/null 2>&1
19 -then
10 +elif type @gettext@/bin/gettext.sh >/dev/null 2>&1
20 - # GNU libintl's gettext.sh
11 then
21 - GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
12 # GNU libintl's gettext.sh
22 -elif test "$(gettext -h 2>&1)" = "-h"
13 GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
23 -then
14 @@ -43,7 +43,8 @@ export GIT_INTERNAL_GETTEXT_SH_SCHEME
24 - # gettext binary exists but no gettext.sh. likely to be a gettext
15 case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in
25 - # binary on a Solaris or something that is not GNU libintl and
16 gnu)
26 - # lack eval_gettext.
17 # Use libintl's gettext.sh, or fall back to English if we can't.
27 - GIT_INTERNAL_GETTEXT_SH_SCHEME=gettext_without_eval_gettext
28 -fi
29 -export GIT_INTERNAL_GETTEXT_SH_SCHEME
30 -
31 -# ... and then follow that decision.
32 -case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in
33 -gnu)
34 - # Use libintl's gettext.sh, or fall back to English if we can't.
35 - . gettext.sh
18 - . gettext.sh
36 - ;;
19 + . @gettext@/bin/gettext.sh
37 -gettext_without_eval_gettext)
20 + export PATH=@gettext@/bin:$PATH
38 - # Solaris has a gettext(1) but no eval_gettext(1)
21 ;;
39 - eval_gettext () {
22 gettext_without_eval_gettext)
40 - gettext "$1" | (
23 # Solaris has a gettext(1) but no eval_gettext(1)
41 - export PATH $(git sh-i18n--envsubst --variables "$1");
42 - git sh-i18n--envsubst "$1"
43 - )
44 - }
45 -
46 - eval_ngettext () {
47 - ngettext "$1" "$2" "$3" | (
48 - export PATH $(git sh-i18n--envsubst --variables "$2");
49 - git sh-i18n--envsubst "$2"
50 - )
51 - }
52 - ;;
53 -poison)
54 - # Emit garbage so that tests that incorrectly rely on translatable
55 - # strings will fail.
56 - gettext () {
57 - printf "%s" "# GETTEXT POISON #"
58 - }
59 -
60 - eval_gettext () {
61 - printf "%s" "# GETTEXT POISON #"
62 - }
63 -
64 - eval_ngettext () {
65 - printf "%s" "# GETTEXT POISON #"
66 - }
67 - ;;
68 -*)
69 - gettext () {
70 - printf "%s" "$1"
71 - }
72 -
73 - eval_gettext () {
74 - printf "%s" "$1" | (
75 - export PATH $(git sh-i18n--envsubst --variables "$1");
76 - git sh-i18n--envsubst "$1"
77 - )
78 - }
79 +# GNU gettext
80 +export GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
81 +export PATH=@gettext@/bin:$PATH
82
83 - eval_ngettext () {
84 - (test "$3" = 1 && printf "%s" "$1" || printf "%s" "$2") | (
85 - export PATH $(git sh-i18n--envsubst --variables "$2");
86 - git sh-i18n--envsubst "$2"
87 - )
88 - }
89 - ;;
90 -esac
91 +. @gettext@/bin/gettext.sh
92
93 # Git-specific wrapper functions
94 gettextln () {
@@ -1,12 +1,13 b''
1 diff --git a/t/test-lib.sh b/t/test-lib.sh
1 diff --git a/t/test-lib.sh b/t/test-lib.sh
2 index 8665b0a9b6..8bb892b1af 100644
2 --- a/t/test-lib.sh
3 --- a/t/test-lib.sh
3 +++ b/t/test-lib.sh
4 +++ b/t/test-lib.sh
4 @@ -923,7 +923,7 @@
5 @@ -1227,7 +1227,7 @@ elif test -n "$GIT_TEST_INSTALLED"
5 then
6 then
6 GIT_EXEC_PATH=$($GIT_TEST_INSTALLED/git --exec-path) ||
7 GIT_EXEC_PATH=$($GIT_TEST_INSTALLED/git --exec-path) ||
7 error "Cannot run git from $GIT_TEST_INSTALLED."
8 error "Cannot run git from $GIT_TEST_INSTALLED."
8 - PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR:$PATH
9 - PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$PATH
9 + PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$GIT_BUILD_DIR:$PATH
10 + PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$GIT_BUILD_DIR:$PATH
10 GIT_EXEC_PATH=${GIT_TEST_EXEC_PATH:-$GIT_EXEC_PATH}
11 GIT_EXEC_PATH=${GIT_TEST_EXEC_PATH:-$GIT_EXEC_PATH}
11 else # normal case, use ../bin-wrappers only unless $with_dashes:
12 else # normal case, use ../bin-wrappers only unless $with_dashes:
12 git_bin_dir="$GIT_BUILD_DIR/bin-wrappers"
13 if test -n "$no_bin_wrappers"
@@ -1,26 +1,26 b''
1 diff --git a/connect.c b/connect.c
1 diff --git a/connect.c b/connect.c
2 index c3a014c5b..fbca3262b 100644
2 index 4813f005ab..b3f12f3268 100644
3 --- a/connect.c
3 --- a/connect.c
4 +++ b/connect.c
4 +++ b/connect.c
5 @@ -1010,7 +1010,7 @@ static void fill_ssh_args(struct child_process *conn, const char *ssh_host,
5 @@ -1183,7 +1183,7 @@ static void fill_ssh_args(struct child_process *conn, const char *ssh_host,
6
6
7 ssh = getenv("GIT_SSH");
7 ssh = getenv("GIT_SSH");
8 if (!ssh)
8 if (!ssh)
9 - ssh = "ssh";
9 - ssh = "ssh";
10 + ssh = "@ssh@";
10 + ssh = "@ssh@";
11 variant = determine_ssh_variant(ssh, 0);
11 variant = determine_ssh_variant(ssh, 0);
12 }
12 }
13
13
14 diff --git a/git-gui/lib/remote_add.tcl b/git-gui/lib/remote_add.tcl
14 diff --git a/git-gui/lib/remote_add.tcl b/git-gui/lib/remote_add.tcl
15 index 480a6b30d..781720424 100644
15 index 480a6b30d0..7817204241 100644
16 --- a/git-gui/lib/remote_add.tcl
16 --- a/git-gui/lib/remote_add.tcl
17 +++ b/git-gui/lib/remote_add.tcl
17 +++ b/git-gui/lib/remote_add.tcl
18 @@ -139,7 +139,7 @@ method _add {} {
18 @@ -139,7 +139,7 @@ method _add {} {
19 # Parse the location
19 # Parse the location
20 if { [regexp {(?:git\+)?ssh://([^/]+)(/.+)} $location xx host path]
20 if { [regexp {(?:git\+)?ssh://([^/]+)(/.+)} $location xx host path]
21 || [regexp {([^:][^:]+):(.+)} $location xx host path]} {
21 || [regexp {([^:][^:]+):(.+)} $location xx host path]} {
22 - set ssh ssh
22 - set ssh ssh
23 + set ssh @ssh@
23 + set ssh @ssh@
24 if {[info exists env(GIT_SSH)]} {
24 if {[info exists env(GIT_SSH)]} {
25 set ssh $env(GIT_SSH)
25 set ssh $env(GIT_SSH)
26 }
26 }
@@ -1,60 +1,72 b''
1 # Overrides for the generated python-packages.nix
1 # Overrides for the generated python-packages.nix
2 #
2 #
3 # This function is intended to be used as an extension to the generated file
3 # This function is intended to be used as an extension to the generated file
4 # python-packages.nix. The main objective is to add needed dependencies of C
4 # python-packages.nix. The main objective is to add needed dependencies of C
5 # libraries and tweak the build instructions where needed.
5 # libraries and tweak the build instructions where needed.
6
6
7 { pkgs
7 { pkgs
8 , basePythonPackages
8 , basePythonPackages
9 }:
9 }:
10
10
11 let
11 let
12 sed = "sed -i";
12 sed = "sed -i";
13
13
14 in
14 in
15
15
16 self: super: {
16 self: super: {
17
17
18 "cffi" = super."cffi".override (attrs: {
19 buildInputs = [
20 pkgs.libffi
21 ];
22 });
23
18 "gevent" = super."gevent".override (attrs: {
24 "gevent" = super."gevent".override (attrs: {
19 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
25 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
20 # NOTE: (marcink) odd requirements from gevent aren't set properly,
26 # NOTE: (marcink) odd requirements from gevent aren't set properly,
21 # thus we need to inject psutil manually
27 # thus we need to inject psutil manually
22 self."psutil"
28 self."psutil"
23 ];
29 ];
24 });
30 });
25
31
26 "hgsubversion" = super."hgsubversion".override (attrs: {
32 "hgsubversion" = super."hgsubversion".override (attrs: {
27 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
33 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
28 pkgs.sqlite
34 pkgs.sqlite
29 #basePythonPackages.sqlite3
35 #basePythonPackages.sqlite3
30 self.mercurial
36 self.mercurial
31 ];
37 ];
32 });
38 });
33
39
34 "subvertpy" = super."subvertpy".override (attrs: {
40 "subvertpy" = super."subvertpy".override (attrs: {
35 SVN_PREFIX = "${pkgs.subversion.dev}";
41 SVN_PREFIX = "${pkgs.subversion.dev}";
36 propagatedBuildInputs = [
42 propagatedBuildInputs = [
37 pkgs.apr.dev
43 pkgs.apr.dev
38 pkgs.aprutil
44 pkgs.aprutil
39 pkgs.subversion
45 pkgs.subversion
40 ];
46 ];
41 });
47 });
42
48
43 "mercurial" = super."mercurial".override (attrs: {
49 "mercurial" = super."mercurial".override (attrs: {
44 propagatedBuildInputs = [
50 propagatedBuildInputs = [
45 # self.python.modules.curses
51 # self.python.modules.curses
46 ];
52 ];
47 });
53 });
48
54
49 "dulwich" = super."dulwich".override (attrs: {
55 "dulwich" = super."dulwich".override (attrs: {
50 patches = [
56 patches = [
51 ./patches/dulwich/handle-dir-refs.patch
57 ./patches/dulwich/handle-dir-refs.patch
52 ];
58 ];
53 });
59 });
54
60
61 "pygit2" = super."pygit2".override (attrs: {
62 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
63 pkgs.libffi
64 pkgs.libgit2rc
65 ];
66 });
55
67
56 # Avoid that base packages screw up the build process
68 # Avoid that base packages screw up the build process
57 inherit (basePythonPackages)
69 inherit (basePythonPackages)
58 setuptools;
70 setuptools;
59
71
60 }
72 }
@@ -1,948 +1,1090 @@
 # Generated by pip2nix 0.8.0.dev1
 # See https://github.com/johbo/pip2nix
 
 { pkgs, fetchurl, fetchgit, fetchhg }:
 
 self: super: {
   "atomicwrites" = super.buildPythonPackage {
-    name = "atomicwrites-1.2.1";
+    name = "atomicwrites-1.3.0";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
-      sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
+      url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
+      sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "attrs" = super.buildPythonPackage {
-    name = "attrs-18.2.0";
+    name = "attrs-19.3.0";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
-      sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
+      url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
+      sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "backports.shutil-get-terminal-size" = super.buildPythonPackage {
     name = "backports.shutil-get-terminal-size-1.0.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
       sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "beautifulsoup4" = super.buildPythonPackage {
     name = "beautifulsoup4-4.6.3";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
       sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
+  "cffi" = super.buildPythonPackage {
+    name = "cffi-1.12.3";
+    doCheck = false;
+    propagatedBuildInputs = [
+      self."pycparser"
+    ];
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
+      sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.mit ];
+    };
+  };
   "configobj" = super.buildPythonPackage {
     name = "configobj-5.0.6";
     doCheck = false;
     propagatedBuildInputs = [
       self."six"
     ];
     src = fetchurl {
       url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
       sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
+  "configparser" = super.buildPythonPackage {
+    name = "configparser-4.0.2";
+    doCheck = false;
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
+      sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.mit ];
+    };
+  };
+  "contextlib2" = super.buildPythonPackage {
+    name = "contextlib2-0.6.0.post1";
+    doCheck = false;
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
+      sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.psfl ];
+    };
+  };
   "cov-core" = super.buildPythonPackage {
     name = "cov-core-1.15.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."coverage"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
       sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "coverage" = super.buildPythonPackage {
-    name = "coverage-4.5.3";
+    name = "coverage-4.5.4";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
-      sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
+      url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
+      sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
     };
     meta = {
       license = [ pkgs.lib.licenses.asl20 ];
     };
   };
   "decorator" = super.buildPythonPackage {
     name = "decorator-4.1.2";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
       sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
     };
   };
   "dogpile.cache" = super.buildPythonPackage {
-    name = "dogpile.cache-0.7.1";
+    name = "dogpile.cache-0.9.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."decorator"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
-      sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
+      url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
+      sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "dogpile.core" = super.buildPythonPackage {
     name = "dogpile.core-0.4.1";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
       sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "dulwich" = super.buildPythonPackage {
     name = "dulwich-0.13.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
       sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
     };
     meta = {
       license = [ pkgs.lib.licenses.gpl2Plus ];
     };
   };
   "enum34" = super.buildPythonPackage {
     name = "enum34-1.1.6";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
       sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "funcsigs" = super.buildPythonPackage {
     name = "funcsigs-1.0.2";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
       sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
     };
     meta = {
       license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
     };
   };
   "gevent" = super.buildPythonPackage {
     name = "gevent-1.4.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."greenlet"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
       sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "gprof2dot" = super.buildPythonPackage {
     name = "gprof2dot-2017.9.19";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
       sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
     };
     meta = {
       license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
     };
   };
   "greenlet" = super.buildPythonPackage {
     name = "greenlet-0.4.15";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
       sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "gunicorn" = super.buildPythonPackage {
     name = "gunicorn-19.9.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
       sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "hg-evolve" = super.buildPythonPackage {
-    name = "hg-evolve-8.5.1";
+    name = "hg-evolve-9.1.0";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/e3/ce/6594aa403e3464831d4daf20e45fd2e3ef553d968ac13d2c7fa791d4eedd/hg-evolve-8.5.1.tar.gz";
-      sha256 = "09avqn7c1biz97vb1zw91q6nfzydpcqv43mgpfrj7ywp0fscfgf3";
+      url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
+      sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
     };
     meta = {
       license = [ { fullName = "GPLv2+"; } ];
     };
   };
   "hgsubversion" = super.buildPythonPackage {
     name = "hgsubversion-1.9.3";
     doCheck = false;
     propagatedBuildInputs = [
       self."mercurial"
       self."subvertpy"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
       sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
     };
     meta = {
       license = [ pkgs.lib.licenses.gpl1 ];
     };
   };
   "hupper" = super.buildPythonPackage {
-    name = "hupper-1.6.1";
+    name = "hupper-1.9.1";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz";
-      sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy";
+      url = "https://files.pythonhosted.org/packages/09/3a/4f215659f31eeffe364a984dba486bfa3907bfcc54b7013bdfe825cebb5f/hupper-1.9.1.tar.gz";
+      sha256 = "0pyg879fv9mbwlnbzw2a3234qqycqs9l97h5mpkmk0bvxhi2471v";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
+  "importlib-metadata" = super.buildPythonPackage {
+    name = "importlib-metadata-0.23";
+    doCheck = false;
+    propagatedBuildInputs = [
+      self."zipp"
+      self."contextlib2"
+      self."configparser"
+      self."pathlib2"
+    ];
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/5d/44/636bcd15697791943e2dedda0dbe098d8530a38d113b202817133e0b06c0/importlib_metadata-0.23.tar.gz";
+      sha256 = "09mdqdfv5rdrwz80jh9m379gxmvk2vhjfz0fg53hid00icvxf65a";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.asl20 ];
+    };
+  };
   "ipdb" = super.buildPythonPackage {
     name = "ipdb-0.12";
     doCheck = false;
     propagatedBuildInputs = [
       self."setuptools"
       self."ipython"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
       sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "ipython" = super.buildPythonPackage {
     name = "ipython-5.1.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."setuptools"
       self."decorator"
       self."pickleshare"
       self."simplegeneric"
       self."traitlets"
       self."prompt-toolkit"
       self."pygments"
       self."pexpect"
       self."backports.shutil-get-terminal-size"
       self."pathlib2"
       self."pexpect"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
       sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "ipython-genutils" = super.buildPythonPackage {
     name = "ipython-genutils-0.2.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
       sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "mako" = super.buildPythonPackage {
-    name = "mako-1.0.7";
+    name = "mako-1.1.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."markupsafe"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
-      sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
+      url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
+      sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "markupsafe" = super.buildPythonPackage {
-    name = "markupsafe-1.1.0";
+    name = "markupsafe-1.1.1";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
-      sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
+      url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
+      sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
     };
     meta = {
-      license = [ pkgs.lib.licenses.bsdOriginal ];
+      license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
     };
   };
   "mercurial" = super.buildPythonPackage {
-    name = "mercurial-4.9.1";
+    name = "mercurial-5.1.1";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/60/58/a1c52d5f5c0b755e231faf7c4f507dc51fe26d979d36346bc9d28f4f8a75/mercurial-4.9.1.tar.gz";
-      sha256 = "0iybbkd9add066729zg01kwz5hhc1s6lhp9rrnsmzq6ihyxj3p8v";
+      url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
+      sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
     };
     meta = {
       license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
     };
   };
   "mock" = super.buildPythonPackage {
-    name = "mock-1.0.1";
+    name = "mock-3.0.5";
     doCheck = false;
+    propagatedBuildInputs = [
+      self."six"
+      self."funcsigs"
+    ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
-      sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
+      url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
+      sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
     };
     meta = {
-      license = [ pkgs.lib.licenses.bsdOriginal ];
+      license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
     };
   };
   "more-itertools" = super.buildPythonPackage {
     name = "more-itertools-5.0.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."six"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
       sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "msgpack-python" = super.buildPythonPackage {
     name = "msgpack-python-0.5.6";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
       sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
     };
     meta = {
       license = [ pkgs.lib.licenses.asl20 ];
     };
   };
+  "packaging" = super.buildPythonPackage {
+    name = "packaging-19.2";
+    doCheck = false;
+    propagatedBuildInputs = [
+      self."pyparsing"
+      self."six"
+    ];
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/5a/2f/449ded84226d0e2fda8da9252e5ee7731bdf14cd338f622dfcd9934e0377/packaging-19.2.tar.gz";
+      sha256 = "0izwlz9h0bw171a1chr311g2y7n657zjaf4mq4rgm8pp9lbj9f98";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
+    };
+  };
   "pastedeploy" = super.buildPythonPackage {
     name = "pastedeploy-2.0.1";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
       sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "pathlib2" = super.buildPythonPackage {
-    name = "pathlib2-2.3.4";
+    name = "pathlib2-2.3.5";
     doCheck = false;
     propagatedBuildInputs = [
       self."six"
       self."scandir"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/b5/f4/9c7cc726ece2498b6c8b62d3262aa43f59039b953fe23c9964ac5e18d40b/pathlib2-2.3.4.tar.gz";
-      sha256 = "1y0f9rkm1924zrc5dn4bwxlhgdkbml82lkcc28l5rgmr7d918q24";
+      url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
+      sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "pexpect" = super.buildPythonPackage {
     name = "pexpect-4.7.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."ptyprocess"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
       sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
     };
     meta = {
       license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
     };
   };
   "pickleshare" = super.buildPythonPackage {
     name = "pickleshare-0.7.5";
     doCheck = false;
     propagatedBuildInputs = [
       self."pathlib2"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
       sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "plaster" = super.buildPythonPackage {
     name = "plaster-1.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."setuptools"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
       sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "plaster-pastedeploy" = super.buildPythonPackage {
     name = "plaster-pastedeploy-0.7";
     doCheck = false;
     propagatedBuildInputs = [
       self."pastedeploy"
       self."plaster"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
       sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "pluggy" = super.buildPythonPackage {
-    name = "pluggy-0.11.0";
+    name = "pluggy-0.13.1";
     doCheck = false;
+    propagatedBuildInputs = [
+      self."importlib-metadata"
+    ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/0d/a1/862ab336e8128fde20981d2c1aa8506693412daf5083b1911d539412676b/pluggy-0.11.0.tar.gz";
-      sha256 = "10511a54dvafw1jrk75mrhml53c7b7w4yaw7241696lc2hfvr895";
+      url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
+      sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "prompt-toolkit" = super.buildPythonPackage {
-    name = "prompt-toolkit-1.0.16";
+    name = "prompt-toolkit-1.0.18";
     doCheck = false;
     propagatedBuildInputs = [
       self."six"
       self."wcwidth"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
-      sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
+      url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
+      sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "psutil" = super.buildPythonPackage {
-    name = "psutil-5.5.1";
+    name = "psutil-5.6.5";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/c7/01/7c30b247cdc5ba29623faa5c8cf1f1bbf7e041783c340414b0ed7e067c64/psutil-5.5.1.tar.gz";
-      sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj";
+      url = "https://files.pythonhosted.org/packages/03/9a/95c4b3d0424426e5fd94b5302ff74cea44d5d4f53466e1228ac8e73e14b4/psutil-5.6.5.tar.gz";
+      sha256 = "0isil5jxwwd8awz54qk28rpgjg43i5l6yl70g40vxwa4r4m56lfh";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "ptyprocess" = super.buildPythonPackage {
     name = "ptyprocess-0.6.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
       sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
     };
     meta = {
       license = [ ];
     };
   };
   "py" = super.buildPythonPackage {
-    name = "py-1.6.0";
+    name = "py-1.8.0";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
-      sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
+      url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
+      sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
+  "pycparser" = super.buildPythonPackage {
+    name = "pycparser-2.19";
+    doCheck = false;
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz";
+      sha256 = "1cr5dcj9628lkz1qlwq3fv97c25363qppkmcayqvd05dpy573259";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.bsdOriginal ];
+    };
+  };
+  "pygit2" = super.buildPythonPackage {
+    name = "pygit2-0.28.2";
+    doCheck = false;
+    propagatedBuildInputs = [
+      self."cffi"
+      self."six"
+    ];
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
+      sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
+    };
+    meta = {
+      license = [ { fullName = "GPLv2 with linking exception"; } ];
+    };
+  };
   "pygments" = super.buildPythonPackage {
     name = "pygments-2.4.2";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
       sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
+  "pyparsing" = super.buildPythonPackage {
+    name = "pyparsing-2.4.5";
+    doCheck = false;
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/00/32/8076fa13e832bb4dcff379f18f228e5a53412be0631808b9ca2610c0f566/pyparsing-2.4.5.tar.gz";
+      sha256 = "0fk8gsybiw1gm146mkjdjvaajwh20xwvpv4j7syh2zrnpq0j19jc";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.mit ];
+    };
+  };
   "pyramid" = super.buildPythonPackage {
     name = "pyramid-1.10.4";
     doCheck = false;
     propagatedBuildInputs = [
       self."hupper"
       self."plaster"
       self."plaster-pastedeploy"
       self."setuptools"
       self."translationstring"
       self."venusian"
       self."webob"
       self."zope.deprecation"
       self."zope.interface"
       self."repoze.lru"
     ];
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
       sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
     };
     meta = {
       license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
     };
   };
   "pyramid-mako" = super.buildPythonPackage {
-    name = "pyramid-mako-1.0.2";
+    name = "pyramid-mako-1.1.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."pyramid"
       self."mako"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
-      sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
+      url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
+      sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
     };
     meta = {
       license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
     };
   };
   "pytest" = super.buildPythonPackage {
-    name = "pytest-3.8.2";
+    name = "pytest-4.6.5";
     doCheck = false;
     propagatedBuildInputs = [
       self."py"
       self."six"
-      self."setuptools"
+      self."packaging"
       self."attrs"
-      self."more-itertools"
       self."atomicwrites"
       self."pluggy"
+      self."importlib-metadata"
+      self."wcwidth"
       self."funcsigs"
       self."pathlib2"
+      self."more-itertools"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
-      sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
+      url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
+      sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "pytest-cov" = super.buildPythonPackage {
-    name = "pytest-cov-2.6.0";
+    name = "pytest-cov-2.7.1";
     doCheck = false;
     propagatedBuildInputs = [
       self."pytest"
       self."coverage"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
-      sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
+      url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
+      sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
     };
   };
   "pytest-profiling" = super.buildPythonPackage {
-    name = "pytest-profiling-1.3.0";
+    name = "pytest-profiling-1.7.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."six"
       self."pytest"
       self."gprof2dot"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
-      sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
+      url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
+      sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "pytest-runner" = super.buildPythonPackage {
-    name = "pytest-runner-4.2";
+    name = "pytest-runner-5.1";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
-      sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
+      url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
+      sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "pytest-sugar" = super.buildPythonPackage {
-    name = "pytest-sugar-0.9.1";
+    name = "pytest-sugar-0.9.2";
     doCheck = false;
     propagatedBuildInputs = [
       self."pytest"
       self."termcolor"
+      self."packaging"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
-      sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
+      url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
+      sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "pytest-timeout" = super.buildPythonPackage {
-    name = "pytest-timeout-1.3.2";
+    name = "pytest-timeout-1.3.3";
     doCheck = false;
     propagatedBuildInputs = [
       self."pytest"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
-      sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
+      url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
+      sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
     };
   };
+  "redis" = super.buildPythonPackage {
+    name = "redis-3.3.11";
+    doCheck = false;
+    src = fetchurl {
+      url = "https://files.pythonhosted.org/packages/06/ca/00557c74279d2f256d3c42cabf237631355f3a132e4c74c2000e6647ad98/redis-3.3.11.tar.gz";
+      sha256 = "1hicqbi5xl92hhml82awrr2rxl9jar5fp8nbcycj9qgmsdwc43wd";
+    };
+    meta = {
+      license = [ pkgs.lib.licenses.mit ];
+    };
+  };
   "repoze.lru" = super.buildPythonPackage {
     name = "repoze.lru-0.7";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
       sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
     };
     meta = {
       license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
     };
   };
   "rhodecode-vcsserver" = super.buildPythonPackage {
-    name = "rhodecode-vcsserver-4.17.4";
+    name = "rhodecode-vcsserver-4.18.0";
     buildInputs = [
       self."pytest"
       self."py"
       self."pytest-cov"
       self."pytest-sugar"
       self."pytest-runner"
       self."pytest-profiling"
       self."pytest-timeout"
       self."gprof2dot"
       self."mock"
       self."cov-core"
       self."coverage"
       self."webtest"
       self."beautifulsoup4"
       self."configobj"
     ];
     doCheck = true;
     propagatedBuildInputs = [
       self."configobj"
       self."dogpile.cache"
       self."dogpile.core"
       self."decorator"
       self."dulwich"
       self."hgsubversion"
       self."hg-evolve"
       self."mako"
       self."markupsafe"
       self."mercurial"
       self."msgpack-python"
       self."pastedeploy"
       self."pyramid"
       self."pyramid-mako"
+      self."pygit2"
       self."repoze.lru"
+      self."redis"
       self."simplejson"
       self."subprocess32"
       self."subvertpy"
       self."six"
       self."translationstring"
       self."webob"
       self."zope.deprecation"
       self."zope.interface"
       self."gevent"
       self."greenlet"
       self."gunicorn"
       self."waitress"
       self."ipdb"
       self."ipython"
       self."pytest"
       self."py"
       self."pytest-cov"
       self."pytest-sugar"
       self."pytest-runner"
       self."pytest-profiling"
       self."pytest-timeout"
       self."gprof2dot"
       self."mock"
       self."cov-core"
       self."coverage"
       self."webtest"
       self."beautifulsoup4"
     ];
     src = ./.;
     meta = {
       license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
     };
   };
   "scandir" = super.buildPythonPackage {
     name = "scandir-1.10.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
       sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
     };
   };
   "setproctitle" = super.buildPythonPackage {
     name = "setproctitle-1.1.10";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
       sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "setuptools" = super.buildPythonPackage {
-    name = "setuptools-41.0.1";
+    name = "setuptools-44.0.0";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/1d/64/a18a487b4391a05b9c7f938b94a16d80305bf0369c6b0b9509e86165e1d3/setuptools-41.0.1.zip";
-      sha256 = "04sns22y2hhsrwfy1mha2lgslvpjsjsz8xws7h2rh5a7ylkd28m2";
+      url = "https://files.pythonhosted.org/packages/b0/f3/44da7482ac6da3f36f68e253cb04de37365b3dba9036a3c70773b778b485/setuptools-44.0.0.zip";
+      sha256 = "025h5cnxcmda1893l6i12hrwdvs1n8r31qs6q4pkif2v7rrggfp5";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "simplegeneric" = super.buildPythonPackage {
     name = "simplegeneric-0.8.1";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
       sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
     };
     meta = {
       license = [ pkgs.lib.licenses.zpl21 ];
     };
   };
   "simplejson" = super.buildPythonPackage {
     name = "simplejson-3.16.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
       sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
     };
     meta = {
       license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
     };
   };
   "six" = super.buildPythonPackage {
     name = "six-1.11.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
       sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "subprocess32" = super.buildPythonPackage {
     name = "subprocess32-3.5.4";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
       sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
     };
     meta = {
       license = [ pkgs.lib.licenses.psfl ];
     };
   };
   "subvertpy" = super.buildPythonPackage {
     name = "subvertpy-0.10.1";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
       sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
     };
     meta = {
       license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
     };
   };
   "termcolor" = super.buildPythonPackage {
     name = "termcolor-1.1.0";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
       sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   "traitlets" = super.buildPythonPackage {
-    name = "traitlets-4.3.2";
+    name = "traitlets-4.3.3";
     doCheck = false;
     propagatedBuildInputs = [
       self."ipython-genutils"
       self."six"
       self."decorator"
       self."enum34"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
-      sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
+      url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
+      sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   "translationstring" = super.buildPythonPackage {
     name = "translationstring-1.3";
     doCheck = false;
     src = fetchurl {
       url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
       sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
     };
     meta = {
       license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
     };
   };
855 "venusian" = super.buildPythonPackage {
983 "venusian" = super.buildPythonPackage {
856 name = "venusian-1.2.0";
984 name = "venusian-1.2.0";
857 doCheck = false;
985 doCheck = false;
858 src = fetchurl {
986 src = fetchurl {
859 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
987 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
860 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
988 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
861 };
989 };
862 meta = {
990 meta = {
863 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
991 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
864 };
992 };
865 };
993 };
866 "waitress" = super.buildPythonPackage {
994 "waitress" = super.buildPythonPackage {
867 name = "waitress-1.3.0";
995 name = "waitress-1.3.1";
868 doCheck = false;
996 doCheck = false;
869 src = fetchurl {
997 src = fetchurl {
870 url = "https://files.pythonhosted.org/packages/43/50/9890471320d5ad22761ae46661cf745f487b1c8c4ec49352b99e1078b970/waitress-1.3.0.tar.gz";
998 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
871 sha256 = "09j5dzbbcxib7vdskhx39s1qsydlr4n2p2png71d7mjnr9pnwajf";
999 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
872 };
1000 };
873 meta = {
1001 meta = {
874 license = [ pkgs.lib.licenses.zpl21 ];
1002 license = [ pkgs.lib.licenses.zpl21 ];
875 };
1003 };
876 };
1004 };
877 "wcwidth" = super.buildPythonPackage {
1005 "wcwidth" = super.buildPythonPackage {
878 name = "wcwidth-0.1.7";
1006 name = "wcwidth-0.1.7";
879 doCheck = false;
1007 doCheck = false;
880 src = fetchurl {
1008 src = fetchurl {
881 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1009 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
882 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
1010 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
883 };
1011 };
884 meta = {
1012 meta = {
885 license = [ pkgs.lib.licenses.mit ];
1013 license = [ pkgs.lib.licenses.mit ];
886 };
1014 };
887 };
1015 };
888 "webob" = super.buildPythonPackage {
1016 "webob" = super.buildPythonPackage {
889 name = "webob-1.8.5";
1017 name = "webob-1.8.5";
890 doCheck = false;
1018 doCheck = false;
891 src = fetchurl {
1019 src = fetchurl {
892 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1020 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
893 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1021 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
894 };
1022 };
895 meta = {
1023 meta = {
896 license = [ pkgs.lib.licenses.mit ];
1024 license = [ pkgs.lib.licenses.mit ];
897 };
1025 };
898 };
1026 };
899 "webtest" = super.buildPythonPackage {
1027 "webtest" = super.buildPythonPackage {
900 name = "webtest-2.0.33";
1028 name = "webtest-2.0.33";
901 doCheck = false;
1029 doCheck = false;
902 propagatedBuildInputs = [
1030 propagatedBuildInputs = [
903 self."six"
1031 self."six"
904 self."webob"
1032 self."webob"
905 self."waitress"
1033 self."waitress"
906 self."beautifulsoup4"
1034 self."beautifulsoup4"
907 ];
1035 ];
908 src = fetchurl {
1036 src = fetchurl {
909 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
1037 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
910 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
1038 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
911 };
1039 };
912 meta = {
1040 meta = {
913 license = [ pkgs.lib.licenses.mit ];
1041 license = [ pkgs.lib.licenses.mit ];
914 };
1042 };
915 };
1043 };
1044 "zipp" = super.buildPythonPackage {
1045 name = "zipp-0.6.0";
1046 doCheck = false;
1047 propagatedBuildInputs = [
1048 self."more-itertools"
1049 ];
1050 src = fetchurl {
1051 url = "https://files.pythonhosted.org/packages/57/dd/585d728479d97d25aeeb9aa470d36a4ad8d0ba5610f84e14770128ce6ff7/zipp-0.6.0.tar.gz";
1052 sha256 = "13ndkf7vklw978a4gdl1yfvn8hch28429a0iam67sg4nrp5v261p";
1053 };
1054 meta = {
1055 license = [ pkgs.lib.licenses.mit ];
1056 };
1057 };
916 "zope.deprecation" = super.buildPythonPackage {
1058 "zope.deprecation" = super.buildPythonPackage {
917 name = "zope.deprecation-4.4.0";
1059 name = "zope.deprecation-4.4.0";
918 doCheck = false;
1060 doCheck = false;
919 propagatedBuildInputs = [
1061 propagatedBuildInputs = [
920 self."setuptools"
1062 self."setuptools"
921 ];
1063 ];
922 src = fetchurl {
1064 src = fetchurl {
923 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1065 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
924 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1066 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
925 };
1067 };
926 meta = {
1068 meta = {
927 license = [ pkgs.lib.licenses.zpl21 ];
1069 license = [ pkgs.lib.licenses.zpl21 ];
928 };
1070 };
929 };
1071 };
930 "zope.interface" = super.buildPythonPackage {
1072 "zope.interface" = super.buildPythonPackage {
931 name = "zope.interface-4.6.0";
1073 name = "zope.interface-4.6.0";
932 doCheck = false;
1074 doCheck = false;
933 propagatedBuildInputs = [
1075 propagatedBuildInputs = [
934 self."setuptools"
1076 self."setuptools"
935 ];
1077 ];
936 src = fetchurl {
1078 src = fetchurl {
937 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1079 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
938 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1080 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
939 };
1081 };
940 meta = {
1082 meta = {
941 license = [ pkgs.lib.licenses.zpl21 ];
1083 license = [ pkgs.lib.licenses.zpl21 ];
942 };
1084 };
943 };
1085 };
944
1086
945 ### Test requirements
1087 ### Test requirements
946
1088
947
1089
948 }
1090 }
@@ -1,41 +1,42 b''
 { pkgs ? (import <nixpkgs> {})
 , pythonPackages ? "python27Packages"
 }:
 
 with pkgs.lib;
 
 let _pythonPackages = pythonPackages; in
 let
   pythonPackages = getAttr _pythonPackages pkgs;
 
   pip2nix = import ./nix-common/pip2nix.nix {
     inherit
       pkgs
       pythonPackages;
   };
 
 in
 
 pkgs.stdenv.mkDerivation {
   name = "pip2nix-generated";
   buildInputs = [
     pip2nix.pip2nix
     pythonPackages.pip-tools
     pkgs.apr
     pkgs.aprutil
+    pkgs.libffi
   ];
 
   shellHook = ''
     runHook preShellHook
     echo "Setting SVN_* variables"
     export SVN_LIBRARY_PATH=${pkgs.subversion}/lib
     export SVN_HEADER_PATH=${pkgs.subversion.dev}/include
     runHook postShellHook
   '';
 
   preShellHook = ''
     echo "Starting Generate Shell"
     # Custom prompt to distinguish from other dev envs.
     export PS1="\n\[\033[1;32m\][Generate-shell:\w]$\[\033[0m\] "
   '';
 }
@@ -1,43 +1,48 b''
 ## dependencies
 
 # our custom configobj
 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
 
-dogpile.cache==0.7.1
+dogpile.cache==0.9.0
 dogpile.core==0.4.1
 decorator==4.1.2
 dulwich==0.13.0
 hgsubversion==1.9.3
-hg-evolve==8.5.1
+hg-evolve==9.1.0
-mako==1.0.7
+mako==1.1.0
-markupsafe==1.1.0
+markupsafe==1.1.1
-mercurial==4.9.1
+mercurial==5.1.1
 msgpack-python==0.5.6
 
 pastedeploy==2.0.1
 pyramid==1.10.4
-pyramid-mako==1.0.2
+pyramid-mako==1.1.0
+pygit2==0.28.2
 
 repoze.lru==0.7
+redis==3.3.11
 simplejson==3.16.0
 subprocess32==3.5.4
 subvertpy==0.10.1
 
 six==1.11.0
 translationstring==1.3
 webob==1.8.5
 zope.deprecation==4.4.0
 zope.interface==4.6.0
 
 ## http servers
 gevent==1.4.0
 greenlet==0.4.15
 gunicorn==19.9.0
-waitress==1.3.0
+waitress==1.3.1
 
 ## debug
 ipdb==0.12.0
 ipython==5.1.0
 
 ## test related requirements
 -r requirements_test.txt
+
+## uncomment to add the debug libraries
+#-r requirements_debug.txt
@@ -1,12 +1,18 b''
 # contains not directly required libraries we want to pin the version.
 
-atomicwrites==1.2.1
+atomicwrites==1.3.0
-attrs==18.2.0
+attrs==19.3.0
-hupper==1.6.1
-pathlib2==2.3.4
+contextlib2==0.6.0.post1
+cffi==1.12.3
+hupper==1.9.1
+importlib-metadata==0.23
+packaging==19.2.0
+pathlib2==2.3.5
 pygments==2.4.2
-psutil==5.5.1
-pluggy==0.11.0
+pyparsing==2.4.5
+psutil==5.6.5
+pluggy==0.13.1
 scandir==1.10.0
 setproctitle==1.1.10
 venusian==1.2.0
+wcwidth==0.1.7
@@ -1,16 +1,16 b''
 # test related requirements
-pytest==3.8.2
-py==1.6.0
-pytest-cov==2.6.0
-pytest-sugar==0.9.1
-pytest-runner==4.2.0
-pytest-profiling==1.3.0
-pytest-timeout==1.3.2
+pytest==4.6.5
+py==1.8.0
+pytest-cov==2.7.1
+pytest-sugar==0.9.2
+pytest-runner==5.1.0
+pytest-profiling==1.7.0
+pytest-timeout==1.3.3
 gprof2dot==2017.9.19
 
-mock==1.0.1
+mock==3.0.5
 cov-core==1.15.0
-coverage==4.5.3
+coverage==4.5.4
 
 webtest==2.0.33
 beautifulsoup4==4.6.3
@@ -1,1 +1,1 b''
-4.17.4
\ No newline at end of file
+4.18.0
\ No newline at end of file
@@ -1,94 +1,76 b''
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2019 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 import sys
 import traceback
 import logging
 import urlparse
 
 from vcsserver.lib.rc_cache import region_meta
 log = logging.getLogger(__name__)
 
 
 class RepoFactory(object):
     """
     Utility to create instances of repository
 
     It provides internal caching of the `repo` object based on
     the :term:`call context`.
     """
     repo_type = None
 
     def __init__(self):
         self._cache_region = region_meta.dogpile_cache_regions['repo_object']
 
     def _create_config(self, path, config):
         config = {}
         return config
 
     def _create_repo(self, wire, create):
         raise NotImplementedError()
 
     def repo(self, wire, create=False):
-        """
-        Get a repository instance for the given path.
-
-        Uses internally the low level beaker API since the decorators introduce
-        significant overhead.
-        """
-        region = self._cache_region
-        context = wire.get('context', None)
-        repo_path = wire.get('path', '')
-        context_uid = '{}'.format(context)
-        cache = wire.get('cache', True)
-        cache_on = context and cache
-
-        @region.conditional_cache_on_arguments(condition=cache_on)
-        def create_new_repo(_repo_type, _repo_path, _context_uid):
-            return self._create_repo(wire, create)
-
-        repo = create_new_repo(self.repo_type, repo_path, context_uid)
-        return repo
+        raise NotImplementedError()
 
 
 def obfuscate_qs(query_string):
     if query_string is None:
         return None
 
     parsed = []
     for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
         if k in ['auth_token', 'api_key']:
             v = "*****"
         parsed.append((k, v))
 
     return '&'.join('{}{}'.format(
         k, '={}'.format(v) if v else '') for k, v in parsed)
 
 
 def raise_from_original(new_type):
     """
     Raise a new exception type with original args and traceback.
     """
     exc_type, exc_value, exc_traceback = sys.exc_info()
     new_exc = new_type(*exc_value.args)
     # store the original traceback into the new exc
     new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
 
     try:
         raise new_exc, None, exc_traceback
     finally:
         del exc_traceback
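
The surviving helpers above are small but load-bearing: obfuscate_qs masks credentials before URLs are logged, and raise_from_original re-raises library errors as neutral types while preserving the traceback (the three-argument raise is Python 2 syntax). A quick usage sketch of the obfuscation helper, assuming the vcsserver package is importable (the query-string values are made up):

    from vcsserver.base import obfuscate_qs

    # sensitive keys are masked, everything else passes through untouched
    qs = obfuscate_qs('auth_token=s3cr3t&api_key=abc&branch=stable')
    print(qs)  # auth_token=*****&api_key=*****&branch=stable
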
(This diff has been collapsed as it changes many lines: 753 lines changed.)
@@ -1,752 +1,1177 b''
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2019 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-
 import collections
 import logging
 import os
 import posixpath as vcspath
 import re
 import stat
 import traceback
 import urllib
 import urllib2
 from functools import wraps
 
 import more_itertools
+import pygit2
+from pygit2 import Repository as LibGit2Repo
 from dulwich import index, objects
 from dulwich.client import HttpGitClient, LocalGitClient
 from dulwich.errors import (
     NotGitRepository, ChecksumMismatch, WrongObjectException,
     MissingCommitError, ObjectMissing, HangupException,
     UnexpectedCommandError)
-from dulwich.repo import Repo as DulwichRepo, Tag
+from dulwich.repo import Repo as DulwichRepo
 from dulwich.server import update_server_info
 
 from vcsserver import exceptions, settings, subprocessio
-from vcsserver.utils import safe_str
+from vcsserver.utils import safe_str, safe_int
-from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
+from vcsserver.base import RepoFactory, obfuscate_qs
 from vcsserver.hgcompat import (
     hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
 from vcsserver.git_lfs.lib import LFSOidStore
+from vcsserver.vcs_base import RemoteBase
 
 DIR_STAT = stat.S_IFDIR
 FILE_MODE = stat.S_IFMT
 GIT_LINK = objects.S_IFGITLINK
+PEELED_REF_MARKER = '^{}'
+
 
 log = logging.getLogger(__name__)
 
 
+def str_to_dulwich(value):
+    """
+    Dulwich 0.10.1a requires `unicode` objects to be passed in.
+    """
+    return value.decode(settings.WIRE_ENCODING)
+
+
 def reraise_safe_exceptions(func):
     """Converts Dulwich exceptions to something neutral."""
+
     @wraps(func)
     def wrapper(*args, **kwargs):
         try:
             return func(*args, **kwargs)
-        except (ChecksumMismatch, WrongObjectException, MissingCommitError,
-                ObjectMissing) as e:
-            exc = exceptions.LookupException(e)
-            raise exc(e)
+        except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
+            exc = exceptions.LookupException(org_exc=e)
+            raise exc(safe_str(e))
         except (HangupException, UnexpectedCommandError) as e:
-            exc = exceptions.VcsException(e)
-            raise exc(e)
+            exc = exceptions.VcsException(org_exc=e)
+            raise exc(safe_str(e))
         except Exception as e:
             # NOTE(marcink): becuase of how dulwich handles some exceptions
             # (KeyError on empty repos), we cannot track this and catch all
             # exceptions, it's an exceptions from other handlers
             #if not hasattr(e, '_vcs_kind'):
             #log.exception("Unhandled exception in git remote call")
             #raise_from_original(exceptions.UnhandledException)
             raise
     return wrapper
 
 
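reraise_safe_exceptions is the classic wrap-and-translate decorator: run the call, map known dulwich errors onto the service's neutral exception types, and let anything unrecognized bubble up. A self-contained sketch of the same shape (the names here are illustrative, not part of the changeset):

    from functools import wraps

    class NeutralLookupError(Exception):
        """Stand-in for a wire-safe exception type."""

    def translate_errors(func):
        @wraps(func)  # keep the wrapped function's name and docstring
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except KeyError as e:
                # map a library-specific failure onto the neutral type
                raise NeutralLookupError('lookup failed: %r' % e)
        return wrapper

    @translate_errors
    def fetch(mapping, key):
        return mapping[key]

    fetch({'a': 1}, 'a')  # -> 1
    # fetch({}, 'a')      # -> NeutralLookupError instead of KeyError
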
 class Repo(DulwichRepo):
     """
     A wrapper for dulwich Repo class.
 
     Since dulwich is sometimes keeping .idx file descriptors open, it leads to
     "Too many open files" error. We need to close all opened file descriptors
     once the repo object is destroyed.
-
-    TODO: mikhail: please check if we need this wrapper after updating dulwich
-    to 0.12.0 +
     """
     def __del__(self):
         if hasattr(self, 'object_store'):
             self.close()
 
 
+class Repository(LibGit2Repo):
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.free()
+
+
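Because Repository.__exit__ calls free(), every "with repo_init as repo:" block in this module releases the underlying libgit2 handle deterministically, on normal exit and on exceptions alike. The same pattern in miniature (the resource below is a stand-in; only the free() call mirrors pygit2):

    class NativeHandle(object):
        """Pretend wrapper around a C-level resource."""
        def free(self):
            print('native resources released')

    class ManagedHandle(NativeHandle):
        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            # runs whether the block ended normally or raised
            self.free()

    with ManagedHandle() as handle:
        pass  # use the handle; free() fires as the block exits
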
 class GitFactory(RepoFactory):
     repo_type = 'git'
 
-    def _create_repo(self, wire, create):
-        repo_path = str_to_dulwich(wire['path'])
-        return Repo(repo_path)
+    def _create_repo(self, wire, create, use_libgit2=False):
+        if use_libgit2:
+            return Repository(wire['path'])
+        else:
+            repo_path = str_to_dulwich(wire['path'])
+            return Repo(repo_path)
+
+    def repo(self, wire, create=False, use_libgit2=False):
+        """
+        Get a repository instance for the given path.
+        """
+        return self._create_repo(wire, create, use_libgit2)
+
+    def repo_libgit2(self, wire):
+        return self.repo(wire, use_libgit2=True)
 
 
-class GitRemote(object):
+class GitRemote(RemoteBase):
 
     def __init__(self, factory):
         self._factory = factory
-        self.peeled_ref_marker = '^{}'
         self._bulk_methods = {
-            "author": self.commit_attribute,
-            "date": self.get_object_attrs,
-            "message": self.commit_attribute,
-            "parents": self.commit_attribute,
+            "date": self.date,
+            "author": self.author,
+            "branch": self.branch,
+            "message": self.message,
+            "parents": self.parents,
             "_commit": self.revision,
         }
 
     def _wire_to_config(self, wire):
         if 'config' in wire:
             return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
         return {}
 
-    def _assign_ref(self, wire, ref, commit_id):
-        repo = self._factory.repo(wire)
-        repo[ref] = commit_id
-
     def _remote_conf(self, config):
         params = [
             '-c', 'core.askpass=""',
         ]
         ssl_cert_dir = config.get('vcs_ssl_dir')
         if ssl_cert_dir:
             params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
         return params
 
     @reraise_safe_exceptions
-    def is_empty(self, wire):
-        repo = self._factory.repo(wire)
-        try:
-            return not repo.head()
-        except Exception:
-            log.exception("failed to read object_store")
-            return True
+    def discover_git_version(self):
+        stdout, _ = self.run_git_command(
+            {}, ['--version'], _bare=True, _safe=True)
+        prefix = 'git version'
+        if stdout.startswith(prefix):
+            stdout = stdout[len(prefix):]
+        return stdout.strip()
 
     @reraise_safe_exceptions
-    def add_object(self, wire, content):
-        repo = self._factory.repo(wire)
-        blob = objects.Blob()
-        blob.set_raw_string(content)
-        repo.object_store.add_object(blob)
-        return blob.id
+    def is_empty(self, wire):
+        repo_init = self._factory.repo_libgit2(wire)
+        with repo_init as repo:
+
+            try:
+                has_head = repo.head.name
+                if has_head:
+                    return False
+
+                # NOTE(marcink): check again using more expensive method
+                return repo.is_empty
+            except Exception:
+                pass
+
+        return True
 
     @reraise_safe_exceptions
     def assert_correct_path(self, wire):
-        path = wire.get('path')
-        try:
-            self._factory.repo(wire)
-        except NotGitRepository as e:
-            tb = traceback.format_exc()
-            log.debug("Invalid Git path `%s`, tb: %s", path, tb)
-            return False
-
-        return True
+        cache_on, context_uid, repo_id = self._cache_on(wire)
+        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        def _assert_correct_path(_context_uid, _repo_id):
+            try:
+                repo_init = self._factory.repo_libgit2(wire)
+                with repo_init as repo:
+                    pass
+            except pygit2.GitError:
+                path = wire.get('path')
+                tb = traceback.format_exc()
+                log.debug("Invalid Git path `%s`, tb: %s", path, tb)
+                return False
 
+            return True
+        return _assert_correct_path(context_uid, repo_id)
 
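assert_correct_path shows the memoization idiom repeated throughout this file: _cache_on(wire) (inherited from RemoteBase) yields an on/off switch plus cache-key parts, and an inner function is decorated so results are stored per repo and call context. Plain dogpile.cache exposes the unconditional variant, cache_on_arguments; the conditional_cache_on_arguments used here behaves the same way when its condition is true. A minimal sketch with stock dogpile (the backend choice is illustrative):

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory')

    @region.cache_on_arguments()
    def _expensive(_repo_id, _sha):
        print('cache miss, computing')
        return 42

    _expensive('repo-1', 'deadbeef')  # computes and stores
    _expensive('repo-1', 'deadbeef')  # served from the region
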
     @reraise_safe_exceptions
     def bare(self, wire):
-        repo = self._factory.repo(wire)
-        return repo.bare
+        repo_init = self._factory.repo_libgit2(wire)
+        with repo_init as repo:
+            return repo.is_bare
 
     @reraise_safe_exceptions
     def blob_as_pretty_string(self, wire, sha):
-        repo = self._factory.repo(wire)
-        return repo[sha].as_pretty_string()
+        repo_init = self._factory.repo_libgit2(wire)
+        with repo_init as repo:
+            blob_obj = repo[sha]
+            blob = blob_obj.data
+            return blob
 
     @reraise_safe_exceptions
     def blob_raw_length(self, wire, sha):
-        repo = self._factory.repo(wire)
-        blob = repo[sha]
-        return blob.raw_length()
+        cache_on, context_uid, repo_id = self._cache_on(wire)
+        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        def _blob_raw_length(_repo_id, _sha):
+
+            repo_init = self._factory.repo_libgit2(wire)
+            with repo_init as repo:
+                blob = repo[sha]
+                return blob.size
+
+        return _blob_raw_length(repo_id, sha)
 
     def _parse_lfs_pointer(self, raw_content):
 
         spec_string = 'version https://git-lfs.github.com/spec'
         if raw_content and raw_content.startswith(spec_string):
             pattern = re.compile(r"""
                                  (?:\n)?
                                  ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
                                  ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
                                  ^size[ ](?P<oid_size>[0-9]+)\n
                                  (?:\n)?
                                  """, re.VERBOSE | re.MULTILINE)
             match = pattern.match(raw_content)
             if match:
                 return match.groupdict()
 
         return {}
 
245
193 @reraise_safe_exceptions
246 @reraise_safe_exceptions
194 def is_large_file(self, wire, sha):
247 def is_large_file(self, wire, commit_id):
195 repo = self._factory.repo(wire)
248 cache_on, context_uid, repo_id = self._cache_on(wire)
196 blob = repo[sha]
249
197 return self._parse_lfs_pointer(blob.as_raw_string())
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 def _is_large_file(_repo_id, _sha):
252 repo_init = self._factory.repo_libgit2(wire)
253 with repo_init as repo:
254 blob = repo[commit_id]
255 if blob.is_binary:
256 return {}
257
258 return self._parse_lfs_pointer(blob.data)
259
260 return _is_large_file(repo_id, commit_id)
261
262 @reraise_safe_exceptions
263 def is_binary(self, wire, tree_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
272
273 return _is_binary(repo_id, tree_id)
198
274
     @reraise_safe_exceptions
     def in_largefiles_store(self, wire, oid):
-        repo = self._factory.repo(wire)
         conf = self._wire_to_config(wire)
+        repo_init = self._factory.repo_libgit2(wire)
+        with repo_init as repo:
+            repo_name = repo.path
 
         store_location = conf.get('vcs_git_lfs_store_location')
         if store_location:
-            repo_name = repo.path
+
             store = LFSOidStore(
                 oid=oid, repo=repo_name, store_location=store_location)
             return store.has_oid()
 
         return False
 
     @reraise_safe_exceptions
     def store_path(self, wire, oid):
-        repo = self._factory.repo(wire)
         conf = self._wire_to_config(wire)
+        repo_init = self._factory.repo_libgit2(wire)
+        with repo_init as repo:
+            repo_name = repo.path
 
         store_location = conf.get('vcs_git_lfs_store_location')
         if store_location:
-            repo_name = repo.path
             store = LFSOidStore(
                 oid=oid, repo=repo_name, store_location=store_location)
             return store.oid_path
         raise ValueError('Unable to fetch oid with path {}'.format(oid))
 
227 def bulk_request(self, wire, rev, pre_load):
306 def bulk_request(self, wire, rev, pre_load):
228 result = {}
307 cache_on, context_uid, repo_id = self._cache_on(wire)
229 for attr in pre_load:
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
230 try:
309 def _bulk_request(_repo_id, _rev, _pre_load):
231 method = self._bulk_methods[attr]
310 result = {}
232 args = [wire, rev]
311 for attr in pre_load:
233 if attr == "date":
312 try:
234 args.extend(["commit_time", "commit_timezone"])
313 method = self._bulk_methods[attr]
235 elif attr in ["author", "message", "parents"]:
314 args = [wire, rev]
236 args.append(attr)
315 result[attr] = method(*args)
237 result[attr] = method(*args)
316 except KeyError as e:
238 except KeyError as e:
317 raise exceptions.VcsException(e)(
239 raise exceptions.VcsException(e)(
318 "Unknown bulk attribute: %s" % attr)
240 "Unknown bulk attribute: %s" % attr)
319 return result
241 return result
320
321 return _bulk_request(repo_id, rev, sorted(pre_load))
242
322
     def _build_opener(self, url):
         handlers = []
         url_obj = url_parser(url)
         _, authinfo = url_obj.authinfo()
 
         if authinfo:
             # create a password manager
             passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
             passmgr.add_password(*authinfo)
 
             handlers.extend((httpbasicauthhandler(passmgr),
                              httpdigestauthhandler(passmgr)))
 
         return urllib2.build_opener(*handlers)
 
+    def _type_id_to_name(self, type_id):
+        return {
+            1: b'commit',
+            2: b'tree',
+            3: b'blob',
+            4: b'tag'
+        }[type_id]
+
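_build_opener attaches HTTP basic and digest handlers (mercurial's hgcompat variants) to a urllib2 opener whenever the URL carries credentials. The stock Python 2 construction looks like this (host and credentials are placeholders):

    import urllib2

    passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    # realm=None means: use these credentials for any realm at this URI
    passmgr.add_password(None, 'https://example.com/repo.git', 'user', 'secret')

    opener = urllib2.build_opener(
        urllib2.HTTPBasicAuthHandler(passmgr),
        urllib2.HTTPDigestAuthHandler(passmgr))
    opener.addheaders = [('User-Agent', 'git/1.7.8.0')]  # as check_url does below
    # resp = opener.open('https://example.com/repo.git/info/refs?service=git-upload-pack')
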
     @reraise_safe_exceptions
     def check_url(self, url, config):
         url_obj = url_parser(url)
         test_uri, _ = url_obj.authinfo()
         url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
         url_obj.query = obfuscate_qs(url_obj.query)
         cleaned_uri = str(url_obj)
         log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
 
         if not test_uri.endswith('info/refs'):
             test_uri = test_uri.rstrip('/') + '/info/refs'
 
         o = self._build_opener(url)
         o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 
         q = {"service": 'git-upload-pack'}
         qs = '?%s' % urllib.urlencode(q)
         cu = "%s%s" % (test_uri, qs)
         req = urllib2.Request(cu, None, {})
 
         try:
             log.debug("Trying to open URL %s", cleaned_uri)
             resp = o.open(req)
             if resp.code != 200:
                 raise exceptions.URLError()('Return Code is not 200')
         except Exception as e:
             log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
             # means it cannot be cloned
             raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
 
         # now detect if it's proper git repo
         gitdata = resp.read()
         if 'service=git-upload-pack' in gitdata:
             pass
         elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
             # old style git can return some other format !
             pass
         else:
             raise exceptions.URLError()(
                 "url [%s] does not look like an git" % (cleaned_uri,))
 
         return True
 
     @reraise_safe_exceptions
     def clone(self, wire, url, deferred, valid_refs, update_after_clone):
         # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
         remote_refs = self.pull(wire, url, apply_refs=False)
         repo = self._factory.repo(wire)
         if isinstance(valid_refs, list):
             valid_refs = tuple(valid_refs)
 
         for k in remote_refs:
             # only parse heads/tags and skip so called deferred tags
             if k.startswith(valid_refs) and not k.endswith(deferred):
                 repo[k] = remote_refs[k]
 
         if update_after_clone:
             # we want to checkout HEAD
             repo["HEAD"] = remote_refs["HEAD"]
             index.build_index_from_tree(repo.path, repo.index_path(),
                                         repo.object_store, repo["HEAD"].tree)
 
+    @reraise_safe_exceptions
+    def branch(self, wire, commit_id):
+        cache_on, context_uid, repo_id = self._cache_on(wire)
+        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        def _branch(_context_uid, _repo_id, _commit_id):
+            regex = re.compile('^refs/heads')
+
+            def filter_with(ref):
+                return regex.match(ref[0]) and ref[1] == _commit_id
+
+            branches = filter(filter_with, self.get_refs(wire).items())
+            return [x[0].split('refs/heads/')[-1] for x in branches]
+
+        return _branch(context_uid, repo_id, commit_id)
+
+    @reraise_safe_exceptions
+    def commit_branches(self, wire, commit_id):
+        cache_on, context_uid, repo_id = self._cache_on(wire)
+        @self.region.conditional_cache_on_arguments(condition=cache_on)
+        def _commit_branches(_context_uid, _repo_id, _commit_id):
+            repo_init = self._factory.repo_libgit2(wire)
+            with repo_init as repo:
+                branches = [x for x in repo.branches.with_commit(_commit_id)]
+                return branches
+
+        return _commit_branches(context_uid, repo_id, commit_id)
+
+    @reraise_safe_exceptions
+    def add_object(self, wire, content):
+        repo_init = self._factory.repo_libgit2(wire)
+        with repo_init as repo:
+            blob = objects.Blob()
+            blob.set_raw_string(content)
+            repo.object_store.add_object(blob)
+            return blob.id
+
     # TODO: this is quite complex, check if that can be simplified
     @reraise_safe_exceptions
     def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
         repo = self._factory.repo(wire)
         object_store = repo.object_store
 
         # Create tree and populates it with blobs
         commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
 
         for node in updated:
             # Compute subdirs if needed
             dirpath, nodename = vcspath.split(node['path'])
             dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
             parent = commit_tree
             ancestors = [('', parent)]
 
             # Tries to dig for the deepest existing tree
             while dirnames:
                 curdir = dirnames.pop(0)
                 try:
                     dir_id = parent[curdir][1]
                 except KeyError:
                     # put curdir back into dirnames and stops
                     dirnames.insert(0, curdir)
                     break
                 else:
                     # If found, updates parent
                     parent = repo[dir_id]
                     ancestors.append((curdir, parent))
             # Now parent is deepest existing tree and we need to create
             # subtrees for dirnames (in reverse order)
             # [this only applies for nodes from added]
             new_trees = []
 
             blob = objects.Blob.from_string(node['content'])
 
             if dirnames:
                 # If there are trees which should be created we need to build
                 # them now (in reverse order)
                 reversed_dirnames = list(reversed(dirnames))
                 curtree = objects.Tree()
                 curtree[node['node_path']] = node['mode'], blob.id
                 new_trees.append(curtree)
                 for dirname in reversed_dirnames[:-1]:
                     newtree = objects.Tree()
                     newtree[dirname] = (DIR_STAT, curtree.id)
                     new_trees.append(newtree)
                     curtree = newtree
                 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
             else:
-                parent.add(
-                    name=node['node_path'], mode=node['mode'], hexsha=blob.id)
+                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
 
             new_trees.append(parent)
             # Update ancestors
             reversed_ancestors = reversed(
                 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
             for parent, tree, path in reversed_ancestors:
                 parent[path] = (DIR_STAT, tree.id)
                 object_store.add_object(tree)
 
             object_store.add_object(blob)
             for tree in new_trees:
                 object_store.add_object(tree)
 
         for node_path in removed:
             paths = node_path.split('/')
             tree = commit_tree
             trees = [tree]
             # Traverse deep into the forest...
             for path in paths:
                 try:
                     obj = repo[tree[path][1]]
                     if isinstance(obj, objects.Tree):
                         trees.append(obj)
                         tree = obj
                 except KeyError:
                     break
             # Cut down the blob and all rotten trees on the way back...
             for path, tree in reversed(zip(paths, trees)):
                 del tree[path]
                 if tree:
                     # This tree still has elements - don't remove it or any
                     # of it's parents
                     break
 
         object_store.add_object(commit_tree)
 
         # Create commit
         commit = objects.Commit()
         commit.tree = commit_tree.id
         for k, v in commit_data.iteritems():
             setattr(commit, k, v)
         object_store.add_object(commit)
 
+        self.create_branch(wire, branch, commit.id)
+
+        # dulwich set-ref
         ref = 'refs/heads/%s' % branch
         repo.refs[ref] = commit.id
 
         return commit.id
 
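Everything the commit method does by hand reduces to the standard dulwich object dance: write blobs, link them into trees, point a commit at the root tree, then move a ref. Stripped to a single file, that is roughly the following sketch (paths and identity values are placeholders):

    import time
    import tempfile
    from dulwich.repo import Repo
    from dulwich.objects import Blob, Tree, Commit

    repo = Repo.init_bare(tempfile.mkdtemp(suffix='.git'))  # throwaway repo

    blob = Blob.from_string('hello\n')
    tree = Tree()
    tree.add('hello.txt', 0o100644, blob.id)  # regular-file mode

    commit = Commit()
    commit.tree = tree.id
    commit.author = commit.committer = 'Demo <demo@example.com>'
    commit.author_time = commit.commit_time = int(time.time())
    commit.author_timezone = commit.commit_timezone = 0
    commit.encoding = 'UTF-8'
    commit.message = 'add hello.txt'

    for obj in (blob, tree, commit):
        repo.object_store.add_object(obj)
    repo.refs['refs/heads/master'] = commit.id  # same set-ref as above
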
     @reraise_safe_exceptions
     def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
         if url != 'default' and '://' not in url:
             client = LocalGitClient(url)
         else:
             url_obj = url_parser(url)
             o = self._build_opener(url)
             url, _ = url_obj.authinfo()
             client = HttpGitClient(base_url=url, opener=o)
         repo = self._factory.repo(wire)
 
         determine_wants = repo.object_store.determine_wants_all
         if refs:
             def determine_wants_requested(references):
                 return [references[r] for r in references if r in refs]
             determine_wants = determine_wants_requested
 
         try:
             remote_refs = client.fetch(
                 path=url, target=repo, determine_wants=determine_wants)
         except NotGitRepository as e:
             log.warning(
                 'Trying to fetch from "%s" failed, not a Git repository.', url)
             # Exception can contain unicode which we convert
             raise exceptions.AbortException(e)(repr(e))
 
         # mikhail: client.fetch() returns all the remote refs, but fetches only
         # refs filtered by `determine_wants` function. We need to filter result
         # as well
         if refs:
             remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
 
         if apply_refs:
             # TODO: johbo: Needs proper test coverage with a git repository
             # that contains a tag object, so that we would end up with
             # a peeled ref at this point.
             for k in remote_refs:
-                if k.endswith(self.peeled_ref_marker):
+                if k.endswith(PEELED_REF_MARKER):
                     log.debug("Skipping peeled reference %s", k)
                     continue
                 repo[k] = remote_refs[k]
 
             if refs and not update_after:
                 # mikhail: explicitly set the head to the last ref.
                 repo['HEAD'] = remote_refs[refs[-1]]
 
         if update_after:
             # we want to checkout HEAD
             repo["HEAD"] = remote_refs["HEAD"]
             index.build_index_from_tree(repo.path, repo.index_path(),
                                         repo.object_store, repo["HEAD"].tree)
         return remote_refs
 
473 @reraise_safe_exceptions
599 @reraise_safe_exceptions
474 def sync_fetch(self, wire, url, refs=None):
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
475 repo = self._factory.repo(wire)
601 repo = self._factory.repo(wire)
476 if refs and not isinstance(refs, (list, tuple)):
602 if refs and not isinstance(refs, (list, tuple)):
477 refs = [refs]
603 refs = [refs]
604
478 config = self._wire_to_config(wire)
605 config = self._wire_to_config(wire)
479 # get all remote refs we'll use to fetch later
606 # get all remote refs we'll use to fetch later
607 cmd = ['ls-remote']
608 if not all_refs:
609 cmd += ['--heads', '--tags']
610 cmd += [url]
480 output, __ = self.run_git_command(
611 output, __ = self.run_git_command(
481 wire, ['ls-remote', url], fail_on_stderr=False,
612 wire, cmd, fail_on_stderr=False,
482 _copts=self._remote_conf(config),
613 _copts=self._remote_conf(config),
483 extra_env={'GIT_TERMINAL_PROMPT': '0'})
614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
484
615
485 remote_refs = collections.OrderedDict()
616 remote_refs = collections.OrderedDict()
486 fetch_refs = []
617 fetch_refs = []
487
618
488 for ref_line in output.splitlines():
619 for ref_line in output.splitlines():
489 sha, ref = ref_line.split('\t')
620 sha, ref = ref_line.split('\t')
490 sha = sha.strip()
621 sha = sha.strip()
491 if ref in remote_refs:
622 if ref in remote_refs:
492 # duplicate, skip
623 # duplicate, skip
493 continue
624 continue
494 if ref.endswith(self.peeled_ref_marker):
625 if ref.endswith(PEELED_REF_MARKER):
495 log.debug("Skipping peeled reference %s", ref)
626 log.debug("Skipping peeled reference %s", ref)
496 continue
627 continue
497 # don't sync HEAD
628 # don't sync HEAD
498 if ref in ['HEAD']:
629 if ref in ['HEAD']:
499 continue
630 continue
500
631
501 remote_refs[ref] = sha
632 remote_refs[ref] = sha
502
633
503 if refs and sha in refs:
634 if refs and sha in refs:
504 # we filter fetch using our specified refs
635 # we filter fetch using our specified refs
505 fetch_refs.append('{}:{}'.format(ref, ref))
636 fetch_refs.append('{}:{}'.format(ref, ref))
506 elif not refs:
637 elif not refs:
507 fetch_refs.append('{}:{}'.format(ref, ref))
638 fetch_refs.append('{}:{}'.format(ref, ref))
508 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640
509 if fetch_refs:
641 if fetch_refs:
510 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
511 fetch_refs_chunks = list(chunk)
643 fetch_refs_chunks = list(chunk)
512 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
513 _out, _err = self.run_git_command(
645 _out, _err = self.run_git_command(
514 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
515 fail_on_stderr=False,
647 fail_on_stderr=False,
516 _copts=self._remote_conf(config),
648 _copts=self._remote_conf(config),
517 extra_env={'GIT_TERMINAL_PROMPT': '0'})
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
518
650
519 return remote_refs
651 return remote_refs
520
652
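The fetch above passes refspecs on the git command line, so large ref sets are split into chunks to stay under OS argument-length limits. A minimal standalone sketch of the same idea; the helper name and direct subprocess call are illustrative, not part of this codebase:

import subprocess
import more_itertools

def fetch_in_chunks(repo_path, url, fetch_refs, chunk_size=1024 * 4):
    # mirror the chunked fetch above: each chunk becomes one git call
    for chunk in more_itertools.chunked(fetch_refs, chunk_size):
        subprocess.check_call(
            ['git', 'fetch', url, '--force', '--prune', '--'] + list(chunk),
            cwd=repo_path)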
521 @reraise_safe_exceptions
653 @reraise_safe_exceptions
522 def sync_push(self, wire, url, refs=None):
654 def sync_push(self, wire, url, refs=None):
523 if not self.check_url(url, wire):
655 if not self.check_url(url, wire):
524 return
656 return
525 config = self._wire_to_config(wire)
657 config = self._wire_to_config(wire)
526 repo = self._factory.repo(wire)
658 self._factory.repo(wire)
527 self.run_git_command(
659 self.run_git_command(
528 wire, ['push', url, '--mirror'], fail_on_stderr=False,
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
529 _copts=self._remote_conf(config),
661 _copts=self._remote_conf(config),
530 extra_env={'GIT_TERMINAL_PROMPT': '0'})
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
531
663
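For reference, the mirror push above is the plumbing equivalent of `git push <url> --mirror`, which pushes all refs (branches, tags, notes) and prunes remote refs that were deleted locally. A hedged standalone sketch; the helper name is illustrative:

import subprocess

def mirror_push(repo_path, url):
    # --mirror pushes all refs and prunes remote refs deleted locally
    subprocess.check_call(['git', 'push', url, '--mirror'], cwd=repo_path)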
532 @reraise_safe_exceptions
664 @reraise_safe_exceptions
533 def get_remote_refs(self, wire, url):
665 def get_remote_refs(self, wire, url):
534 repo = Repo(url)
666 repo = Repo(url)
535 return repo.get_refs()
667 return repo.get_refs()
536
668
537 @reraise_safe_exceptions
669 @reraise_safe_exceptions
538 def get_description(self, wire):
670 def get_description(self, wire):
539 repo = self._factory.repo(wire)
671 repo = self._factory.repo(wire)
540 return repo.get_description()
672 return repo.get_description()
541
673
542 @reraise_safe_exceptions
674 @reraise_safe_exceptions
543 def get_missing_revs(self, wire, rev1, rev2, path2):
675 def get_missing_revs(self, wire, rev1, rev2, path2):
544 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
545 LocalGitClient(thin_packs=False).fetch(path2, repo)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
546
678
547 wire_remote = wire.copy()
679 wire_remote = wire.copy()
548 wire_remote['path'] = path2
680 wire_remote['path'] = path2
549 repo_remote = self._factory.repo(wire_remote)
681 repo_remote = self._factory.repo(wire_remote)
550 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
551
683
552 revs = [
684 revs = [
553 x.commit.id
685 x.commit.id
554 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
555 return revs
687 return revs
556
688
557 @reraise_safe_exceptions
689 @reraise_safe_exceptions
558 def get_object(self, wire, sha):
690 def get_object(self, wire, sha):
559 repo = self._factory.repo(wire)
691 cache_on, context_uid, repo_id = self._cache_on(wire)
560 obj = repo.get_object(sha)
692 @self.region.conditional_cache_on_arguments(condition=cache_on)
561 commit_id = obj.id
693 def _get_object(_context_uid, _repo_id, _sha):
694 repo_init = self._factory.repo_libgit2(wire)
695 with repo_init as repo:
562
696
563 if isinstance(obj, Tag):
697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
564 commit_id = obj.object[1]
698 try:
699 commit = repo.revparse_single(sha)
700 except (KeyError, ValueError) as e:
701 raise exceptions.LookupException(e)(missing_commit_err)
702
703 is_tag = False
704 if isinstance(commit, pygit2.Tag):
705 commit = repo.get(commit.target)
706 is_tag = True
707
708 check_dangling = True
709 if is_tag:
710 check_dangling = False
565
711
566 return {
712 # we resolved the sha via a reference; if it parsed, the commit is not dangling
567 'id': obj.id,
713 if sha != commit.hex:
568 'type': obj.type_name,
714 check_dangling = False
569 'commit_id': commit_id,
715
570 'idx': 0
716 if check_dangling:
571 }
717 # check for dangling commit
718 for branch in repo.branches.with_commit(commit.hex):
719 if branch:
720 break
721 else:
722 raise exceptions.LookupException(None)(missing_commit_err)
572
723
573 @reraise_safe_exceptions
724 commit_id = commit.hex
574 def get_object_attrs(self, wire, sha, *attrs):
725 type_id = commit.type
575 repo = self._factory.repo(wire)
726
576 obj = repo.get_object(sha)
727 return {
577 return list(getattr(obj, a) for a in attrs)
728 'id': commit_id,
729 'type': self._type_id_to_name(type_id),
730 'commit_id': commit_id,
731 'idx': 0
732 }
733
734 return _get_object(context_uid, repo_id, sha)
578
735
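The `conditional_cache_on_arguments(condition=...)` decorator used throughout these rewrites is RhodeCode-specific; stock dogpile.cache only ships `cache_on_arguments`. A rough approximation of the conditional behaviour on top of plain dogpile.cache, with all names here being illustrative:

from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory')

def conditional_cache_on_arguments(condition):
    # cache the call only when `condition` is true, else call through
    def decorator(func):
        cached = region.cache_on_arguments()(func)
        def wrapper(*args):
            return cached(*args) if condition else func(*args)
        return wrapper
    return decorator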
579 @reraise_safe_exceptions
736 @reraise_safe_exceptions
580 def get_refs(self, wire):
737 def get_refs(self, wire):
581 repo = self._factory.repo(wire)
738 cache_on, context_uid, repo_id = self._cache_on(wire)
582 result = {}
739 @self.region.conditional_cache_on_arguments(condition=cache_on)
583 for ref, sha in repo.refs.as_dict().items():
740 def _get_refs(_context_uid, _repo_id):
584 peeled_sha = repo.get_peeled(ref)
741
585 result[ref] = peeled_sha
742 repo_init = self._factory.repo_libgit2(wire)
586 return result
743 with repo_init as repo:
744 regex = re.compile('^refs/(heads|tags)/')
745 return {x.name: x.target.hex for x in
746 filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())}
747
748 return _get_refs(context_uid, repo_id)
587
749
588 @reraise_safe_exceptions
750 @reraise_safe_exceptions
589 def get_refs_path(self, wire):
751 def get_branch_pointers(self, wire):
590 repo = self._factory.repo(wire)
752 cache_on, context_uid, repo_id = self._cache_on(wire)
591 return repo.refs.path
753 @self.region.conditional_cache_on_arguments(condition=cache_on)
754 def _get_branch_pointers(_context_uid, _repo_id):
755
756 repo_init = self._factory.repo_libgit2(wire)
757 regex = re.compile('^refs/heads')
758 with repo_init as repo:
759 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
760 return {x.target.hex: x.shorthand for x in branches}
761
762 return _get_branch_pointers(context_uid, repo_id)
592
763
593 @reraise_safe_exceptions
764 @reraise_safe_exceptions
594 def head(self, wire, show_exc=True):
765 def head(self, wire, show_exc=True):
595 repo = self._factory.repo(wire)
766 cache_on, context_uid, repo_id = self._cache_on(wire)
596 try:
767 @self.region.conditional_cache_on_arguments(condition=cache_on)
597 return repo.head()
768 def _head(_context_uid, _repo_id, _show_exc):
598 except Exception:
769 repo_init = self._factory.repo_libgit2(wire)
599 if show_exc:
770 with repo_init as repo:
600 raise
771 try:
772 return repo.head.peel().hex
773 except Exception:
774 if show_exc:
775 raise
776 return _head(context_uid, repo_id, show_exc)
601
777
602 @reraise_safe_exceptions
778 @reraise_safe_exceptions
603 def init(self, wire):
779 def init(self, wire):
604 repo_path = str_to_dulwich(wire['path'])
780 repo_path = str_to_dulwich(wire['path'])
605 self.repo = Repo.init(repo_path)
781 self.repo = Repo.init(repo_path)
606
782
607 @reraise_safe_exceptions
783 @reraise_safe_exceptions
608 def init_bare(self, wire):
784 def init_bare(self, wire):
609 repo_path = str_to_dulwich(wire['path'])
785 repo_path = str_to_dulwich(wire['path'])
610 self.repo = Repo.init_bare(repo_path)
786 self.repo = Repo.init_bare(repo_path)
611
787
612 @reraise_safe_exceptions
788 @reraise_safe_exceptions
613 def revision(self, wire, rev):
789 def revision(self, wire, rev):
614 repo = self._factory.repo(wire)
790
615 obj = repo[rev]
791 cache_on, context_uid, repo_id = self._cache_on(wire)
616 obj_data = {
792 @self.region.conditional_cache_on_arguments(condition=cache_on)
617 'id': obj.id,
793 def _revision(_context_uid, _repo_id, _rev):
618 }
794 repo_init = self._factory.repo_libgit2(wire)
619 try:
795 with repo_init as repo:
620 obj_data['tree'] = obj.tree
796 commit = repo[rev]
621 except AttributeError:
797 obj_data = {
622 pass
798 'id': commit.id.hex,
623 return obj_data
799 }
800 # tree objects themselves don't have a tree_id attribute
801 if hasattr(commit, 'tree_id'):
802 obj_data['tree'] = commit.tree_id.hex
803
804 return obj_data
805 return _revision(context_uid, repo_id, rev)
806
807 @reraise_safe_exceptions
808 def date(self, wire, commit_id):
809 cache_on, context_uid, repo_id = self._cache_on(wire)
810 @self.region.conditional_cache_on_arguments(condition=cache_on)
811 def _date(_repo_id, _commit_id):
812 repo_init = self._factory.repo_libgit2(wire)
813 with repo_init as repo:
814 commit = repo[commit_id]
815
816 if hasattr(commit, 'commit_time'):
817 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
818 else:
819 commit = commit.get_object()
820 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
821
822 # TODO(marcink): check dulwich difference of offset vs timezone
823 return [commit_time, commit_time_offset]
824 return _date(repo_id, commit_id)
624
825
625 @reraise_safe_exceptions
826 @reraise_safe_exceptions
626 def commit_attribute(self, wire, rev, attr):
827 def author(self, wire, commit_id):
627 repo = self._factory.repo(wire)
828 cache_on, context_uid, repo_id = self._cache_on(wire)
628 obj = repo[rev]
829 @self.region.conditional_cache_on_arguments(condition=cache_on)
629 return getattr(obj, attr)
830 def _author(_repo_id, _commit_id):
831 repo_init = self._factory.repo_libgit2(wire)
832 with repo_init as repo:
833 commit = repo[commit_id]
834
835 if hasattr(commit, 'author'):
836 author = commit.author
837 else:
838 author = commit.get_object().author
839
840 if author.email:
841 return u"{} <{}>".format(author.name, author.email)
842
843 return u"{}".format(author.raw_name)
844 return _author(repo_id, commit_id)
845
846 @reraise_safe_exceptions
847 def message(self, wire, commit_id):
848 cache_on, context_uid, repo_id = self._cache_on(wire)
849 @self.region.conditional_cache_on_arguments(condition=cache_on)
850 def _message(_repo_id, _commit_id):
851 repo_init = self._factory.repo_libgit2(wire)
852 with repo_init as repo:
853 commit = repo[commit_id]
854 return commit.message
855 return _message(repo_id, commit_id)
856
857 @reraise_safe_exceptions
858 def parents(self, wire, commit_id):
859 cache_on, context_uid, repo_id = self._cache_on(wire)
860 @self.region.conditional_cache_on_arguments(condition=cache_on)
861 def _parents(_repo_id, _commit_id):
862 repo_init = self._factory.repo_libgit2(wire)
863 with repo_init as repo:
864 commit = repo[commit_id]
865 if hasattr(commit, 'parent_ids'):
866 parent_ids = commit.parent_ids
867 else:
868 parent_ids = commit.get_object().parent_ids
869
870 return [x.hex for x in parent_ids]
871 return _parents(repo_id, commit_id)
872
873 @reraise_safe_exceptions
874 def children(self, wire, commit_id):
875 cache_on, context_uid, repo_id = self._cache_on(wire)
876 @self.region.conditional_cache_on_arguments(condition=cache_on)
877 def _children(_repo_id, _commit_id):
878 output, __ = self.run_git_command(
879 wire, ['rev-list', '--all', '--children'])
880
881 child_ids = []
882 pat = re.compile(r'^%s' % commit_id)
883 for l in output.splitlines():
884 if pat.match(l):
885 found_ids = l.split(' ')[1:]
886 child_ids.extend(found_ids)
887
888 return child_ids
889 return _children(repo_id, commit_id)
630
890
631 @reraise_safe_exceptions
891 @reraise_safe_exceptions
632 def set_refs(self, wire, key, value):
892 def set_refs(self, wire, key, value):
633 repo = self._factory.repo(wire)
893 repo_init = self._factory.repo_libgit2(wire)
634 repo.refs[key] = value
894 with repo_init as repo:
895 repo.references.create(key, value, force=True)
896
897 @reraise_safe_exceptions
898 def create_branch(self, wire, branch_name, commit_id, force=False):
899 repo_init = self._factory.repo_libgit2(wire)
900 with repo_init as repo:
901 commit = repo[commit_id]
902
903 if force:
904 repo.branches.local.create(branch_name, commit, force=force)
905 elif not repo.branches.get(branch_name):
906 # create only if that branch doesn't already exist
907 repo.branches.local.create(branch_name, commit, force=force)
635
908
636 @reraise_safe_exceptions
909 @reraise_safe_exceptions
637 def remove_ref(self, wire, key):
910 def remove_ref(self, wire, key):
638 repo = self._factory.repo(wire)
911 repo_init = self._factory.repo_libgit2(wire)
639 del repo.refs[key]
912 with repo_init as repo:
913 repo.references.delete(key)
914
915 @reraise_safe_exceptions
916 def tag_remove(self, wire, tag_name):
917 repo_init = self._factory.repo_libgit2(wire)
918 with repo_init as repo:
919 key = 'refs/tags/{}'.format(tag_name)
920 repo.references.delete(key)
640
921
641 @reraise_safe_exceptions
922 @reraise_safe_exceptions
642 def tree_changes(self, wire, source_id, target_id):
923 def tree_changes(self, wire, source_id, target_id):
924 # TODO(marcink): remove this, it seems to be used only by tests
643 repo = self._factory.repo(wire)
925 repo = self._factory.repo(wire)
644 source = repo[source_id].tree if source_id else None
926 source = repo[source_id].tree if source_id else None
645 target = repo[target_id].tree
927 target = repo[target_id].tree
646 result = repo.object_store.tree_changes(source, target)
928 result = repo.object_store.tree_changes(source, target)
647 return list(result)
929 return list(result)
648
930
649 @reraise_safe_exceptions
931 @reraise_safe_exceptions
932 def tree_and_type_for_path(self, wire, commit_id, path):
933
934 cache_on, context_uid, repo_id = self._cache_on(wire)
935 @self.region.conditional_cache_on_arguments(condition=cache_on)
936 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
937 repo_init = self._factory.repo_libgit2(wire)
938
939 with repo_init as repo:
940 commit = repo[commit_id]
941 try:
942 tree = commit.tree[path]
943 except KeyError:
944 return None, None, None
945
946 return tree.id.hex, tree.type, tree.filemode
947 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
948
949 @reraise_safe_exceptions
650 def tree_items(self, wire, tree_id):
950 def tree_items(self, wire, tree_id):
651 repo = self._factory.repo(wire)
951 cache_on, context_uid, repo_id = self._cache_on(wire)
652 tree = repo[tree_id]
952 @self.region.conditional_cache_on_arguments(condition=cache_on)
953 def _tree_items(_repo_id, _tree_id):
954
955 repo_init = self._factory.repo_libgit2(wire)
956 with repo_init as repo:
957 try:
958 tree = repo[tree_id]
959 except KeyError:
960 raise ObjectMissing('No tree with id: {}'.format(tree_id))
961
962 result = []
963 for item in tree:
964 item_sha = item.hex
965 item_mode = item.filemode
966 item_type = item.type
967
968 if item_type == 'commit':
969 # NOTE(marcink): we translate submodules to 'link' for backward compat
970 item_type = 'link'
971
972 result.append((item.name, item_mode, item_sha, item_type))
973 return result
974 return _tree_items(repo_id, tree_id)
975
976 @reraise_safe_exceptions
977 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
978 """
979 Old version that uses subprocess to call diff
980 """
981
982 flags = [
983 '-U%s' % context, '--patch',
984 '--binary',
985 '--find-renames',
986 '--no-indent-heuristic',
987 # '--indent-heuristic',
988 #'--full-index',
989 #'--abbrev=40'
990 ]
991
992 if opt_ignorews:
993 flags.append('--ignore-all-space')
994
995 if commit_id_1 == self.EMPTY_COMMIT:
996 cmd = ['show'] + flags + [commit_id_2]
997 else:
998 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
999
1000 if file_filter:
1001 cmd.extend(['--', file_filter])
1002
1003 diff, __ = self.run_git_command(wire, cmd)
1004 # If we used the 'show' command, strip the first few lines (until the
1005 # actual diff starts)
1006 if commit_id_1 == self.EMPTY_COMMIT:
1007 lines = diff.splitlines()
1008 x = 0
1009 for line in lines:
1010 if line.startswith('diff'):
1011 break
1012 x += 1
1013 # Append a newline just like the 'diff' command does
1014 diff = '\n'.join(lines[x:]) + '\n'
1015 return diff
1016
1017 @reraise_safe_exceptions
1018 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1019 repo_init = self._factory.repo_libgit2(wire)
1020 with repo_init as repo:
1021 swap = True
1022 flags = 0
1023 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1024
1025 if opt_ignorews:
1026 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1027
1028 if commit_id_1 == self.EMPTY_COMMIT:
1029 comm1 = repo[commit_id_2]
1030 diff_obj = comm1.tree.diff_to_tree(
1031 flags=flags, context_lines=context, swap=swap)
1032
1033 else:
1034 comm1 = repo[commit_id_2]
1035 comm2 = repo[commit_id_1]
1036 diff_obj = comm1.tree.diff_to_tree(
1037 comm2.tree, flags=flags, context_lines=context, swap=swap)
1038 similar_flags = 0
1039 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1040 diff_obj.find_similar(flags=similar_flags)
1041
1042 if file_filter:
1043 for p in diff_obj:
1044 if p.delta.old_file.path == file_filter:
1045 return p.patch or ''
1046 # no matching path == no diff
1047 return ''
1048 return diff_obj.patch or ''
1049
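A condensed standalone version of the pygit2 diff call above, assuming a local repository path; the function name and default context are illustrative:

import pygit2

def tree_diff(repo_path, commit_id_1, commit_id_2, context=3):
    repo = pygit2.Repository(repo_path)
    old, new = repo[commit_id_1], repo[commit_id_2]
    # diff old tree -> new tree, showing binary deltas like the code above
    diff = old.tree.diff_to_tree(
        new.tree, context_lines=context, flags=pygit2.GIT_DIFF_SHOW_BINARY)
    diff.find_similar(flags=pygit2.GIT_DIFF_FIND_RENAMES)
    return diff.patch or ''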
1050 @reraise_safe_exceptions
1051 def node_history(self, wire, commit_id, path, limit):
1052 cache_on, context_uid, repo_id = self._cache_on(wire)
1053 @self.region.conditional_cache_on_arguments(condition=cache_on)
1054 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1055 # optimize for n==1, rev-list is much faster for that use-case
1056 if limit == 1:
1057 cmd = ['rev-list', '-1', commit_id, '--', path]
1058 else:
1059 cmd = ['log']
1060 if limit:
1061 cmd.extend(['-n', str(safe_int(limit, 0))])
1062 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1063
1064 output, __ = self.run_git_command(wire, cmd)
1065 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1066
1067 return [x for x in commit_ids]
1068 return _node_history(context_uid, repo_id, commit_id, path, limit)
1069
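The n==1 fast path matters because `git rev-list -1 <commit> -- <path>` stops at the first matching commit, while a full `git log` walks more history. A hedged equivalent on the plain command line; the helper name is illustrative:

import re
import subprocess

def last_commit_for_path(repo_path, commit_id, path):
    # equivalent of the limit==1 branch above
    out = subprocess.check_output(
        ['git', 'rev-list', '-1', commit_id, '--', path], cwd=repo_path)
    matches = re.findall(r'[0-9a-fA-F]{40}', out)
    return matches[0] if matches else None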
1070 @reraise_safe_exceptions
1071 def node_annotate(self, wire, commit_id, path):
1072
1073 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1074 # -l ==> outputs long shas (and we need all 40 characters)
1075 # --root ==> doesn't put '^' character for boundaries
1076 # -r commit_id ==> blames for the given commit
1077 output, __ = self.run_git_command(wire, cmd)
653
1078
654 result = []
1079 result = []
655 for item in tree.iteritems():
1080 for i, blame_line in enumerate(output.split('\n')[:-1]):
656 item_sha = item.sha
1081 line_no = i + 1
657 item_mode = item.mode
1082 commit_id, line = re.split(r' ', blame_line, 1)
658
1083 result.append((line_no, commit_id, line))
659 if FILE_MODE(item_mode) == GIT_LINK:
660 item_type = "link"
661 else:
662 item_type = repo[item_sha].type_name
663
664 result.append((item.path, item_mode, item_sha, item_type))
665 return result
1084 return result
666
1085
667 @reraise_safe_exceptions
1086 @reraise_safe_exceptions
668 def update_server_info(self, wire):
1087 def update_server_info(self, wire):
669 repo = self._factory.repo(wire)
1088 repo = self._factory.repo(wire)
670 update_server_info(repo)
1089 update_server_info(repo)
671
1090
672 @reraise_safe_exceptions
1091 @reraise_safe_exceptions
673 def discover_git_version(self):
1092 def get_all_commit_ids(self, wire):
674 stdout, _ = self.run_git_command(
1093
675 {}, ['--version'], _bare=True, _safe=True)
1094 cache_on, context_uid, repo_id = self._cache_on(wire)
676 prefix = 'git version'
1095 @self.region.conditional_cache_on_arguments(condition=cache_on)
677 if stdout.startswith(prefix):
1096 def _get_all_commit_ids(_context_uid, _repo_id):
678 stdout = stdout[len(prefix):]
1097
679 return stdout.strip()
1098 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1099 try:
1100 output, __ = self.run_git_command(wire, cmd)
1101 return output.splitlines()
1102 except Exception:
1103 # Can be raised for empty repositories
1104 return []
1105 return _get_all_commit_ids(context_uid, repo_id)
680
1106
681 @reraise_safe_exceptions
1107 @reraise_safe_exceptions
682 def run_git_command(self, wire, cmd, **opts):
1108 def run_git_command(self, wire, cmd, **opts):
683 path = wire.get('path', None)
1109 path = wire.get('path', None)
684
1110
685 if path and os.path.isdir(path):
1111 if path and os.path.isdir(path):
686 opts['cwd'] = path
1112 opts['cwd'] = path
687
1113
688 if '_bare' in opts:
1114 if '_bare' in opts:
689 _copts = []
1115 _copts = []
690 del opts['_bare']
1116 del opts['_bare']
691 else:
1117 else:
692 _copts = ['-c', 'core.quotepath=false', ]
1118 _copts = ['-c', 'core.quotepath=false', ]
693 safe_call = False
1119 safe_call = False
694 if '_safe' in opts:
1120 if '_safe' in opts:
695 # no exc on failure
1121 # no exc on failure
696 del opts['_safe']
1122 del opts['_safe']
697 safe_call = True
1123 safe_call = True
698
1124
699 if '_copts' in opts:
1125 if '_copts' in opts:
700 _copts.extend(opts['_copts'] or [])
1126 _copts.extend(opts['_copts'] or [])
701 del opts['_copts']
1127 del opts['_copts']
702
1128
703 gitenv = os.environ.copy()
1129 gitenv = os.environ.copy()
704 gitenv.update(opts.pop('extra_env', {}))
1130 gitenv.update(opts.pop('extra_env', {}))
705 # need to clean/fix GIT_DIR!
1131 # need to clean/fix GIT_DIR!
706 if 'GIT_DIR' in gitenv:
1132 if 'GIT_DIR' in gitenv:
707 del gitenv['GIT_DIR']
1133 del gitenv['GIT_DIR']
708 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1134 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
709 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1135 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
710
1136
711 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1137 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
712 _opts = {'env': gitenv, 'shell': False}
1138 _opts = {'env': gitenv, 'shell': False}
713
1139
1140 proc = None
714 try:
1141 try:
715 _opts.update(opts)
1142 _opts.update(opts)
716 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1143 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
717
1144
718 return ''.join(p), ''.join(p.error)
1145 return ''.join(proc), ''.join(proc.error)
719 except (EnvironmentError, OSError) as err:
1146 except (EnvironmentError, OSError) as err:
720 cmd = ' '.join(cmd) # human friendly CMD
1147 cmd = ' '.join(cmd) # human friendly CMD
721 tb_err = ("Couldn't run git command (%s).\n"
1148 tb_err = ("Couldn't run git command (%s).\n"
722 "Original error was:%s\n"
1149 "Original error was:%s\n"
723 "Call options:%s\n"
1150 "Call options:%s\n"
724 % (cmd, err, _opts))
1151 % (cmd, err, _opts))
725 log.exception(tb_err)
1152 log.exception(tb_err)
726 if safe_call:
1153 if safe_call:
727 return '', err
1154 return '', err
728 else:
1155 else:
729 raise exceptions.VcsException()(tb_err)
1156 raise exceptions.VcsException()(tb_err)
1157 finally:
1158 if proc:
1159 proc.close()
730
1160
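The environment handling above is the important part of `run_git_command`: an inherited GIT_DIR could silently redirect every call to the wrong repository, so it is stripped, and global config plus terminal prompts are disabled. A small sketch of just that step; the function name is illustrative:

import os

def sanitized_git_env(extra_env=None):
    env = os.environ.copy()
    env.update(extra_env or {})
    env.pop('GIT_DIR', None)  # never let an inherited GIT_DIR leak in
    env['GIT_CONFIG_NOGLOBAL'] = '1'
    env['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
    return env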
731 @reraise_safe_exceptions
1161 @reraise_safe_exceptions
732 def install_hooks(self, wire, force=False):
1162 def install_hooks(self, wire, force=False):
733 from vcsserver.hook_utils import install_git_hooks
1163 from vcsserver.hook_utils import install_git_hooks
734 repo = self._factory.repo(wire)
1164 bare = self.bare(wire)
735 return install_git_hooks(repo.path, repo.bare, force_create=force)
1165 path = wire['path']
1166 return install_git_hooks(path, bare, force_create=force)
736
1167
737 @reraise_safe_exceptions
1168 @reraise_safe_exceptions
738 def get_hooks_info(self, wire):
1169 def get_hooks_info(self, wire):
739 from vcsserver.hook_utils import (
1170 from vcsserver.hook_utils import (
740 get_git_pre_hook_version, get_git_post_hook_version)
1171 get_git_pre_hook_version, get_git_post_hook_version)
741 repo = self._factory.repo(wire)
1172 bare = self.bare(wire)
1173 path = wire['path']
742 return {
1174 return {
743 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
1175 'pre_version': get_git_pre_hook_version(path, bare),
744 'post_version': get_git_post_hook_version(repo.path, repo.bare),
1176 'post_version': get_git_post_hook_version(path, bare),
745 }
1177 }
746
747
748 def str_to_dulwich(value):
749 """
750 Dulwich 0.10.1a requires `unicode` objects to be passed in.
751 """
752 return value.decode(settings.WIRE_ENCODING)
@@ -1,856 +1,990 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase, purge
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30
30
31 import vcsserver
31 import vcsserver
32 from vcsserver import exceptions
32 from vcsserver import exceptions
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.hgcompat import (
34 from vcsserver.hgcompat import (
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 RepoLookupError, InterventionRequired, RequirementError)
39 RepoLookupError, InterventionRequired, RequirementError)
40 from vcsserver.vcs_base import RemoteBase
40
41
41 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
42
43
43
44
44 def make_ui_from_config(repo_config):
45 def make_ui_from_config(repo_config):
45
46
46 class LoggingUI(ui.ui):
47 class LoggingUI(ui.ui):
47 def status(self, *msg, **opts):
48 def status(self, *msg, **opts):
48 log.info(' '.join(msg).rstrip('\n'))
49 log.info(' '.join(msg).rstrip('\n'))
49 super(LoggingUI, self).status(*msg, **opts)
50 super(LoggingUI, self).status(*msg, **opts)
50
51
51 def warn(self, *msg, **opts):
52 def warn(self, *msg, **opts):
52 log.warn(' '.join(msg).rstrip('\n'))
53 log.warn(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).warn(*msg, **opts)
54 super(LoggingUI, self).warn(*msg, **opts)
54
55
55 def error(self, *msg, **opts):
56 def error(self, *msg, **opts):
56 log.error(' '.join(msg).rstrip('\n'))
57 log.error(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).error(*msg, **opts)
58 super(LoggingUI, self).error(*msg, **opts)
58
59
59 def note(self, *msg, **opts):
60 def note(self, *msg, **opts):
60 log.info(' '.join(msg).rstrip('\n'))
61 log.info(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).note(*msg, **opts)
62 super(LoggingUI, self).note(*msg, **opts)
62
63
63 def debug(self, *msg, **opts):
64 def debug(self, *msg, **opts):
64 log.debug(' '.join(msg).rstrip('\n'))
65 log.debug(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).debug(*msg, **opts)
66 super(LoggingUI, self).debug(*msg, **opts)
66
67
67 baseui = LoggingUI()
68 baseui = LoggingUI()
68
69
69 # clean the baseui object
70 # clean the baseui object
70 baseui._ocfg = hgconfig.config()
71 baseui._ocfg = hgconfig.config()
71 baseui._ucfg = hgconfig.config()
72 baseui._ucfg = hgconfig.config()
72 baseui._tcfg = hgconfig.config()
73 baseui._tcfg = hgconfig.config()
73
74
74 for section, option, value in repo_config:
75 for section, option, value in repo_config:
75 baseui.setconfig(section, option, value)
76 baseui.setconfig(section, option, value)
76
77
77 # make our hgweb quiet so it doesn't print output
78 # make our hgweb quiet so it doesn't print output
78 baseui.setconfig('ui', 'quiet', 'true')
79 baseui.setconfig('ui', 'quiet', 'true')
79
80
80 baseui.setconfig('ui', 'paginate', 'never')
81 baseui.setconfig('ui', 'paginate', 'never')
81 # for better Error reporting of Mercurial
82 # for better Error reporting of Mercurial
82 baseui.setconfig('ui', 'message-output', 'stderr')
83 baseui.setconfig('ui', 'message-output', 'stderr')
83
84
84 # force mercurial to only use 1 thread, otherwise it may try to set a
85 # force mercurial to only use 1 thread, otherwise it may try to set a
85 # signal in a non-main thread, thus generating a ValueError.
86 # signal in a non-main thread, thus generating a ValueError.
86 baseui.setconfig('worker', 'numcpus', 1)
87 baseui.setconfig('worker', 'numcpus', 1)
87
88
88 # If there is no config for the largefiles extension, we explicitly disable
89 # If there is no config for the largefiles extension, we explicitly disable
89 # it here. This overrides settings from repositories hgrc file. Recent
90 # it here. This overrides settings from repositories hgrc file. Recent
90 # mercurial versions enable largefiles in hgrc on clone from largefile
91 # mercurial versions enable largefiles in hgrc on clone from largefile
91 # repo.
92 # repo.
92 if not baseui.hasconfig('extensions', 'largefiles'):
93 if not baseui.hasconfig('extensions', 'largefiles'):
93 log.debug('Explicitly disable largefiles extension for repo.')
94 log.debug('Explicitly disable largefiles extension for repo.')
94 baseui.setconfig('extensions', 'largefiles', '!')
95 baseui.setconfig('extensions', 'largefiles', '!')
95
96
96 return baseui
97 return baseui
97
98
98
99
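Usage sketch for `make_ui_from_config` as defined above: the config argument is a flat iterable of (section, option, value) triples, for example (values here are illustrative):

repo_config = [
    ('ui', 'username', 'vcsserver'),
    ('phases', 'publish', 'true'),
]
baseui = make_ui_from_config(repo_config)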
99 def reraise_safe_exceptions(func):
100 def reraise_safe_exceptions(func):
100 """Decorator for converting mercurial exceptions to something neutral."""
101 """Decorator for converting mercurial exceptions to something neutral."""
102
101 def wrapper(*args, **kwargs):
103 def wrapper(*args, **kwargs):
102 try:
104 try:
103 return func(*args, **kwargs)
105 return func(*args, **kwargs)
104 except (Abort, InterventionRequired) as e:
106 except (Abort, InterventionRequired) as e:
105 raise_from_original(exceptions.AbortException(e))
107 raise_from_original(exceptions.AbortException(e))
106 except RepoLookupError as e:
108 except RepoLookupError as e:
107 raise_from_original(exceptions.LookupException(e))
109 raise_from_original(exceptions.LookupException(e))
108 except RequirementError as e:
110 except RequirementError as e:
109 raise_from_original(exceptions.RequirementException(e))
111 raise_from_original(exceptions.RequirementException(e))
110 except RepoError as e:
112 except RepoError as e:
111 raise_from_original(exceptions.VcsException(e))
113 raise_from_original(exceptions.VcsException(e))
112 except LookupError as e:
114 except LookupError as e:
113 raise_from_original(exceptions.LookupException(e))
115 raise_from_original(exceptions.LookupException(e))
114 except Exception as e:
116 except Exception as e:
115 if not hasattr(e, '_vcs_kind'):
117 if not hasattr(e, '_vcs_kind'):
116 log.exception("Unhandled exception in hg remote call")
118 log.exception("Unhandled exception in hg remote call")
117 raise_from_original(exceptions.UnhandledException(e))
119 raise_from_original(exceptions.UnhandledException(e))
118
120
119 raise
121 raise
120 return wrapper
122 return wrapper
121
123
122
124
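Usage sketch for the decorator above: any remote call wrapped with it surfaces Mercurial errors as neutral vcsserver exceptions that can safely cross the RPC boundary. The function below is illustrative and not part of the module:

@reraise_safe_exceptions
def lookup_commit(factory, wire, ref):
    repo = factory.repo(wire)
    # a bad ref raises RepoLookupError, re-raised as LookupException
    return hex(repo.lookup(ref))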
123 class MercurialFactory(RepoFactory):
125 class MercurialFactory(RepoFactory):
124 repo_type = 'hg'
126 repo_type = 'hg'
125
127
126 def _create_config(self, config, hooks=True):
128 def _create_config(self, config, hooks=True):
127 if not hooks:
129 if not hooks:
128 hooks_to_clean = frozenset((
130 hooks_to_clean = frozenset((
129 'changegroup.repo_size', 'preoutgoing.pre_pull',
131 'changegroup.repo_size', 'preoutgoing.pre_pull',
130 'outgoing.pull_logger', 'prechangegroup.pre_push'))
132 'outgoing.pull_logger', 'prechangegroup.pre_push'))
131 new_config = []
133 new_config = []
132 for section, option, value in config:
134 for section, option, value in config:
133 if section == 'hooks' and option in hooks_to_clean:
135 if section == 'hooks' and option in hooks_to_clean:
134 continue
136 continue
135 new_config.append((section, option, value))
137 new_config.append((section, option, value))
136 config = new_config
138 config = new_config
137
139
138 baseui = make_ui_from_config(config)
140 baseui = make_ui_from_config(config)
139 return baseui
141 return baseui
140
142
141 def _create_repo(self, wire, create):
143 def _create_repo(self, wire, create):
142 baseui = self._create_config(wire["config"])
144 baseui = self._create_config(wire["config"])
143 return instance(baseui, wire["path"], create)
145 return instance(baseui, wire["path"], create)
144
146
147 def repo(self, wire, create=False):
148 """
149 Get a repository instance for the given path.
150 """
151 return self._create_repo(wire, create)
145
152
146 class HgRemote(object):
153
154 class HgRemote(RemoteBase):
147
155
148 def __init__(self, factory):
156 def __init__(self, factory):
149 self._factory = factory
157 self._factory = factory
150
151 self._bulk_methods = {
158 self._bulk_methods = {
152 "affected_files": self.ctx_files,
159 "affected_files": self.ctx_files,
153 "author": self.ctx_user,
160 "author": self.ctx_user,
154 "branch": self.ctx_branch,
161 "branch": self.ctx_branch,
155 "children": self.ctx_children,
162 "children": self.ctx_children,
156 "date": self.ctx_date,
163 "date": self.ctx_date,
157 "message": self.ctx_description,
164 "message": self.ctx_description,
158 "parents": self.ctx_parents,
165 "parents": self.ctx_parents,
159 "status": self.ctx_status,
166 "status": self.ctx_status,
160 "obsolete": self.ctx_obsolete,
167 "obsolete": self.ctx_obsolete,
161 "phase": self.ctx_phase,
168 "phase": self.ctx_phase,
162 "hidden": self.ctx_hidden,
169 "hidden": self.ctx_hidden,
163 "_file_paths": self.ctx_list,
170 "_file_paths": self.ctx_list,
164 }
171 }
165
172
166 def _get_ctx(self, repo, ref):
173 def _get_ctx(self, repo, ref):
167 return get_ctx(repo, ref)
174 return get_ctx(repo, ref)
168
175
169 @reraise_safe_exceptions
176 @reraise_safe_exceptions
170 def discover_hg_version(self):
177 def discover_hg_version(self):
171 from mercurial import util
178 from mercurial import util
172 return util.version()
179 return util.version()
173
180
174 @reraise_safe_exceptions
181 @reraise_safe_exceptions
175 def is_empty(self, wire):
182 def is_empty(self, wire):
176 repo = self._factory.repo(wire)
183 repo = self._factory.repo(wire)
177
184
178 try:
185 try:
179 return len(repo) == 0
186 return len(repo) == 0
180 except Exception:
187 except Exception:
181 log.exception("failed to read object_store")
188 log.exception("failed to read object_store")
182 return False
189 return False
183
190
184 @reraise_safe_exceptions
191 @reraise_safe_exceptions
185 def archive_repo(self, archive_path, mtime, file_info, kind):
192 def archive_repo(self, archive_path, mtime, file_info, kind):
186 if kind == "tgz":
193 if kind == "tgz":
187 archiver = archival.tarit(archive_path, mtime, "gz")
194 archiver = archival.tarit(archive_path, mtime, "gz")
188 elif kind == "tbz2":
195 elif kind == "tbz2":
189 archiver = archival.tarit(archive_path, mtime, "bz2")
196 archiver = archival.tarit(archive_path, mtime, "bz2")
190 elif kind == 'zip':
197 elif kind == 'zip':
191 archiver = archival.zipit(archive_path, mtime)
198 archiver = archival.zipit(archive_path, mtime)
192 else:
199 else:
193 raise exceptions.ArchiveException()(
200 raise exceptions.ArchiveException()(
194 'Remote does not support: "%s".' % kind)
201 'Remote does not support: "%s".' % kind)
195
202
196 for f_path, f_mode, f_is_link, f_content in file_info:
203 for f_path, f_mode, f_is_link, f_content in file_info:
197 archiver.addfile(f_path, f_mode, f_is_link, f_content)
204 archiver.addfile(f_path, f_mode, f_is_link, f_content)
198 archiver.done()
205 archiver.done()
199
206
200 @reraise_safe_exceptions
207 @reraise_safe_exceptions
201 def bookmarks(self, wire):
208 def bookmarks(self, wire):
202 repo = self._factory.repo(wire)
209 cache_on, context_uid, repo_id = self._cache_on(wire)
203 return dict(repo._bookmarks)
210 @self.region.conditional_cache_on_arguments(condition=cache_on)
211 def _bookmarks(_context_uid, _repo_id):
212 repo = self._factory.repo(wire)
213 return dict(repo._bookmarks)
214
215 return _bookmarks(context_uid, repo_id)
204
216
205 @reraise_safe_exceptions
217 @reraise_safe_exceptions
206 def branches(self, wire, normal, closed):
218 def branches(self, wire, normal, closed):
207 repo = self._factory.repo(wire)
219 cache_on, context_uid, repo_id = self._cache_on(wire)
208 iter_branches = repo.branchmap().iterbranches()
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
209 bt = {}
221 def _branches(_context_uid, _repo_id, _normal, _closed):
210 for branch_name, _heads, tip, is_closed in iter_branches:
222 repo = self._factory.repo(wire)
211 if normal and not is_closed:
223 iter_branches = repo.branchmap().iterbranches()
212 bt[branch_name] = tip
224 bt = {}
213 if closed and is_closed:
225 for branch_name, _heads, tip, is_closed in iter_branches:
214 bt[branch_name] = tip
226 if normal and not is_closed:
215
227 bt[branch_name] = tip
216 return bt
228 if closed and is_closed:
229 bt[branch_name] = tip
217
230
218 @reraise_safe_exceptions
231 return bt
219 def bulk_request(self, wire, rev, pre_load):
220 result = {}
221 for attr in pre_load:
222 try:
223 method = self._bulk_methods[attr]
224 result[attr] = method(wire, rev)
225 except KeyError as e:
226 raise exceptions.VcsException(e)(
227 'Unknown bulk attribute: "%s"' % attr)
228 return result
229
232
230 @reraise_safe_exceptions
233 return _branches(context_uid, repo_id, normal, closed)
231 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
232 baseui = self._factory._create_config(wire["config"], hooks=hooks)
233 clone(baseui, source, dest, noupdate=not update_after_clone)
234
234
235 @reraise_safe_exceptions
235 @reraise_safe_exceptions
236 def commitctx(
236 def bulk_request(self, wire, commit_id, pre_load):
237 self, wire, message, parents, commit_time, commit_timezone,
237 cache_on, context_uid, repo_id = self._cache_on(wire)
238 user, files, extra, removed, updated):
238 @self.region.conditional_cache_on_arguments(condition=cache_on)
239
239 def _bulk_request(_repo_id, _commit_id, _pre_load):
240 repo = self._factory.repo(wire)
240 result = {}
241 baseui = self._factory._create_config(wire['config'])
241 for attr in pre_load:
242 publishing = baseui.configbool('phases', 'publish')
242 try:
243 if publishing:
243 method = self._bulk_methods[attr]
244 new_commit = 'public'
244 result[attr] = method(wire, commit_id)
245 else:
245 except KeyError as e:
246 new_commit = 'draft'
246 raise exceptions.VcsException(e)(
247
247 'Unknown bulk attribute: "%s"' % attr)
248 def _filectxfn(_repo, ctx, path):
248 return result
249 """
250 Marks given path as added/changed/removed in a given _repo. This is
251 for internal mercurial commit function.
252 """
253
254 # check if this path is removed
255 if path in removed:
256 # returning None is a way to mark node for removal
257 return None
258
249
259 # check if this path is added
250 return _bulk_request(repo_id, commit_id, sorted(pre_load))
260 for node in updated:
261 if node['path'] == path:
262 return memfilectx(
263 _repo,
264 changectx=ctx,
265 path=node['path'],
266 data=node['content'],
267 islink=False,
268 isexec=bool(node['mode'] & stat.S_IXUSR),
269 copied=False)
270
271 raise exceptions.AbortException()(
272 "Given path haven't been marked as added, "
273 "changed or removed (%s)" % path)
274
275 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
276
277 commit_ctx = memctx(
278 repo=repo,
279 parents=parents,
280 text=message,
281 files=files,
282 filectxfn=_filectxfn,
283 user=user,
284 date=(commit_time, commit_timezone),
285 extra=extra)
286
287 n = repo.commitctx(commit_ctx)
288 new_id = hex(n)
289
290 return new_id
291
251
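The in-memory commit above is the densest part of this hunk; condensed, the flow is: pick the phase from config, build a memctx whose filectxfn returns a memfilectx per changed path (or None for a deletion), then commit it. A hedged sketch using the names already imported in this module:

def commit_in_memory(repo, parents, message, user, when, files, filectxfn):
    # filectxfn(repo, ctx, path) -> memfilectx, or None to delete `path`
    ctx = memctx(repo=repo, parents=parents, text=message, files=files,
                 filectxfn=filectxfn, user=user, date=when)
    return hex(repo.commitctx(ctx))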
292 @reraise_safe_exceptions
252 @reraise_safe_exceptions
293 def ctx_branch(self, wire, revision):
253 def ctx_branch(self, wire, commit_id):
294 repo = self._factory.repo(wire)
254 cache_on, context_uid, repo_id = self._cache_on(wire)
295 ctx = self._get_ctx(repo, revision)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
296 return ctx.branch()
256 def _ctx_branch(_repo_id, _commit_id):
257 repo = self._factory.repo(wire)
258 ctx = self._get_ctx(repo, commit_id)
259 return ctx.branch()
260 return _ctx_branch(repo_id, commit_id)
297
261
298 @reraise_safe_exceptions
262 @reraise_safe_exceptions
299 def ctx_children(self, wire, revision):
263 def ctx_date(self, wire, commit_id):
300 repo = self._factory.repo(wire)
264 cache_on, context_uid, repo_id = self._cache_on(wire)
301 ctx = self._get_ctx(repo, revision)
265 @self.region.conditional_cache_on_arguments(condition=cache_on)
302 return [child.rev() for child in ctx.children()]
266 def _ctx_date(_repo_id, _commit_id):
303
267 repo = self._factory.repo(wire)
304 @reraise_safe_exceptions
268 ctx = self._get_ctx(repo, commit_id)
305 def ctx_date(self, wire, revision):
269 return ctx.date()
306 repo = self._factory.repo(wire)
270 return _ctx_date(repo_id, commit_id)
307 ctx = self._get_ctx(repo, revision)
308 return ctx.date()
309
271
310 @reraise_safe_exceptions
272 @reraise_safe_exceptions
311 def ctx_description(self, wire, revision):
273 def ctx_description(self, wire, revision):
312 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, revision)
275 ctx = self._get_ctx(repo, revision)
314 return ctx.description()
276 return ctx.description()
315
277
316 @reraise_safe_exceptions
278 @reraise_safe_exceptions
317 def ctx_files(self, wire, revision):
279 def ctx_files(self, wire, commit_id):
318 repo = self._factory.repo(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
319 ctx = self._get_ctx(repo, revision)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
320 return ctx.files()
282 def _ctx_files(_repo_id, _commit_id):
283 repo = self._factory.repo(wire)
284 ctx = self._get_ctx(repo, commit_id)
285 return ctx.files()
286
287 return _ctx_files(repo_id, commit_id)
321
288
322 @reraise_safe_exceptions
289 @reraise_safe_exceptions
323 def ctx_list(self, path, revision):
290 def ctx_list(self, path, revision):
324 repo = self._factory.repo(path)
291 repo = self._factory.repo(path)
325 ctx = self._get_ctx(repo, revision)
292 ctx = self._get_ctx(repo, revision)
326 return list(ctx)
293 return list(ctx)
327
294
328 @reraise_safe_exceptions
295 @reraise_safe_exceptions
329 def ctx_parents(self, wire, revision):
296 def ctx_parents(self, wire, commit_id):
330 repo = self._factory.repo(wire)
297 cache_on, context_uid, repo_id = self._cache_on(wire)
331 ctx = self._get_ctx(repo, revision)
298 @self.region.conditional_cache_on_arguments(condition=cache_on)
332 return [parent.rev() for parent in ctx.parents()]
299 def _ctx_parents(_repo_id, _commit_id):
300 repo = self._factory.repo(wire)
301 ctx = self._get_ctx(repo, commit_id)
302 return [parent.hex() for parent in ctx.parents()
303 if not (parent.hidden() or parent.obsolete())]
304
305 return _ctx_parents(repo_id, commit_id)
306
307 @reraise_safe_exceptions
308 def ctx_children(self, wire, commit_id):
309 cache_on, context_uid, repo_id = self._cache_on(wire)
310 @self.region.conditional_cache_on_arguments(condition=cache_on)
311 def _ctx_children(_repo_id, _commit_id):
312 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, commit_id)
314 return [child.hex() for child in ctx.children()
315 if not (child.hidden() or child.obsolete())]
316
317 return _ctx_children(repo_id, commit_id)
333
318
334 @reraise_safe_exceptions
319 @reraise_safe_exceptions
335 def ctx_phase(self, wire, revision):
320 def ctx_phase(self, wire, commit_id):
336 repo = self._factory.repo(wire)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
337 ctx = self._get_ctx(repo, revision)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
338 # public=0, draft=1, secret=3
323 def _ctx_phase(_context_uid, _repo_id, _commit_id):
339 return ctx.phase()
324 repo = self._factory.repo(wire)
325 ctx = self._get_ctx(repo, commit_id)
326 # public=0, draft=1, secret=3
327 return ctx.phase()
328 return _ctx_phase(context_uid, repo_id, commit_id)
340
329
341 @reraise_safe_exceptions
330 @reraise_safe_exceptions
342 def ctx_obsolete(self, wire, revision):
331 def ctx_obsolete(self, wire, commit_id):
343 repo = self._factory.repo(wire)
332 cache_on, context_uid, repo_id = self._cache_on(wire)
344 ctx = self._get_ctx(repo, revision)
333 @self.region.conditional_cache_on_arguments(condition=cache_on)
345 return ctx.obsolete()
334 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
335 repo = self._factory.repo(wire)
336 ctx = self._get_ctx(repo, commit_id)
337 return ctx.obsolete()
338 return _ctx_obsolete(context_uid, repo_id, commit_id)
346
339
347 @reraise_safe_exceptions
340 @reraise_safe_exceptions
348 def ctx_hidden(self, wire, revision):
341 def ctx_hidden(self, wire, commit_id):
349 repo = self._factory.repo(wire)
342 cache_on, context_uid, repo_id = self._cache_on(wire)
350 ctx = self._get_ctx(repo, revision)
343 @self.region.conditional_cache_on_arguments(condition=cache_on)
351 return ctx.hidden()
344 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
345 repo = self._factory.repo(wire)
346 ctx = self._get_ctx(repo, commit_id)
347 return ctx.hidden()
348 return _ctx_hidden(context_uid, repo_id, commit_id)
352
349
353 @reraise_safe_exceptions
350 @reraise_safe_exceptions
354 def ctx_substate(self, wire, revision):
351 def ctx_substate(self, wire, revision):
355 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
356 ctx = self._get_ctx(repo, revision)
353 ctx = self._get_ctx(repo, revision)
357 return ctx.substate
354 return ctx.substate
358
355
359 @reraise_safe_exceptions
356 @reraise_safe_exceptions
360 def ctx_status(self, wire, revision):
357 def ctx_status(self, wire, revision):
361 repo = self._factory.repo(wire)
358 repo = self._factory.repo(wire)
362 ctx = self._get_ctx(repo, revision)
359 ctx = self._get_ctx(repo, revision)
363 status = repo[ctx.p1().node()].status(other=ctx.node())
360 status = repo[ctx.p1().node()].status(other=ctx.node())
364 # object of status (odd, custom named tuple in mercurial) is not
361 # object of status (odd, custom named tuple in mercurial) is not
365 # correctly serializable, we make it a list, as the underlying
362 # correctly serializable, we make it a list, as the underlying
366 # API expects this to be a list
363 # API expects this to be a list
367 return list(status)
364 return list(status)
368
365
369 @reraise_safe_exceptions
366 @reraise_safe_exceptions
370 def ctx_user(self, wire, revision):
367 def ctx_user(self, wire, revision):
371 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
372 ctx = self._get_ctx(repo, revision)
369 ctx = self._get_ctx(repo, revision)
373 return ctx.user()
370 return ctx.user()
374
371
375 @reraise_safe_exceptions
372 @reraise_safe_exceptions
376 def check_url(self, url, config):
373 def check_url(self, url, config):
377 _proto = None
374 _proto = None
378 if '+' in url[:url.find('://')]:
375 if '+' in url[:url.find('://')]:
379 _proto = url[0:url.find('+')]
376 _proto = url[0:url.find('+')]
380 url = url[url.find('+') + 1:]
377 url = url[url.find('+') + 1:]
381 handlers = []
378 handlers = []
382 url_obj = url_parser(url)
379 url_obj = url_parser(url)
383 test_uri, authinfo = url_obj.authinfo()
380 test_uri, authinfo = url_obj.authinfo()
384 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
381 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
385 url_obj.query = obfuscate_qs(url_obj.query)
382 url_obj.query = obfuscate_qs(url_obj.query)
386
383
387 cleaned_uri = str(url_obj)
384 cleaned_uri = str(url_obj)
388 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
385 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
389
386
390 if authinfo:
387 if authinfo:
391 # create a password manager
388 # create a password manager
392 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
389 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
393 passmgr.add_password(*authinfo)
390 passmgr.add_password(*authinfo)
394
391
395 handlers.extend((httpbasicauthhandler(passmgr),
392 handlers.extend((httpbasicauthhandler(passmgr),
396 httpdigestauthhandler(passmgr)))
393 httpdigestauthhandler(passmgr)))
397
394
398 o = urllib2.build_opener(*handlers)
395 o = urllib2.build_opener(*handlers)
399 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
396 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
400 ('Accept', 'application/mercurial-0.1')]
397 ('Accept', 'application/mercurial-0.1')]
401
398
402 q = {"cmd": 'between'}
399 q = {"cmd": 'between'}
403 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
400 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
404 qs = '?%s' % urllib.urlencode(q)
401 qs = '?%s' % urllib.urlencode(q)
405 cu = "%s%s" % (test_uri, qs)
402 cu = "%s%s" % (test_uri, qs)
406 req = urllib2.Request(cu, None, {})
403 req = urllib2.Request(cu, None, {})
407
404
408 try:
405 try:
409 log.debug("Trying to open URL %s", cleaned_uri)
406 log.debug("Trying to open URL %s", cleaned_uri)
410 resp = o.open(req)
407 resp = o.open(req)
411 if resp.code != 200:
408 if resp.code != 200:
412 raise exceptions.URLError()('Return Code is not 200')
409 raise exceptions.URLError()('Return Code is not 200')
413 except Exception as e:
410 except Exception as e:
414 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
411 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
415 # means it cannot be cloned
412 # means it cannot be cloned
416 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
413 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
417
414
418 # now check if it's a proper hg repo, but don't do it for svn
415 # now check if it's a proper hg repo, but don't do it for svn
419 try:
416 try:
420 if _proto == 'svn':
417 if _proto == 'svn':
421 pass
418 pass
422 else:
419 else:
423 # check for pure hg repos
420 # check for pure hg repos
424 log.debug(
421 log.debug(
425 "Verifying if URL is a Mercurial repository: %s",
422 "Verifying if URL is a Mercurial repository: %s",
426 cleaned_uri)
423 cleaned_uri)
427 ui = make_ui_from_config(config)
424 ui = make_ui_from_config(config)
428 peer_checker = makepeer(ui, url)
425 peer_checker = makepeer(ui, url)
429 peer_checker.lookup('tip')
426 peer_checker.lookup('tip')
430 except Exception as e:
427 except Exception as e:
431 log.warning("URL is not a valid Mercurial repository: %s",
428 log.warning("URL is not a valid Mercurial repository: %s",
432 cleaned_uri)
429 cleaned_uri)
433 raise exceptions.URLError(e)(
430 raise exceptions.URLError(e)(
434 "url [%s] does not look like an hg repo org_exc: %s"
431 "url [%s] does not look like an hg repo org_exc: %s"
435 % (cleaned_uri, e))
432 % (cleaned_uri, e))
436
433
437 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
434 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
438 return True
435 return True
439
436
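The URL check above leans on Mercurial's legacy wire protocol: any hg HTTP repository answers `cmd=between`, so a 200 response is a cheap validity probe before the more expensive `lookup('tip')`. A hedged standalone version using the same Python 2 urllib2 APIs as this file; the helper name is illustrative:

import urllib
import urllib2

def looks_like_hg_http_repo(base_url):
    q = urllib.urlencode({'cmd': 'between', 'pairs': '%s-%s' % ('0' * 40, '0' * 40)})
    req = urllib2.Request('%s?%s' % (base_url, q))
    return urllib2.urlopen(req).getcode() == 200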
440 @reraise_safe_exceptions
437 @reraise_safe_exceptions
441 def diff(
438 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
442 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
443 context):
444 repo = self._factory.repo(wire)
439 repo = self._factory.repo(wire)
445
440
446 if file_filter:
441 if file_filter:
447 match_filter = match(file_filter[0], '', [file_filter[1]])
442 match_filter = match(file_filter[0], '', [file_filter[1]])
448 else:
443 else:
449 match_filter = file_filter
444 match_filter = file_filter
450 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
445 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
451
446
452 try:
447 try:
453 return "".join(patch.diff(
448 return "".join(patch.diff(
454 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
449 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
455 except RepoLookupError as e:
450 except RepoLookupError as e:
456 raise exceptions.LookupException(e)()
451 raise exceptions.LookupException(e)()
457
452
458 @reraise_safe_exceptions
453 @reraise_safe_exceptions
459 def node_history(self, wire, revision, path, limit):
454 def node_history(self, wire, revision, path, limit):
460 repo = self._factory.repo(wire)
455 cache_on, context_uid, repo_id = self._cache_on(wire)
456 @self.region.conditional_cache_on_arguments(condition=cache_on)
457 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
458 repo = self._factory.repo(wire)
461
459
462 ctx = self._get_ctx(repo, revision)
460 ctx = self._get_ctx(repo, revision)
463 fctx = ctx.filectx(path)
461 fctx = ctx.filectx(path)
464
462
465 def history_iter():
463 def history_iter():
466 limit_rev = fctx.rev()
464 limit_rev = fctx.rev()
467 for obj in reversed(list(fctx.filelog())):
465 for obj in reversed(list(fctx.filelog())):
468 obj = fctx.filectx(obj)
466 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
467 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
468 if ctx.hidden() or ctx.obsolete():
471 continue
469 continue
472
470
473 if limit_rev >= obj.rev():
471 if limit_rev >= obj.rev():
474 yield obj
472 yield obj
475
473
476 history = []
474 history = []
477 for cnt, obj in enumerate(history_iter()):
475 for cnt, obj in enumerate(history_iter()):
478 if limit and cnt >= limit:
476 if limit and cnt >= limit:
479 break
477 break
480 history.append(hex(obj.node()))
478 history.append(hex(obj.node()))
481
479
482 return [x for x in history]
480 return [x for x in history]
481 return _node_history(context_uid, repo_id, revision, path, limit)
483
482
484 @reraise_safe_exceptions
483 @reraise_safe_exceptions
485 def node_history_untill(self, wire, revision, path, limit):
484 def node_history_untill(self, wire, revision, path, limit):
486 repo = self._factory.repo(wire)
485 cache_on, context_uid, repo_id = self._cache_on(wire)
487 ctx = self._get_ctx(repo, revision)
486 @self.region.conditional_cache_on_arguments(condition=cache_on)
488 fctx = ctx.filectx(path)
487 def _node_history_until(_context_uid, _repo_id):
488 repo = self._factory.repo(wire)
489 ctx = self._get_ctx(repo, revision)
490 fctx = ctx.filectx(path)
489
491
490 file_log = list(fctx.filelog())
492 file_log = list(fctx.filelog())
491 if limit:
493 if limit:
492 # Limit to the last n items
494 # Limit to the last n items
493 file_log = file_log[-limit:]
495 file_log = file_log[-limit:]
494
496
495 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
497 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
498 return _node_history_until(context_uid, repo_id, revision, path, limit)
496
499
497 @reraise_safe_exceptions
500 @reraise_safe_exceptions
498 def fctx_annotate(self, wire, revision, path):
501 def fctx_annotate(self, wire, revision, path):
499 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
500 ctx = self._get_ctx(repo, revision)
503 ctx = self._get_ctx(repo, revision)
501 fctx = ctx.filectx(path)
504 fctx = ctx.filectx(path)
502
505
503 result = []
506 result = []
504 for i, annotate_obj in enumerate(fctx.annotate(), 1):
507 for i, annotate_obj in enumerate(fctx.annotate(), 1):
505 ln_no = i
508 ln_no = i
506 sha = hex(annotate_obj.fctx.node())
509 sha = hex(annotate_obj.fctx.node())
507 content = annotate_obj.text
510 content = annotate_obj.text
508 result.append((ln_no, sha, content))
511 result.append((ln_no, sha, content))
509 return result
512 return result
510
513
511 @reraise_safe_exceptions
514 @reraise_safe_exceptions
512 def fctx_data(self, wire, revision, path):
515 def fctx_node_data(self, wire, revision, path):
513 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
514 ctx = self._get_ctx(repo, revision)
517 ctx = self._get_ctx(repo, revision)
515 fctx = ctx.filectx(path)
518 fctx = ctx.filectx(path)
516 return fctx.data()
519 return fctx.data()
517
520
518 @reraise_safe_exceptions
521 @reraise_safe_exceptions
519 def fctx_flags(self, wire, revision, path):
522 def fctx_flags(self, wire, commit_id, path):
520 repo = self._factory.repo(wire)
523 cache_on, context_uid, repo_id = self._cache_on(wire)
521 ctx = self._get_ctx(repo, revision)
524 @self.region.conditional_cache_on_arguments(condition=cache_on)
522 fctx = ctx.filectx(path)
525 def _fctx_flags(_repo_id, _commit_id, _path):
523 return fctx.flags()
526 repo = self._factory.repo(wire)
527 ctx = self._get_ctx(repo, commit_id)
528 fctx = ctx.filectx(path)
529 return fctx.flags()
530
531 return _fctx_flags(repo_id, commit_id, path)
524
532
525 @reraise_safe_exceptions
533 @reraise_safe_exceptions
526 def fctx_size(self, wire, revision, path):
534 def fctx_size(self, wire, commit_id, path):
527 repo = self._factory.repo(wire)
535 cache_on, context_uid, repo_id = self._cache_on(wire)
528 ctx = self._get_ctx(repo, revision)
536 @self.region.conditional_cache_on_arguments(condition=cache_on)
529 fctx = ctx.filectx(path)
537 def _fctx_size(_repo_id, _revision, _path):
530 return fctx.size()
538 repo = self._factory.repo(wire)
539 ctx = self._get_ctx(repo, commit_id)
540 fctx = ctx.filectx(path)
541 return fctx.size()
542 return _fctx_size(repo_id, commit_id, path)
531
543
532 @reraise_safe_exceptions
544 @reraise_safe_exceptions
533 def get_all_commit_ids(self, wire, name):
545 def get_all_commit_ids(self, wire, name):
534 repo = self._factory.repo(wire)
546 cache_on, context_uid, repo_id = self._cache_on(wire)
535 repo = repo.filtered(name)
547 @self.region.conditional_cache_on_arguments(condition=cache_on)
536 revs = map(lambda x: hex(x[7]), repo.changelog.index)
548 def _get_all_commit_ids(_context_uid, _repo_id, _name):
537 return revs
549 repo = self._factory.repo(wire)
550 repo = repo.filtered(name)
551 revs = map(lambda x: hex(x[7]), repo.changelog.index)
552 return revs
553 return _get_all_commit_ids(context_uid, repo_id, name)
538
554
539 @reraise_safe_exceptions
555 @reraise_safe_exceptions
540 def get_config_value(self, wire, section, name, untrusted=False):
556 def get_config_value(self, wire, section, name, untrusted=False):
541 repo = self._factory.repo(wire)
557 repo = self._factory.repo(wire)
542 return repo.ui.config(section, name, untrusted=untrusted)
558 return repo.ui.config(section, name, untrusted=untrusted)
543
559
544 @reraise_safe_exceptions
560 @reraise_safe_exceptions
545 def get_config_bool(self, wire, section, name, untrusted=False):
561 def is_large_file(self, wire, commit_id, path):
546 repo = self._factory.repo(wire)
562 cache_on, context_uid, repo_id = self._cache_on(wire)
547 return repo.ui.configbool(section, name, untrusted=untrusted)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
565 return largefiles.lfutil.isstandin(path)
566
567 return _is_large_file(context_uid, repo_id, commit_id, path)
548
568
549 @reraise_safe_exceptions
569 @reraise_safe_exceptions
550 def get_config_list(self, wire, section, name, untrusted=False):
570 def is_binary(self, wire, revision, path):
551 repo = self._factory.repo(wire)
571 cache_on, context_uid, repo_id = self._cache_on(wire)
552 return repo.ui.configlist(section, name, untrusted=untrusted)
553
572
554 @reraise_safe_exceptions
573 @self.region.conditional_cache_on_arguments(condition=cache_on)
555 def is_large_file(self, wire, path):
574 def _is_binary(_repo_id, _sha, _path):
556 return largefiles.lfutil.isstandin(path)
575 repo = self._factory.repo(wire)
576 ctx = self._get_ctx(repo, revision)
577 fctx = ctx.filectx(path)
578 return fctx.isbinary()
579
580 return _is_binary(repo_id, revision, path)
557
581
558 @reraise_safe_exceptions
582 @reraise_safe_exceptions
559 def in_largefiles_store(self, wire, sha):
583 def in_largefiles_store(self, wire, sha):
560 repo = self._factory.repo(wire)
584 repo = self._factory.repo(wire)
561 return largefiles.lfutil.instore(repo, sha)
585 return largefiles.lfutil.instore(repo, sha)
562
586
563 @reraise_safe_exceptions
587 @reraise_safe_exceptions
564 def in_user_cache(self, wire, sha):
588 def in_user_cache(self, wire, sha):
565 repo = self._factory.repo(wire)
589 repo = self._factory.repo(wire)
566 return largefiles.lfutil.inusercache(repo.ui, sha)
590 return largefiles.lfutil.inusercache(repo.ui, sha)
567
591
568 @reraise_safe_exceptions
592 @reraise_safe_exceptions
569 def store_path(self, wire, sha):
593 def store_path(self, wire, sha):
570 repo = self._factory.repo(wire)
594 repo = self._factory.repo(wire)
571 return largefiles.lfutil.storepath(repo, sha)
595 return largefiles.lfutil.storepath(repo, sha)
572
596
573 @reraise_safe_exceptions
597 @reraise_safe_exceptions
574 def link(self, wire, sha, path):
598 def link(self, wire, sha, path):
575 repo = self._factory.repo(wire)
599 repo = self._factory.repo(wire)
576 largefiles.lfutil.link(
600 largefiles.lfutil.link(
577 largefiles.lfutil.usercachepath(repo.ui, sha), path)
601 largefiles.lfutil.usercachepath(repo.ui, sha), path)
578
602
579 @reraise_safe_exceptions
603 @reraise_safe_exceptions
580 def localrepository(self, wire, create=False):
604 def localrepository(self, wire, create=False):
581 self._factory.repo(wire, create=create)
605 self._factory.repo(wire, create=create)
582
606
583 @reraise_safe_exceptions
607 @reraise_safe_exceptions
584 def lookup(self, wire, revision, both):
608 def lookup(self, wire, revision, both):
585
609 cache_on, context_uid, repo_id = self._cache_on(wire)
586 repo = self._factory.repo(wire)
610 @self.region.conditional_cache_on_arguments(condition=cache_on)
587
611 def _lookup(_context_uid, _repo_id, _revision, _both):
588 if isinstance(revision, int):
589 # NOTE(marcink):
590 # since Mercurial doesn't support negative indexes properly
591 # we need to shift accordingly by one to get proper index, e.g
592 # repo[-1] => repo[-2]
593 # repo[0] => repo[-1]
594 if revision <= 0:
595 revision = revision + -1
596 try:
597 ctx = self._get_ctx(repo, revision)
598 except (TypeError, RepoLookupError) as e:
599 e._org_exc_tb = traceback.format_exc()
600 raise exceptions.LookupException(e)(revision)
601 except LookupError as e:
602 e._org_exc_tb = traceback.format_exc()
603 raise exceptions.LookupException(e)(e.name)
604
612
605 if not both:
613 repo = self._factory.repo(wire)
606 return ctx.hex()
614 rev = _revision
607
615 if isinstance(rev, int):
608 ctx = repo[ctx.hex()]
616 # NOTE(marcink):
609 return ctx.hex(), ctx.rev()
617 # since Mercurial doesn't support negative indexes properly
618 # we need to shift accordingly by one to get proper index, e.g
619 # repo[-1] => repo[-2]
620 # repo[0] => repo[-1]
621 if rev <= 0:
622 rev = rev + -1
623 try:
624 ctx = self._get_ctx(repo, rev)
625 except (TypeError, RepoLookupError) as e:
626 e._org_exc_tb = traceback.format_exc()
627 raise exceptions.LookupException(e)(rev)
628 except LookupError as e:
629 e._org_exc_tb = traceback.format_exc()
630 raise exceptions.LookupException(e)(e.name)
610
631
611 @reraise_safe_exceptions
632 if not both:
612 def pull(self, wire, url, commit_ids=None):
633 return ctx.hex()
613 repo = self._factory.repo(wire)
614 # Disable any prompts for this repo
615 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
616
634
617 remote = peer(repo, {}, url)
635 ctx = repo[ctx.hex()]
618 # Disable any prompts for this remote
636 return ctx.hex(), ctx.rev()
619 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
620
637
621 if commit_ids:
638 return _lookup(context_uid, repo_id, revision, both)
622 commit_ids = [bin(commit_id) for commit_id in commit_ids]
623
624 return exchange.pull(
625 repo, remote, heads=commit_ids, force=None).cgresult
626
639
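The index shift inside `_lookup` above can be checked in isolation; a pure-Python restatement of the comment's examples, no Mercurial required:

def shift_rev(rev):
    # non-positive integer indexes move down by one,
    # so repo[0] -> repo[-1] and repo[-1] -> repo[-2]
    if isinstance(rev, int) and rev <= 0:
        return rev - 1
    return rev

assert shift_rev(0) == -1
assert shift_rev(-1) == -2
assert shift_rev(5) == 5          # positive revisions are untouched
assert shift_rev('tip') == 'tip'  # symbolic revisions pass through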
627 @reraise_safe_exceptions
640 @reraise_safe_exceptions
628 def sync_push(self, wire, url):
641 def sync_push(self, wire, url):
629 if not self.check_url(url, wire['config']):
642 if not self.check_url(url, wire['config']):
630 return
643 return
631
644
632 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
633
646
634 # Disable any prompts for this repo
647 # Disable any prompts for this repo
635 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
648 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
636
649
637 bookmarks = dict(repo._bookmarks).keys()
650 bookmarks = dict(repo._bookmarks).keys()
638 remote = peer(repo, {}, url)
651 remote = peer(repo, {}, url)
639 # Disable any prompts for this remote
652 # Disable any prompts for this remote
640 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
653 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
641
654
642 return exchange.push(
655 return exchange.push(
643 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
656 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
644
657
645 @reraise_safe_exceptions
658 @reraise_safe_exceptions
646 def revision(self, wire, rev):
659 def revision(self, wire, rev):
647 repo = self._factory.repo(wire)
660 repo = self._factory.repo(wire)
648 ctx = self._get_ctx(repo, rev)
661 ctx = self._get_ctx(repo, rev)
649 return ctx.rev()
662 return ctx.rev()
650
663
651 @reraise_safe_exceptions
664 @reraise_safe_exceptions
652 def rev_range(self, wire, filter):
665 def rev_range(self, wire, commit_filter):
653 repo = self._factory.repo(wire)
666 cache_on, context_uid, repo_id = self._cache_on(wire)
654 revisions = [rev for rev in revrange(repo, filter)]
667
655 return revisions
668 @self.region.conditional_cache_on_arguments(condition=cache_on)
669 def _rev_range(_context_uid, _repo_id, _filter):
670 repo = self._factory.repo(wire)
671 revisions = [rev for rev in revrange(repo, commit_filter)]
672 return revisions
673
674 return _rev_range(context_uid, repo_id, sorted(commit_filter))
656
675
657 @reraise_safe_exceptions
676 @reraise_safe_exceptions
658 def rev_range_hash(self, wire, node):
677 def rev_range_hash(self, wire, node):
659 repo = self._factory.repo(wire)
678 repo = self._factory.repo(wire)
660
679
661 def get_revs(repo, rev_opt):
680 def get_revs(repo, rev_opt):
662 if rev_opt:
681 if rev_opt:
663 revs = revrange(repo, rev_opt)
682 revs = revrange(repo, rev_opt)
664 if len(revs) == 0:
683 if len(revs) == 0:
665 return (nullrev, nullrev)
684 return (nullrev, nullrev)
666 return max(revs), min(revs)
685 return max(revs), min(revs)
667 else:
686 else:
668 return len(repo) - 1, 0
687 return len(repo) - 1, 0
669
688
670 stop, start = get_revs(repo, [node + ':'])
689 stop, start = get_revs(repo, [node + ':'])
671 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
690 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
672 return revs
691 return revs
673
692
674 @reraise_safe_exceptions
693 @reraise_safe_exceptions
675 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
694 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
676 other_path = kwargs.pop('other_path', None)
695 other_path = kwargs.pop('other_path', None)
677
696
678 # case when we want to compare two independent repositories
697 # case when we want to compare two independent repositories
679 if other_path and other_path != wire["path"]:
698 if other_path and other_path != wire["path"]:
680 baseui = self._factory._create_config(wire["config"])
699 baseui = self._factory._create_config(wire["config"])
681 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
700 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
682 else:
701 else:
683 repo = self._factory.repo(wire)
702 repo = self._factory.repo(wire)
684 return list(repo.revs(rev_spec, *args))
703 return list(repo.revs(rev_spec, *args))
685
704
686 @reraise_safe_exceptions
705 @reraise_safe_exceptions
687 def strip(self, wire, revision, update, backup):
688 repo = self._factory.repo(wire)
689 ctx = self._get_ctx(repo, revision)
690 hgext_strip(
691 repo.baseui, repo, ctx.node(), update=update, backup=backup)
692
693 @reraise_safe_exceptions
694 def verify(self, wire,):
706 def verify(self, wire,):
695 repo = self._factory.repo(wire)
707 repo = self._factory.repo(wire)
696 baseui = self._factory._create_config(wire['config'])
708 baseui = self._factory._create_config(wire['config'])
697 baseui.setconfig('ui', 'quiet', 'false')
709 baseui.setconfig('ui', 'quiet', 'false')
698 output = io.BytesIO()
710 output = io.BytesIO()
699
711
700 def write(data, **unused_kwargs):
712 def write(data, **unused_kwargs):
701 output.write(data)
713 output.write(data)
702 baseui.write = write
714 baseui.write = write
703
715
704 repo.ui = baseui
716 repo.ui = baseui
705 verify.verify(repo)
717 verify.verify(repo)
706 return output.getvalue()
718 return output.getvalue()
707
719
708 @reraise_safe_exceptions
720 @reraise_safe_exceptions
709 def tag(self, wire, name, revision, message, local, user,
721 def hg_update_cache(self, wire,):
710 tag_time, tag_timezone):
711 repo = self._factory.repo(wire)
722 repo = self._factory.repo(wire)
712 ctx = self._get_ctx(repo, revision)
723 baseui = self._factory._create_config(wire['config'])
713 node = ctx.node()
724 baseui.setconfig('ui', 'quiet', 'false')
725 output = io.BytesIO()
714
726
715 date = (tag_time, tag_timezone)
727 def write(data, **unused_kwargs):
716 try:
728 output.write(data)
717 hg_tag.tag(repo, name, node, message, local, user, date)
729 baseui.write = write
718 except Abort as e:
730
719 log.exception("Tag operation aborted")
731 repo.ui = baseui
720 # Exception can contain unicode which we convert
732 with repo.wlock(), repo.lock():
721 raise exceptions.AbortException(e)(repr(e))
733 repo.updatecaches(full=True)
734
735 return output.getvalue()
722
736
723 @reraise_safe_exceptions
737 @reraise_safe_exceptions
724 def tags(self, wire):
738 def tags(self, wire):
725 repo = self._factory.repo(wire)
739 cache_on, context_uid, repo_id = self._cache_on(wire)
726 return repo.tags()
740 @self.region.conditional_cache_on_arguments(condition=cache_on)
741 def _tags(_context_uid, _repo_id):
742 repo = self._factory.repo(wire)
743 return repo.tags()
744
745 return _tags(context_uid, repo_id)
727
746
728 @reraise_safe_exceptions
747 @reraise_safe_exceptions
729 def update(self, wire, node=None, clean=False):
748 def update(self, wire, node=None, clean=False):
730 repo = self._factory.repo(wire)
749 repo = self._factory.repo(wire)
731 baseui = self._factory._create_config(wire['config'])
750 baseui = self._factory._create_config(wire['config'])
732 commands.update(baseui, repo, node=node, clean=clean)
751 commands.update(baseui, repo, node=node, clean=clean)
733
752
734 @reraise_safe_exceptions
753 @reraise_safe_exceptions
735 def identify(self, wire):
754 def identify(self, wire):
736 repo = self._factory.repo(wire)
755 repo = self._factory.repo(wire)
737 baseui = self._factory._create_config(wire['config'])
756 baseui = self._factory._create_config(wire['config'])
738 output = io.BytesIO()
757 output = io.BytesIO()
739 baseui.write = output.write
758 baseui.write = output.write
740 # This is required to get a full node id
759 # This is required to get a full node id
741 baseui.debugflag = True
760 baseui.debugflag = True
742 commands.identify(baseui, repo, id=True)
761 commands.identify(baseui, repo, id=True)
743
762
744 return output.getvalue()
763 return output.getvalue()
745
764
746 @reraise_safe_exceptions
765 @reraise_safe_exceptions
747 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
748 hooks=True):
749 repo = self._factory.repo(wire)
750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751
752 # Mercurial internally has a lot of logic that checks ONLY whether an
753 # option is defined; we pass options only when they are set
754 opts = {}
755 if bookmark:
756 opts['bookmark'] = bookmark
757 if branch:
758 opts['branch'] = branch
759 if revision:
760 opts['rev'] = revision
761
762 commands.pull(baseui, repo, source, **opts)
763
764 @reraise_safe_exceptions
765 def heads(self, wire, branch=None):
766 def heads(self, wire, branch=None):
766 repo = self._factory.repo(wire)
767 repo = self._factory.repo(wire)
767 baseui = self._factory._create_config(wire['config'])
768 baseui = self._factory._create_config(wire['config'])
768 output = io.BytesIO()
769 output = io.BytesIO()
769
770
770 def write(data, **unused_kwargs):
771 def write(data, **unused_kwargs):
771 output.write(data)
772 output.write(data)
772
773
773 baseui.write = write
774 baseui.write = write
774 if branch:
775 if branch:
775 args = [branch]
776 args = [branch]
776 else:
777 else:
777 args = []
778 args = []
778 commands.heads(baseui, repo, template='{node} ', *args)
779 commands.heads(baseui, repo, template='{node} ', *args)
779
780
780 return output.getvalue()
781 return output.getvalue()
781
782
782 @reraise_safe_exceptions
783 @reraise_safe_exceptions
783 def ancestor(self, wire, revision1, revision2):
784 def ancestor(self, wire, revision1, revision2):
784 repo = self._factory.repo(wire)
785 repo = self._factory.repo(wire)
785 changelog = repo.changelog
786 changelog = repo.changelog
786 lookup = repo.lookup
787 lookup = repo.lookup
787 a = changelog.ancestor(lookup(revision1), lookup(revision2))
788 a = changelog.ancestor(lookup(revision1), lookup(revision2))
788 return hex(a)
789 return hex(a)
789
790
790 @reraise_safe_exceptions
791 @reraise_safe_exceptions
791 def push(self, wire, revisions, dest_path, hooks=True,
792 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
792 push_branches=False):
793 baseui = self._factory._create_config(wire["config"], hooks=hooks)
794 clone(baseui, source, dest, noupdate=not update_after_clone)
795
796 @reraise_safe_exceptions
797 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
798
799 repo = self._factory.repo(wire)
800 baseui = self._factory._create_config(wire['config'])
801 publishing = baseui.configbool('phases', 'publish')
802 if publishing:
803 new_commit = 'public'
804 else:
805 new_commit = 'draft'
806
807 def _filectxfn(_repo, ctx, path):
808 """
809 Marks given path as added/changed/removed in a given _repo. This is
810 for internal mercurial commit function.
811 """
812
813 # check if this path is removed
814 if path in removed:
815 # returning None is a way to mark node for removal
816 return None
817
818 # check if this path is added
819 for node in updated:
820 if node['path'] == path:
821 return memfilectx(
822 _repo,
823 changectx=ctx,
824 path=node['path'],
825 data=node['content'],
826 islink=False,
827 isexec=bool(node['mode'] & stat.S_IXUSR),
828 copysource=False)
829
830 raise exceptions.AbortException()(
831 "Given path haven't been marked as added, "
832 "changed or removed (%s)" % path)
833
834 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
835
836 commit_ctx = memctx(
837 repo=repo,
838 parents=parents,
839 text=message,
840 files=files,
841 filectxfn=_filectxfn,
842 user=user,
843 date=(commit_time, commit_timezone),
844 extra=extra)
845
846 n = repo.commitctx(commit_ctx)
847 new_id = hex(n)
848
849 return new_id
850
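The `_filectxfn` callback above follows Mercurial's `memctx` contract: return `None` to delete a path, return a file context to add or modify it, and raise for anything unexpected. A stripped-down dispatch with hypothetical data shapes:

removed = {'old.txt'}
updated = [{'path': 'new.txt', 'content': b'hello', 'mode': 0o100644}]

def resolve_path(path):
    if path in removed:
        return None  # None marks the node for removal
    for node in updated:
        if node['path'] == path:
            return node  # the real code builds a memfilectx here
    raise LookupError(
        "Given path hasn't been marked as added, changed or removed (%s)" % path)

assert resolve_path('old.txt') is None
assert resolve_path('new.txt')['content'] == b'hello'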
851 @reraise_safe_exceptions
852 def pull(self, wire, url, commit_ids=None):
853 repo = self._factory.repo(wire)
854 # Disable any prompts for this repo
855 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
856
857 remote = peer(repo, {}, url)
858 # Disable any prompts for this remote
859 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
860
861 if commit_ids:
862 commit_ids = [bin(commit_id) for commit_id in commit_ids]
863
864 return exchange.pull(
865 repo, remote, heads=commit_ids, force=None).cgresult
866
867 @reraise_safe_exceptions
868 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
869 repo = self._factory.repo(wire)
870 baseui = self._factory._create_config(wire['config'], hooks=hooks)
871
872 # Mercurial internally has a lot of logic that checks ONLY whether an
873 # option is defined; we pass options only when they are set
874 opts = {}
875 if bookmark:
876 opts['bookmark'] = bookmark
877 if branch:
878 opts['branch'] = branch
879 if revision:
880 opts['rev'] = revision
881
882 commands.pull(baseui, repo, source, **opts)
883
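As the comment above notes, Mercurial checks only whether an option is present, so `pull_cmd` builds `opts` sparsely instead of passing `None` values through. The same pattern as a standalone helper:

def sparse_opts(**kwargs):
    # keep only options that were actually provided
    return {k: v for k, v in kwargs.items() if v}

assert sparse_opts(bookmark=None, branch='stable', rev=None) == {'branch': 'stable'}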
884 @reraise_safe_exceptions
885 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
793 repo = self._factory.repo(wire)
886 repo = self._factory.repo(wire)
794 baseui = self._factory._create_config(wire['config'], hooks=hooks)
887 baseui = self._factory._create_config(wire['config'], hooks=hooks)
795 commands.push(baseui, repo, dest=dest_path, rev=revisions,
888 commands.push(baseui, repo, dest=dest_path, rev=revisions,
796 new_branch=push_branches)
889 new_branch=push_branches)
797
890
798 @reraise_safe_exceptions
891 @reraise_safe_exceptions
892 def strip(self, wire, revision, update, backup):
893 repo = self._factory.repo(wire)
894 ctx = self._get_ctx(repo, revision)
895 hgext_strip(
896 repo.baseui, repo, ctx.node(), update=update, backup=backup)
897
898 @reraise_safe_exceptions
899 def get_unresolved_files(self, wire):
900 repo = self._factory.repo(wire)
901
902 log.debug('Calculating unresolved files for repo: %s', repo)
903 output = io.BytesIO()
904
905 def write(data, **unused_kwargs):
906 output.write(data)
907
908 baseui = self._factory._create_config(wire['config'])
909 baseui.write = write
910
911 commands.resolve(baseui, repo, list=True)
912 unresolved = output.getvalue().splitlines(0)
913 return unresolved
914
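`splitlines(0)` above is simply `splitlines()` with keepends disabled, so each unresolved file lands on its own list entry:

assert 'a.txt\nb.txt\n'.splitlines(0) == ['a.txt', 'b.txt']
assert 'a.txt\nb.txt\n'.splitlines(True) == ['a.txt\n', 'b.txt\n']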
915 @reraise_safe_exceptions
799 def merge(self, wire, revision):
916 def merge(self, wire, revision):
800 repo = self._factory.repo(wire)
917 repo = self._factory.repo(wire)
801 baseui = self._factory._create_config(wire['config'])
918 baseui = self._factory._create_config(wire['config'])
802 repo.ui.setconfig('ui', 'merge', 'internal:dump')
919 repo.ui.setconfig('ui', 'merge', 'internal:dump')
803
920
804 # In case sub repositories are used, mercurial prompts the user in
921 # In case sub repositories are used, mercurial prompts the user in
805 # case of merge conflicts or different sub repository sources. By
922 # case of merge conflicts or different sub repository sources. By
806 # setting the interactive flag to `False` mercurial doesn't prompt the
923 # setting the interactive flag to `False` mercurial doesn't prompt the
807 # user but instead uses a default value.
924 # user but instead uses a default value.
808 repo.ui.setconfig('ui', 'interactive', False)
925 repo.ui.setconfig('ui', 'interactive', False)
809 commands.merge(baseui, repo, rev=revision)
926 commands.merge(baseui, repo, rev=revision)
810
927
811 @reraise_safe_exceptions
928 @reraise_safe_exceptions
812 def merge_state(self, wire):
929 def merge_state(self, wire):
813 repo = self._factory.repo(wire)
930 repo = self._factory.repo(wire)
814 repo.ui.setconfig('ui', 'merge', 'internal:dump')
931 repo.ui.setconfig('ui', 'merge', 'internal:dump')
815
932
816 # In case sub repositories are used, mercurial prompts the user in
933 # In case sub repositories are used, mercurial prompts the user in
817 # case of merge conflicts or different sub repository sources. By
934 # case of merge conflicts or different sub repository sources. By
818 # setting the interactive flag to `False` mercurial doesn't prompt the
935 # setting the interactive flag to `False` mercurial doesn't prompt the
819 # user but instead uses a default value.
936 # user but instead uses a default value.
820 repo.ui.setconfig('ui', 'interactive', False)
937 repo.ui.setconfig('ui', 'interactive', False)
821 ms = hg_merge.mergestate(repo)
938 ms = hg_merge.mergestate(repo)
822 return [x for x in ms.unresolved()]
939 return [x for x in ms.unresolved()]
823
940
824 @reraise_safe_exceptions
941 @reraise_safe_exceptions
825 def commit(self, wire, message, username, close_branch=False):
942 def commit(self, wire, message, username, close_branch=False):
826 repo = self._factory.repo(wire)
943 repo = self._factory.repo(wire)
827 baseui = self._factory._create_config(wire['config'])
944 baseui = self._factory._create_config(wire['config'])
828 repo.ui.setconfig('ui', 'username', username)
945 repo.ui.setconfig('ui', 'username', username)
829 commands.commit(baseui, repo, message=message, close_branch=close_branch)
946 commands.commit(baseui, repo, message=message, close_branch=close_branch)
830
947
831
832 @reraise_safe_exceptions
948 @reraise_safe_exceptions
833 def rebase(self, wire, source=None, dest=None, abort=False):
949 def rebase(self, wire, source=None, dest=None, abort=False):
834 repo = self._factory.repo(wire)
950 repo = self._factory.repo(wire)
835 baseui = self._factory._create_config(wire['config'])
951 baseui = self._factory._create_config(wire['config'])
836 repo.ui.setconfig('ui', 'merge', 'internal:dump')
952 repo.ui.setconfig('ui', 'merge', 'internal:dump')
837 rebase.rebase(
953 # In case sub repositories are used, mercurial prompts the user in
838 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
954 # case of merge conflicts or different sub repository sources. By
955 # setting the interactive flag to `False` mercurial doesn't prompt the
956 # user but instead uses a default value.
957 repo.ui.setconfig('ui', 'interactive', False)
958 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
959
960 @reraise_safe_exceptions
961 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
962 repo = self._factory.repo(wire)
963 ctx = self._get_ctx(repo, revision)
964 node = ctx.node()
965
966 date = (tag_time, tag_timezone)
967 try:
968 hg_tag.tag(repo, name, node, message, local, user, date)
969 except Abort as e:
970 log.exception("Tag operation aborted")
971 # Exception can contain unicode which we convert
972 raise exceptions.AbortException(e)(repr(e))
839
973
840 @reraise_safe_exceptions
974 @reraise_safe_exceptions
841 def bookmark(self, wire, bookmark, revision=None):
975 def bookmark(self, wire, bookmark, revision=None):
842 repo = self._factory.repo(wire)
976 repo = self._factory.repo(wire)
843 baseui = self._factory._create_config(wire['config'])
977 baseui = self._factory._create_config(wire['config'])
844 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
978 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
845
979
846 @reraise_safe_exceptions
980 @reraise_safe_exceptions
847 def install_hooks(self, wire, force=False):
981 def install_hooks(self, wire, force=False):
848 # we don't need any special hooks for Mercurial
982 # we don't need any special hooks for Mercurial
849 pass
983 pass
850
984
851 @reraise_safe_exceptions
985 @reraise_safe_exceptions
852 def get_hooks_info(self, wire):
986 def get_hooks_info(self, wire):
853 return {
987 return {
854 'pre_version': vcsserver.__version__,
988 'pre_version': vcsserver.__version__,
855 'post_version': vcsserver.__version__,
989 'post_version': vcsserver.__version__,
856 }
990 }
@@ -1,711 +1,722 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver.hgcompat import get_ctx
37
36
38 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
39
38
40
39
41 class HooksHttpClient(object):
40 class HooksHttpClient(object):
42 connection = None
41 connection = None
43
42
44 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
45 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
46
45
47 def __call__(self, method, extras):
46 def __call__(self, method, extras):
48 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
49 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
50 try:
49 try:
51 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
52 except Exception:
51 except Exception:
53 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
54 raise
53 raise
55 response = connection.getresponse()
54 response = connection.getresponse()
56
55
57 response_data = response.read()
56 response_data = response.read()
58
57
59 try:
58 try:
60 return json.loads(response_data)
59 return json.loads(response_data)
61 except Exception:
60 except Exception:
62 log.exception('Failed to decode hook response json data. '
61 log.exception('Failed to decode hook response json data. '
63 'response_code:%s, raw_data:%s',
62 'response_code:%s, raw_data:%s',
64 response.status, response_data)
63 response.status, response_data)
65 raise
64 raise
66
65
67 def _serialize(self, hook_name, extras):
66 def _serialize(self, hook_name, extras):
68 data = {
67 data = {
69 'method': hook_name,
68 'method': hook_name,
70 'extras': extras
69 'extras': extras
71 }
70 }
72 return json.dumps(data)
71 return json.dumps(data)
73
72
74
73
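For reference, the request body `_serialize` above produces, and the response shape that `_call_hook` and `_handle_exception` below consume (values illustrative):

import json

body = json.dumps({'method': 'pre_push', 'extras': {'repository': 'repo1'}})
# the hooks endpoint answers with a JSON object along the lines of:
response = {'status': 0, 'output': '',
            'exception': None, 'exception_args': [],
            'exception_traceback': None}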
75 class HooksDummyClient(object):
74 class HooksDummyClient(object):
76 def __init__(self, hooks_module):
75 def __init__(self, hooks_module):
77 self._hooks_module = importlib.import_module(hooks_module)
76 self._hooks_module = importlib.import_module(hooks_module)
78
77
79 def __call__(self, hook_name, extras):
78 def __call__(self, hook_name, extras):
80 with self._hooks_module.Hooks() as hooks:
79 with self._hooks_module.Hooks() as hooks:
81 return getattr(hooks, hook_name)(extras)
80 return getattr(hooks, hook_name)(extras)
82
81
83
82
83 class HooksShadowRepoClient(object):
84
85 def __call__(self, hook_name, extras):
86 return {'output': '', 'status': 0}
87
88
84 class RemoteMessageWriter(object):
89 class RemoteMessageWriter(object):
85 """Writer base class."""
90 """Writer base class."""
86 def write(self, message):
91 def write(self, message):
87 raise NotImplementedError()
92 raise NotImplementedError()
88
93
89
94
90 class HgMessageWriter(RemoteMessageWriter):
95 class HgMessageWriter(RemoteMessageWriter):
91 """Writer that knows how to send messages to mercurial clients."""
96 """Writer that knows how to send messages to mercurial clients."""
92
97
93 def __init__(self, ui):
98 def __init__(self, ui):
94 self.ui = ui
99 self.ui = ui
95
100
96 def write(self, message):
101 def write(self, message):
97 # TODO: Check why the quiet flag is set by default.
102 # TODO: Check why the quiet flag is set by default.
98 old = self.ui.quiet
103 old = self.ui.quiet
99 self.ui.quiet = False
104 self.ui.quiet = False
100 self.ui.status(message.encode('utf-8'))
105 self.ui.status(message.encode('utf-8'))
101 self.ui.quiet = old
106 self.ui.quiet = old
102
107
103
108
104 class GitMessageWriter(RemoteMessageWriter):
109 class GitMessageWriter(RemoteMessageWriter):
105 """Writer that knows how to send messages to git clients."""
110 """Writer that knows how to send messages to git clients."""
106
111
107 def __init__(self, stdout=None):
112 def __init__(self, stdout=None):
108 self.stdout = stdout or sys.stdout
113 self.stdout = stdout or sys.stdout
109
114
110 def write(self, message):
115 def write(self, message):
111 self.stdout.write(message.encode('utf-8'))
116 self.stdout.write(message.encode('utf-8'))
112
117
113
118
114 class SvnMessageWriter(RemoteMessageWriter):
119 class SvnMessageWriter(RemoteMessageWriter):
115 """Writer that knows how to send messages to svn clients."""
120 """Writer that knows how to send messages to svn clients."""
116
121
117 def __init__(self, stderr=None):
122 def __init__(self, stderr=None):
118 # SVN needs data sent to stderr for back-to-client messaging
123 # SVN needs data sent to stderr for back-to-client messaging
119 self.stderr = stderr or sys.stderr
124 self.stderr = stderr or sys.stderr
120
125
121 def write(self, message):
126 def write(self, message):
122 self.stderr.write(message.encode('utf-8'))
127 self.stderr.write(message.encode('utf-8'))
123
128
124
129
125 def _handle_exception(result):
130 def _handle_exception(result):
126 exception_class = result.get('exception')
131 exception_class = result.get('exception')
127 exception_traceback = result.get('exception_traceback')
132 exception_traceback = result.get('exception_traceback')
128
133
129 if exception_traceback:
134 if exception_traceback:
130 log.error('Got traceback from remote call:%s', exception_traceback)
135 log.error('Got traceback from remote call:%s', exception_traceback)
131
136
132 if exception_class == 'HTTPLockedRC':
137 if exception_class == 'HTTPLockedRC':
133 raise exceptions.RepositoryLockedException()(*result['exception_args'])
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
134 elif exception_class == 'HTTPBranchProtected':
139 elif exception_class == 'HTTPBranchProtected':
135 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
136 elif exception_class == 'RepositoryError':
141 elif exception_class == 'RepositoryError':
137 raise exceptions.VcsException()(*result['exception_args'])
142 raise exceptions.VcsException()(*result['exception_args'])
138 elif exception_class:
143 elif exception_class:
139 raise Exception('Got remote exception "%s" with args "%s"' %
144 raise Exception('Got remote exception "%s" with args "%s"' %
140 (exception_class, result['exception_args']))
145 (exception_class, result['exception_args']))
141
146
142
147
143 def _get_hooks_client(extras):
148 def _get_hooks_client(extras):
144 if 'hooks_uri' in extras:
149 hooks_uri = extras.get('hooks_uri')
145 protocol = extras.get('hooks_protocol')
150 is_shadow_repo = extras.get('is_shadow_repo')
151 if hooks_uri:
146 return HooksHttpClient(extras['hooks_uri'])
152 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
154 return HooksShadowRepoClient()
147 else:
155 else:
148 return HooksDummyClient(extras['hooks_module'])
156 return HooksDummyClient(extras['hooks_module'])
149
157
150
158
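The selection order implemented by `_get_hooks_client` above, distilled into a sketch that returns class names instead of instances:

def pick_client(extras):
    if extras.get('hooks_uri'):
        return 'HooksHttpClient'        # talk to the hooks callback daemon over HTTP
    if extras.get('is_shadow_repo'):
        return 'HooksShadowRepoClient'  # shadow repos: no-op, always status 0
    return 'HooksDummyClient'           # in-process hooks module

assert pick_client({'hooks_uri': '127.0.0.1:9000'}) == 'HooksHttpClient'
assert pick_client({'is_shadow_repo': True}) == 'HooksShadowRepoClient'
assert pick_client({'hooks_module': 'x'}) == 'HooksDummyClient'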
151 def _call_hook(hook_name, extras, writer):
159 def _call_hook(hook_name, extras, writer):
152 hooks_client = _get_hooks_client(extras)
160 hooks_client = _get_hooks_client(extras)
153 log.debug('Hooks, using client:%s', hooks_client)
161 log.debug('Hooks, using client:%s', hooks_client)
154 result = hooks_client(hook_name, extras)
162 result = hooks_client(hook_name, extras)
155 log.debug('Hooks got result: %s', result)
163 log.debug('Hooks got result: %s', result)
156
164
157 _handle_exception(result)
165 _handle_exception(result)
158 writer.write(result['output'])
166 writer.write(result['output'])
159
167
160 return result['status']
168 return result['status']
161
169
162
170
163 def _extras_from_ui(ui):
171 def _extras_from_ui(ui):
164 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
165 if not hook_data:
173 if not hook_data:
166 # maybe it's in the environment?
174 # maybe it's in the environment?
167 env_hook_data = os.environ.get('RC_SCM_DATA')
175 env_hook_data = os.environ.get('RC_SCM_DATA')
168 if env_hook_data:
176 if env_hook_data:
169 hook_data = env_hook_data
177 hook_data = env_hook_data
170
178
171 extras = {}
179 extras = {}
172 if hook_data:
180 if hook_data:
173 extras = json.loads(hook_data)
181 extras = json.loads(hook_data)
174 return extras
182 return extras
175
183
176
184
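`_extras_from_ui` above reads RC_SCM_DATA from the hg config and falls back to the process environment; compressed into a standalone helper (stdlib json in place of simplejson):

import os
import json

def extras_from(config_value):
    hook_data = config_value or os.environ.get('RC_SCM_DATA')
    return json.loads(hook_data) if hook_data else {}

assert extras_from('{"SSH": true}') == {'SSH': True}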
177 def _rev_range_hash(repo, node, check_heads=False):
185 def _rev_range_hash(repo, node, check_heads=False):
186 from vcsserver.hgcompat import get_ctx
178
187
179 commits = []
188 commits = []
180 revs = []
189 revs = []
181 start = get_ctx(repo, node).rev()
190 start = get_ctx(repo, node).rev()
182 end = len(repo)
191 end = len(repo)
183 for rev in range(start, end):
192 for rev in range(start, end):
184 revs.append(rev)
193 revs.append(rev)
185 ctx = get_ctx(repo, rev)
194 ctx = get_ctx(repo, rev)
186 commit_id = mercurial.node.hex(ctx.node())
195 commit_id = mercurial.node.hex(ctx.node())
187 branch = ctx.branch()
196 branch = ctx.branch()
188 commits.append((commit_id, branch))
197 commits.append((commit_id, branch))
189
198
190 parent_heads = []
199 parent_heads = []
191 if check_heads:
200 if check_heads:
192 parent_heads = _check_heads(repo, start, end, revs)
201 parent_heads = _check_heads(repo, start, end, revs)
193 return commits, parent_heads
202 return commits, parent_heads
194
203
195
204
196 def _check_heads(repo, start, end, commits):
205 def _check_heads(repo, start, end, commits):
206 from vcsserver.hgcompat import get_ctx
197 changelog = repo.changelog
207 changelog = repo.changelog
198 parents = set()
208 parents = set()
199
209
200 for new_rev in commits:
210 for new_rev in commits:
201 for p in changelog.parentrevs(new_rev):
211 for p in changelog.parentrevs(new_rev):
202 if p == mercurial.node.nullrev:
212 if p == mercurial.node.nullrev:
203 continue
213 continue
204 if p < start:
214 if p < start:
205 parents.add(p)
215 parents.add(p)
206
216
207 for p in parents:
217 for p in parents:
208 branch = get_ctx(repo, p).branch()
218 branch = get_ctx(repo, p).branch()
209 # The heads descending from that parent, on the same branch
219 # The heads descending from that parent, on the same branch
210 parent_heads = set([p])
220 parent_heads = set([p])
211 reachable = set([p])
221 reachable = set([p])
212 for x in xrange(p + 1, end):
222 for x in xrange(p + 1, end):
213 if get_ctx(repo, x).branch() != branch:
223 if get_ctx(repo, x).branch() != branch:
214 continue
224 continue
215 for pp in changelog.parentrevs(x):
225 for pp in changelog.parentrevs(x):
216 if pp in reachable:
226 if pp in reachable:
217 reachable.add(x)
227 reachable.add(x)
218 parent_heads.discard(pp)
228 parent_heads.discard(pp)
219 parent_heads.add(x)
229 parent_heads.add(x)
220 # More than one head? Suggest merging
230 # More than one head? Suggest merging
221 if len(parent_heads) > 1:
231 if len(parent_heads) > 1:
222 return list(parent_heads)
232 return list(parent_heads)
223
233
224 return []
234 return []
225
235
226
236
227 def _get_git_env():
237 def _get_git_env():
228 env = {}
238 env = {}
229 for k, v in os.environ.items():
239 for k, v in os.environ.items():
230 if k.startswith('GIT'):
240 if k.startswith('GIT'):
231 env[k] = v
241 env[k] = v
232
242
233 # serialized version
243 # serialized version
234 return [(k, v) for k, v in env.items()]
244 return [(k, v) for k, v in env.items()]
235
245
236
246
237 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
238 env = {}
248 env = {}
239 for k, v in os.environ.items():
249 for k, v in os.environ.items():
240 if k.startswith('HG'):
250 if k.startswith('HG'):
241 env[k] = v
251 env[k] = v
242
252
243 env['HG_NODE'] = old_rev
253 env['HG_NODE'] = old_rev
244 env['HG_NODE_LAST'] = new_rev
254 env['HG_NODE_LAST'] = new_rev
245 env['HG_TXNID'] = txnid
255 env['HG_TXNID'] = txnid
246 env['HG_PENDING'] = repo_path
256 env['HG_PENDING'] = repo_path
247
257
248 return [(k, v) for k, v in env.items()]
258 return [(k, v) for k, v in env.items()]
249
259
250
260
251 def repo_size(ui, repo, **kwargs):
261 def repo_size(ui, repo, **kwargs):
252 extras = _extras_from_ui(ui)
262 extras = _extras_from_ui(ui)
253 return _call_hook('repo_size', extras, HgMessageWriter(ui))
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
254
264
255
265
256 def pre_pull(ui, repo, **kwargs):
266 def pre_pull(ui, repo, **kwargs):
257 extras = _extras_from_ui(ui)
267 extras = _extras_from_ui(ui)
258 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
259
269
260
270
261 def pre_pull_ssh(ui, repo, **kwargs):
271 def pre_pull_ssh(ui, repo, **kwargs):
262 extras = _extras_from_ui(ui)
272 extras = _extras_from_ui(ui)
263 if extras and extras.get('SSH'):
273 if extras and extras.get('SSH'):
264 return pre_pull(ui, repo, **kwargs)
274 return pre_pull(ui, repo, **kwargs)
265 return 0
275 return 0
266
276
267
277
268 def post_pull(ui, repo, **kwargs):
278 def post_pull(ui, repo, **kwargs):
269 extras = _extras_from_ui(ui)
279 extras = _extras_from_ui(ui)
270 return _call_hook('post_pull', extras, HgMessageWriter(ui))
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
271
281
272
282
273 def post_pull_ssh(ui, repo, **kwargs):
283 def post_pull_ssh(ui, repo, **kwargs):
274 extras = _extras_from_ui(ui)
284 extras = _extras_from_ui(ui)
275 if extras and extras.get('SSH'):
285 if extras and extras.get('SSH'):
276 return post_pull(ui, repo, **kwargs)
286 return post_pull(ui, repo, **kwargs)
277 return 0
287 return 0
278
288
279
289
280 def pre_push(ui, repo, node=None, **kwargs):
290 def pre_push(ui, repo, node=None, **kwargs):
281 """
291 """
282 Mercurial pre_push hook
292 Mercurial pre_push hook
283 """
293 """
284 extras = _extras_from_ui(ui)
294 extras = _extras_from_ui(ui)
285 detect_force_push = extras.get('detect_force_push')
295 detect_force_push = extras.get('detect_force_push')
286
296
287 rev_data = []
297 rev_data = []
288 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
289 branches = collections.defaultdict(list)
299 branches = collections.defaultdict(list)
290 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
291 for commit_id, branch in commits:
301 for commit_id, branch in commits:
292 branches[branch].append(commit_id)
302 branches[branch].append(commit_id)
293
303
294 for branch, commits in branches.items():
304 for branch, commits in branches.items():
295 old_rev = kwargs.get('node_last') or commits[0]
305 old_rev = kwargs.get('node_last') or commits[0]
296 rev_data.append({
306 rev_data.append({
297 'total_commits': len(commits),
307 'total_commits': len(commits),
298 'old_rev': old_rev,
308 'old_rev': old_rev,
299 'new_rev': commits[-1],
309 'new_rev': commits[-1],
300 'ref': '',
310 'ref': '',
301 'type': 'branch',
311 'type': 'branch',
302 'name': branch,
312 'name': branch,
303 })
313 })
304
314
305 for push_ref in rev_data:
315 for push_ref in rev_data:
306 push_ref['multiple_heads'] = _heads
316 push_ref['multiple_heads'] = _heads
307
317
308 repo_path = os.path.join(
318 repo_path = os.path.join(
309 extras.get('repo_store', ''), extras.get('repository', ''))
319 extras.get('repo_store', ''), extras.get('repository', ''))
310 push_ref['hg_env'] = _get_hg_env(
320 push_ref['hg_env'] = _get_hg_env(
311 old_rev=push_ref['old_rev'],
321 old_rev=push_ref['old_rev'],
312 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
313 repo_path=repo_path)
323 repo_path=repo_path)
314
324
315 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
316 extras['commit_ids'] = rev_data
326 extras['commit_ids'] = rev_data
317
327
318 return _call_hook('pre_push', extras, HgMessageWriter(ui))
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
319
329
320
330
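One entry of the `rev_data` list assembled by `pre_push` above, with illustrative values (the keys match the code; the hashes are fake):

example_ref = {
    'total_commits': 2,
    'old_rev': '0' * 40,
    'new_rev': 'f' * 40,
    'ref': '',
    'type': 'branch',
    'name': 'default',
    'multiple_heads': [],               # filled from _check_heads when detect_force_push is set
    'hg_env': [('HG_NODE', '0' * 40)],  # trimmed; built by _get_hg_env
}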
321 def pre_push_ssh(ui, repo, node=None, **kwargs):
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
322 extras = _extras_from_ui(ui)
332 extras = _extras_from_ui(ui)
323 if extras.get('SSH'):
333 if extras.get('SSH'):
324 return pre_push(ui, repo, node, **kwargs)
334 return pre_push(ui, repo, node, **kwargs)
325
335
326 return 0
336 return 0
327
337
328
338
329 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
330 """
340 """
331 Mercurial pre_push hook for SSH
341 Mercurial pre_push hook for SSH
332 """
342 """
333 extras = _extras_from_ui(ui)
343 extras = _extras_from_ui(ui)
334 if extras.get('SSH'):
344 if extras.get('SSH'):
335 permission = extras['SSH_PERMISSIONS']
345 permission = extras['SSH_PERMISSIONS']
336
346
337 if 'repository.write' == permission or 'repository.admin' == permission:
347 if 'repository.write' == permission or 'repository.admin' == permission:
338 return 0
348 return 0
339
349
340 # non-zero ret code
350 # non-zero ret code
341 return 1
351 return 1
342
352
343 return 0
353 return 0
344
354
345
355
346 def post_push(ui, repo, node, **kwargs):
356 def post_push(ui, repo, node, **kwargs):
347 """
357 """
348 Mercurial post_push hook
358 Mercurial post_push hook
349 """
359 """
350 extras = _extras_from_ui(ui)
360 extras = _extras_from_ui(ui)
351
361
352 commit_ids = []
362 commit_ids = []
353 branches = []
363 branches = []
354 bookmarks = []
364 bookmarks = []
355 tags = []
365 tags = []
356
366
357 commits, _heads = _rev_range_hash(repo, node)
367 commits, _heads = _rev_range_hash(repo, node)
358 for commit_id, branch in commits:
368 for commit_id, branch in commits:
359 commit_ids.append(commit_id)
369 commit_ids.append(commit_id)
360 if branch not in branches:
370 if branch not in branches:
361 branches.append(branch)
371 branches.append(branch)
362
372
363 if hasattr(ui, '_rc_pushkey_branches'):
373 if hasattr(ui, '_rc_pushkey_branches'):
364 bookmarks = ui._rc_pushkey_branches
374 bookmarks = ui._rc_pushkey_branches
365
375
366 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
367 extras['commit_ids'] = commit_ids
377 extras['commit_ids'] = commit_ids
368 extras['new_refs'] = {
378 extras['new_refs'] = {
369 'branches': branches,
379 'branches': branches,
370 'bookmarks': bookmarks,
380 'bookmarks': bookmarks,
371 'tags': tags
381 'tags': tags
372 }
382 }
373
383
374 return _call_hook('post_push', extras, HgMessageWriter(ui))
384 return _call_hook('post_push', extras, HgMessageWriter(ui))
375
385
376
386
377 def post_push_ssh(ui, repo, node, **kwargs):
387 def post_push_ssh(ui, repo, node, **kwargs):
378 """
388 """
379 Mercurial post_push hook for SSH
389 Mercurial post_push hook for SSH
380 """
390 """
381 if _extras_from_ui(ui).get('SSH'):
391 if _extras_from_ui(ui).get('SSH'):
382 return post_push(ui, repo, node, **kwargs)
392 return post_push(ui, repo, node, **kwargs)
383 return 0
393 return 0
384
394
385
395
386 def key_push(ui, repo, **kwargs):
396 def key_push(ui, repo, **kwargs):
397 from vcsserver.hgcompat import get_ctx
387 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
388 # store new bookmarks in our UI object propagated later to post_push
399 # store new bookmarks in our UI object propagated later to post_push
389 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
390 return
401 return
391
402
392
403
393 # backward compat
404 # backward compat
394 log_pull_action = post_pull
405 log_pull_action = post_pull
395
406
396 # backward compat
407 # backward compat
397 log_push_action = post_push
408 log_push_action = post_push
398
409
399
410
400 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
401 """
412 """
402 Old hook name: keep here for backward compatibility.
413 Old hook name: keep here for backward compatibility.
403
414
404 This is only required when the installed git hooks are not upgraded.
415 This is only required when the installed git hooks are not upgraded.
405 """
416 """
406 pass
417 pass
407
418
408
419
409 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
410 """
421 """
411 Old hook name: keep here for backward compatibility.
422 Old hook name: keep here for backward compatibility.
412
423
413 This is only required when the installed git hooks are not upgraded.
424 This is only required when the installed git hooks are not upgraded.
414 """
425 """
415 pass
426 pass
416
427
417
428
418 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
419
430
420
431
421 def git_pre_pull(extras):
432 def git_pre_pull(extras):
422 """
433 """
423 Pre pull hook.
434 Pre pull hook.
424
435
425 :param extras: dictionary containing the keys defined in simplevcs
436 :param extras: dictionary containing the keys defined in simplevcs
426 :type extras: dict
437 :type extras: dict
427
438
428 :return: status code of the hook. 0 for success.
439 :return: status code of the hook. 0 for success.
429 :rtype: int
440 :rtype: int
430 """
441 """
431 if 'pull' not in extras['hooks']:
442 if 'pull' not in extras['hooks']:
432 return HookResponse(0, '')
443 return HookResponse(0, '')
433
444
434 stdout = io.BytesIO()
445 stdout = io.BytesIO()
435 try:
446 try:
436 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
437 except Exception as error:
448 except Exception as error:
438 status = 128
449 status = 128
439 stdout.write('ERROR: %s\n' % str(error))
450 stdout.write('ERROR: %s\n' % str(error))
440
451
441 return HookResponse(status, stdout.getvalue())
452 return HookResponse(status, stdout.getvalue())
442
453
443
454
444 def git_post_pull(extras):
455 def git_post_pull(extras):
445 """
456 """
446 Post pull hook.
457 Post pull hook.
447
458
448 :param extras: dictionary containing the keys defined in simplevcs
459 :param extras: dictionary containing the keys defined in simplevcs
449 :type extras: dict
460 :type extras: dict
450
461
451 :return: status code of the hook. 0 for success.
462 :return: status code of the hook. 0 for success.
452 :rtype: int
463 :rtype: int
453 """
464 """
454 if 'pull' not in extras['hooks']:
465 if 'pull' not in extras['hooks']:
455 return HookResponse(0, '')
466 return HookResponse(0, '')
456
467
457 stdout = io.BytesIO()
468 stdout = io.BytesIO()
458 try:
469 try:
459 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
460 except Exception as error:
471 except Exception as error:
461 status = 128
472 status = 128
462 stdout.write('ERROR: %s\n' % error)
473 stdout.write('ERROR: %s\n' % error)
463
474
464 return HookResponse(status, stdout.getvalue())
475 return HookResponse(status, stdout.getvalue())
465
476
466
477
467 def _parse_git_ref_lines(revision_lines):
478 def _parse_git_ref_lines(revision_lines):
468 rev_data = []
479 rev_data = []
469 for revision_line in revision_lines or []:
480 for revision_line in revision_lines or []:
470 old_rev, new_rev, ref = revision_line.strip().split(' ')
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
471 ref_data = ref.split('/', 2)
482 ref_data = ref.split('/', 2)
472 if ref_data[1] in ('tags', 'heads'):
483 if ref_data[1] in ('tags', 'heads'):
473 rev_data.append({
484 rev_data.append({
474 # NOTE(marcink):
485 # NOTE(marcink):
475 # we're unable to tell total_commits for git at this point
486 # we're unable to tell total_commits for git at this point
476 # but we set the variable for consistency with GIT
487 # but we set the variable for consistency with GIT
477 'total_commits': -1,
488 'total_commits': -1,
478 'old_rev': old_rev,
489 'old_rev': old_rev,
479 'new_rev': new_rev,
490 'new_rev': new_rev,
480 'ref': ref,
491 'ref': ref,
481 'type': ref_data[1],
492 'type': ref_data[1],
482 'name': ref_data[2],
493 'name': ref_data[2],
483 })
494 })
484 return rev_data
495 return rev_data
485
496
486
497
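Example input and output for `_parse_git_ref_lines` above (hashes are fake):

line = '%s %s refs/heads/stable' % ('0' * 40, 'a' * 40)
old_rev, new_rev, ref = line.strip().split(' ')
assert ref.split('/', 2) == ['refs', 'heads', 'stable']
# yields: {'old_rev': '000...', 'new_rev': 'aaa...', 'ref': 'refs/heads/stable',
#          'type': 'heads', 'name': 'stable', 'total_commits': -1}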
487 def git_pre_receive(unused_repo_path, revision_lines, env):
498 def git_pre_receive(unused_repo_path, revision_lines, env):
488 """
499 """
489 Pre push hook.
500 Pre push hook.
490
501
491 :param extras: dictionary containing the keys defined in simplevcs
502 :param extras: dictionary containing the keys defined in simplevcs
492 :type extras: dict
503 :type extras: dict
493
504
494 :return: status code of the hook. 0 for success.
505 :return: status code of the hook. 0 for success.
495 :rtype: int
506 :rtype: int
496 """
507 """
497 extras = json.loads(env['RC_SCM_DATA'])
508 extras = json.loads(env['RC_SCM_DATA'])
498 rev_data = _parse_git_ref_lines(revision_lines)
509 rev_data = _parse_git_ref_lines(revision_lines)
499 if 'push' not in extras['hooks']:
510 if 'push' not in extras['hooks']:
500 return 0
511 return 0
501 empty_commit_id = '0' * 40
512 empty_commit_id = '0' * 40
502
513
503 detect_force_push = extras.get('detect_force_push')
514 detect_force_push = extras.get('detect_force_push')
504
515
505 for push_ref in rev_data:
516 for push_ref in rev_data:
506 # store our git-env which holds the temp store
517 # store our git-env which holds the temp store
507 push_ref['git_env'] = _get_git_env()
518 push_ref['git_env'] = _get_git_env()
508 push_ref['pruned_sha'] = ''
519 push_ref['pruned_sha'] = ''
509 if not detect_force_push:
520 if not detect_force_push:
510 # don't check for forced-push when we don't need to
521 # don't check for forced-push when we don't need to
511 continue
522 continue
512
523
513 type_ = push_ref['type']
524 type_ = push_ref['type']
514 new_branch = push_ref['old_rev'] == empty_commit_id
525 new_branch = push_ref['old_rev'] == empty_commit_id
515 delete_branch = push_ref['new_rev'] == empty_commit_id
526 delete_branch = push_ref['new_rev'] == empty_commit_id
516 if type_ == 'heads' and not (new_branch or delete_branch):
527 if type_ == 'heads' and not (new_branch or delete_branch):
517 old_rev = push_ref['old_rev']
528 old_rev = push_ref['old_rev']
518 new_rev = push_ref['new_rev']
529 new_rev = push_ref['new_rev']
519 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
520 stdout, stderr = subprocessio.run_command(
531 stdout, stderr = subprocessio.run_command(
521 cmd, env=os.environ.copy())
532 cmd, env=os.environ.copy())
522 # non-empty output means the old tip is no longer reachable, i.e. a forced push happened
533 # non-empty output means the old tip is no longer reachable, i.e. a forced push happened
523 if stdout:
534 if stdout:
524 push_ref['pruned_sha'] = stdout.splitlines()
535 push_ref['pruned_sha'] = stdout.splitlines()
525
536
526 extras['hook_type'] = 'pre_receive'
537 extras['hook_type'] = 'pre_receive'
527 extras['commit_ids'] = rev_data
538 extras['commit_ids'] = rev_data
528 return _call_hook('pre_push', extras, GitMessageWriter())
539 return _call_hook('pre_push', extras, GitMessageWriter())
529
540
530
541
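The force-push detection above hinges on `git rev-list <old_rev> ^<new_rev>`: it lists commits reachable from the old tip but not from the new one, so any output means history was rewritten. A standalone sketch of the same idea, assuming a local repository path (all names are illustrative):

import subprocess

def is_force_push(repo_path, old_rev, new_rev):
    # Commits reachable from old_rev but absent from new_rev only exist
    # when the new tip does not contain the old one, i.e. a forced update.
    out = subprocess.check_output(
        ['git', 'rev-list', old_rev, '^%s' % new_rev], cwd=repo_path)
    return bool(out.strip())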
531 def git_post_receive(unused_repo_path, revision_lines, env):
542 def git_post_receive(unused_repo_path, revision_lines, env):
532 """
543 """
533 Post push hook.
544 Post push hook.
534
545
535 :param revision_lines: `<old_rev> <new_rev> <ref>` lines passed by git on stdin
546 :param revision_lines: `<old_rev> <new_rev> <ref>` lines passed by git on stdin
536 :param env: hook environment; RC_SCM_DATA holds the simplevcs extras as JSON
547 :param env: hook environment; RC_SCM_DATA holds the simplevcs extras as JSON
537
548
538 :return: status code of the hook. 0 for success.
549 :return: status code of the hook. 0 for success.
539 :rtype: int
550 :rtype: int
540 """
551 """
541 extras = json.loads(env['RC_SCM_DATA'])
552 extras = json.loads(env['RC_SCM_DATA'])
542 if 'push' not in extras['hooks']:
553 if 'push' not in extras['hooks']:
543 return 0
554 return 0
544
555
545 rev_data = _parse_git_ref_lines(revision_lines)
556 rev_data = _parse_git_ref_lines(revision_lines)
546
557
547 git_revs = []
558 git_revs = []
548
559
549 # N.B.(skreft): it is ok to just call git, as git before calling a
560 # N.B.(skreft): it is ok to just call git, as git before calling a
550 # subcommand sets the PATH environment variable so that it points to the
561 # subcommand sets the PATH environment variable so that it points to the
551 # correct version of the git executable.
562 # correct version of the git executable.
552 empty_commit_id = '0' * 40
563 empty_commit_id = '0' * 40
553 branches = []
564 branches = []
554 tags = []
565 tags = []
555 for push_ref in rev_data:
566 for push_ref in rev_data:
556 type_ = push_ref['type']
567 type_ = push_ref['type']
557
568
558 if type_ == 'heads':
569 if type_ == 'heads':
559 if push_ref['old_rev'] == empty_commit_id:
570 if push_ref['old_rev'] == empty_commit_id:
560 # starting new branch case
571 # starting new branch case
561 if push_ref['name'] not in branches:
572 if push_ref['name'] not in branches:
562 branches.append(push_ref['name'])
573 branches.append(push_ref['name'])
563
574
564 # Fix up head revision if needed
575 # Fix up head revision if needed
565 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
566 try:
577 try:
567 subprocessio.run_command(cmd, env=os.environ.copy())
578 subprocessio.run_command(cmd, env=os.environ.copy())
568 except Exception:
579 except Exception:
569 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
570 'refs/heads/%s' % push_ref['name']]
581 'refs/heads/%s' % push_ref['name']]
571 print("Setting default branch to %s" % push_ref['name'])
582 print("Setting default branch to %s" % push_ref['name'])
572 subprocessio.run_command(cmd, env=os.environ.copy())
583 subprocessio.run_command(cmd, env=os.environ.copy())
573
584
574 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
575 '--format=%(refname)', 'refs/heads/*']
586 '--format=%(refname)', 'refs/heads/*']
576 stdout, stderr = subprocessio.run_command(
587 stdout, stderr = subprocessio.run_command(
577 cmd, env=os.environ.copy())
588 cmd, env=os.environ.copy())
578 heads = stdout
589 heads = stdout
579 heads = heads.replace(push_ref['ref'], '')
590 heads = heads.replace(push_ref['ref'], '')
580 heads = ' '.join(head for head
591 heads = ' '.join(head for head
581 in heads.splitlines() if head) or '.'
592 in heads.splitlines() if head) or '.'
582 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
583 '--pretty=format:%H', '--', push_ref['new_rev'],
594 '--pretty=format:%H', '--', push_ref['new_rev'],
584 '--not', heads]
595 '--not', heads]
585 stdout, stderr = subprocessio.run_command(
596 stdout, stderr = subprocessio.run_command(
586 cmd, env=os.environ.copy())
597 cmd, env=os.environ.copy())
587 git_revs.extend(stdout.splitlines())
598 git_revs.extend(stdout.splitlines())
588 elif push_ref['new_rev'] == empty_commit_id:
599 elif push_ref['new_rev'] == empty_commit_id:
589 # delete branch case
600 # delete branch case
590 git_revs.append('delete_branch=>%s' % push_ref['name'])
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
591 else:
602 else:
592 if push_ref['name'] not in branches:
603 if push_ref['name'] not in branches:
593 branches.append(push_ref['name'])
604 branches.append(push_ref['name'])
594
605
595 cmd = [settings.GIT_EXECUTABLE, 'log',
606 cmd = [settings.GIT_EXECUTABLE, 'log',
596 '{old_rev}..{new_rev}'.format(**push_ref),
607 '{old_rev}..{new_rev}'.format(**push_ref),
597 '--reverse', '--pretty=format:%H']
608 '--reverse', '--pretty=format:%H']
598 stdout, stderr = subprocessio.run_command(
609 stdout, stderr = subprocessio.run_command(
599 cmd, env=os.environ.copy())
610 cmd, env=os.environ.copy())
600 git_revs.extend(stdout.splitlines())
611 git_revs.extend(stdout.splitlines())
601 elif type_ == 'tags':
612 elif type_ == 'tags':
602 if push_ref['name'] not in tags:
613 if push_ref['name'] not in tags:
603 tags.append(push_ref['name'])
614 tags.append(push_ref['name'])
604 git_revs.append('tag=>%s' % push_ref['name'])
615 git_revs.append('tag=>%s' % push_ref['name'])
605
616
606 extras['hook_type'] = 'post_receive'
617 extras['hook_type'] = 'post_receive'
607 extras['commit_ids'] = git_revs
618 extras['commit_ids'] = git_revs
608 extras['new_refs'] = {
619 extras['new_refs'] = {
609 'branches': branches,
620 'branches': branches,
610 'bookmarks': [],
621 'bookmarks': [],
611 'tags': tags,
622 'tags': tags,
612 }
623 }
613
624
614 if 'repo_size' in extras['hooks']:
625 if 'repo_size' in extras['hooks']:
615 try:
626 try:
616 _call_hook('repo_size', extras, GitMessageWriter())
627 _call_hook('repo_size', extras, GitMessageWriter())
617 except Exception:
628 except Exception:
618 pass
629 pass
619
630
620 return _call_hook('post_push', extras, GitMessageWriter())
631 return _call_hook('post_push', extras, GitMessageWriter())
621
632
622
633
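To recap the branch handling above: a brand-new branch enumerates commits not reachable from any other head, an ordinary update enumerates `old_rev..new_rev`, and deletions and tags are recorded as `delete_branch=>name` / `tag=>name` markers instead of hashes. A condensed sketch of the update case (repository path and revisions are hypothetical):

import subprocess

def commits_between(repo_path, old_rev, new_rev):
    # Mirrors the '{old_rev}..{new_rev}' enumeration used for branch updates:
    # an oldest-first list of the commit hashes introduced by the push.
    out = subprocess.check_output(
        ['git', 'log', '%s..%s' % (old_rev, new_rev),
         '--reverse', '--pretty=format:%H'], cwd=repo_path)
    return out.splitlines()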
623 def _get_extras_from_txn_id(path, txn_id):
634 def _get_extras_from_txn_id(path, txn_id):
624 extras = {}
635 extras = {}
625 try:
636 try:
626 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
627 '-t', txn_id,
638 '-t', txn_id,
628 '--revprop', path, 'rc-scm-extras']
639 '--revprop', path, 'rc-scm-extras']
629 stdout, stderr = subprocessio.run_command(
640 stdout, stderr = subprocessio.run_command(
630 cmd, env=os.environ.copy())
641 cmd, env=os.environ.copy())
631 extras = json.loads(base64.urlsafe_b64decode(stdout))
642 extras = json.loads(base64.urlsafe_b64decode(stdout))
632 except Exception:
643 except Exception:
633 log.exception('Failed to extract extras info from txn_id')
644 log.exception('Failed to extract extras info from txn_id')
634
645
635 return extras
646 return extras
636
647
637
648
638 def _get_extras_from_commit_id(commit_id, path):
649 def _get_extras_from_commit_id(commit_id, path):
639 extras = {}
650 extras = {}
640 try:
651 try:
641 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
642 '-r', commit_id,
653 '-r', commit_id,
643 '--revprop', path, 'rc-scm-extras']
654 '--revprop', path, 'rc-scm-extras']
644 stdout, stderr = subprocessio.run_command(
655 stdout, stderr = subprocessio.run_command(
645 cmd, env=os.environ.copy())
656 cmd, env=os.environ.copy())
646 extras = json.loads(base64.urlsafe_b64decode(stdout))
657 extras = json.loads(base64.urlsafe_b64decode(stdout))
647 except Exception:
658 except Exception:
648 log.exception('Failed to extract extras info from commit_id')
659 log.exception('Failed to extract extras info from commit_id')
649
660
650 return extras
661 return extras
651
662
652
663
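Both helpers above expect the `rc-scm-extras` revision property to hold URL-safe base64 over a JSON document. The writing side is not part of this diff, so the encoder below is an assumption inferred from the decoders:

import json
import base64

def encode_rc_scm_extras(extras):
    # Assumed writer-side encoding: JSON, then urlsafe base64.
    return base64.urlsafe_b64encode(json.dumps(extras))

def decode_rc_scm_extras(raw):
    # Inverse transform; exactly what the svnlook-based helpers perform.
    return json.loads(base64.urlsafe_b64decode(raw))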
653 def svn_pre_commit(repo_path, commit_data, env):
664 def svn_pre_commit(repo_path, commit_data, env):
654 path, txn_id = commit_data
665 path, txn_id = commit_data
655 branches = []
666 branches = []
656 tags = []
667 tags = []
657
668
658 if env.get('RC_SCM_DATA'):
669 if env.get('RC_SCM_DATA'):
659 extras = json.loads(env['RC_SCM_DATA'])
670 extras = json.loads(env['RC_SCM_DATA'])
660 else:
671 else:
661 # fallback method to read from TXN-ID stored data
672 # fallback method to read from TXN-ID stored data
662 extras = _get_extras_from_txn_id(path, txn_id)
673 extras = _get_extras_from_txn_id(path, txn_id)
663 if not extras:
674 if not extras:
664 return 0
675 return 0
665
676
666 extras['hook_type'] = 'pre_commit'
677 extras['hook_type'] = 'pre_commit'
667 extras['commit_ids'] = [txn_id]
678 extras['commit_ids'] = [txn_id]
668 extras['txn_id'] = txn_id
679 extras['txn_id'] = txn_id
669 extras['new_refs'] = {
680 extras['new_refs'] = {
670 'total_commits': 1,
681 'total_commits': 1,
671 'branches': branches,
682 'branches': branches,
672 'bookmarks': [],
683 'bookmarks': [],
673 'tags': tags,
684 'tags': tags,
674 }
685 }
675
686
676 return _call_hook('pre_push', extras, SvnMessageWriter())
687 return _call_hook('pre_push', extras, SvnMessageWriter())
677
688
678
689
679 def svn_post_commit(repo_path, commit_data, env):
690 def svn_post_commit(repo_path, commit_data, env):
680 """
691 """
681 commit_data is path, rev, txn_id
692 commit_data is path, rev, txn_id
682 """
693 """
683 path, commit_id, txn_id = commit_data
694 path, commit_id, txn_id = commit_data
684 branches = []
695 branches = []
685 tags = []
696 tags = []
686
697
687 if env.get('RC_SCM_DATA'):
698 if env.get('RC_SCM_DATA'):
688 extras = json.loads(env['RC_SCM_DATA'])
699 extras = json.loads(env['RC_SCM_DATA'])
689 else:
700 else:
690 # fallback method to read from TXN-ID stored data
701 # fallback method to read from TXN-ID stored data
691 extras = _get_extras_from_commit_id(commit_id, path)
702 extras = _get_extras_from_commit_id(commit_id, path)
692 if not extras:
703 if not extras:
693 return 0
704 return 0
694
705
695 extras['hook_type'] = 'post_commit'
706 extras['hook_type'] = 'post_commit'
696 extras['commit_ids'] = [commit_id]
707 extras['commit_ids'] = [commit_id]
697 extras['txn_id'] = txn_id
708 extras['txn_id'] = txn_id
698 extras['new_refs'] = {
709 extras['new_refs'] = {
699 'branches': branches,
710 'branches': branches,
700 'bookmarks': [],
711 'bookmarks': [],
701 'tags': tags,
712 'tags': tags,
702 'total_commits': 1,
713 'total_commits': 1,
703 }
714 }
704
715
705 if 'repo_size' in extras['hooks']:
716 if 'repo_size' in extras['hooks']:
706 try:
717 try:
707 _call_hook('repo_size', extras, SvnMessageWriter())
718 _call_hook('repo_size', extras, SvnMessageWriter())
708 except Exception:
719 except Exception:
709 pass
720 pass
710
721
711 return _call_hook('post_push', extras, SvnMessageWriter())
722 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,610 +1,675 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import base64
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import wsgiref.util
24 import wsgiref.util
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 from itertools import chain
27 from itertools import chain
28 from cStringIO import StringIO
28
29
29 import simplejson as json
30 import simplejson as json
30 import msgpack
31 import msgpack
31 from pyramid.config import Configurator
32 from pyramid.config import Configurator
32 from pyramid.settings import asbool, aslist
33 from pyramid.settings import asbool, aslist
33 from pyramid.wsgi import wsgiapp
34 from pyramid.wsgi import wsgiapp
34 from pyramid.compat import configparser
35 from pyramid.compat import configparser
36 from pyramid.response import Response
35
37
38 from vcsserver.utils import safe_int
36
39
37 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
38
41
39 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
42 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
40 # causing problems; if they are, "fix" them by falling back to LC_ALL=C
43 # causing problems; if they are, "fix" them by falling back to LC_ALL=C
41
44
42 try:
45 try:
43 locale.setlocale(locale.LC_ALL, '')
46 locale.setlocale(locale.LC_ALL, '')
44 except locale.Error as e:
47 except locale.Error as e:
45 log.error(
48 log.error(
46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
49 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
47 os.environ['LC_ALL'] = 'C'
50 os.environ['LC_ALL'] = 'C'
48
51
49 import vcsserver
52 import vcsserver
50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
53 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
54 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
55 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
53 from vcsserver.echo_stub.echo_app import EchoApp
56 from vcsserver.echo_stub.echo_app import EchoApp
54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
57 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
55 from vcsserver.lib.exc_tracking import store_exception
58 from vcsserver.lib.exc_tracking import store_exception
56 from vcsserver.server import VcsServer
59 from vcsserver.server import VcsServer
57
60
58 try:
61 try:
59 from vcsserver.git import GitFactory, GitRemote
62 from vcsserver.git import GitFactory, GitRemote
60 except ImportError:
63 except ImportError:
61 GitFactory = None
64 GitFactory = None
62 GitRemote = None
65 GitRemote = None
63
66
64 try:
67 try:
65 from vcsserver.hg import MercurialFactory, HgRemote
68 from vcsserver.hg import MercurialFactory, HgRemote
66 except ImportError:
69 except ImportError:
67 MercurialFactory = None
70 MercurialFactory = None
68 HgRemote = None
71 HgRemote = None
69
72
70 try:
73 try:
71 from vcsserver.svn import SubversionFactory, SvnRemote
74 from vcsserver.svn import SubversionFactory, SvnRemote
72 except ImportError:
75 except ImportError:
73 SubversionFactory = None
76 SubversionFactory = None
74 SvnRemote = None
77 SvnRemote = None
75
78
76
79
77 def _is_request_chunked(environ):
80 def _is_request_chunked(environ):
78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
81 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 return stream
82 return stream
80
83
81
84
82 def _int_setting(settings, name, default):
85 def _int_setting(settings, name, default):
83 settings[name] = int(settings.get(name, default))
86 settings[name] = int(settings.get(name, default))
84 return settings[name]
87 return settings[name]
85
88
86
89
87 def _bool_setting(settings, name, default):
90 def _bool_setting(settings, name, default):
88 input_val = settings.get(name, default)
91 input_val = settings.get(name, default)
89 if isinstance(input_val, unicode):
92 if isinstance(input_val, unicode):
90 input_val = input_val.encode('utf8')
93 input_val = input_val.encode('utf8')
91 settings[name] = asbool(input_val)
94 settings[name] = asbool(input_val)
92 return settings[name]
95 return settings[name]
93
96
94
97
95 def _list_setting(settings, name, default):
98 def _list_setting(settings, name, default):
96 raw_value = settings.get(name, default)
99 raw_value = settings.get(name, default)
97
100
98 # We assume the value uses pyramid's space/newline separation.
101 # We assume the value uses pyramid's space/newline separation.
99 settings[name] = aslist(raw_value)
102 settings[name] = aslist(raw_value)
100 return settings[name]
103 return settings[name]
101
104
102
105
103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
106 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
104 value = settings.get(name, default)
107 value = settings.get(name, default)
105
108
106 if default_when_empty and not value:
109 if default_when_empty and not value:
107 # use default value when value is empty
110 # use default value when value is empty
108 value = default
111 value = default
109
112
110 if lower:
113 if lower:
111 value = value.lower()
114 value = value.lower()
112 settings[name] = value
115 settings[name] = value
113 return settings[name]
116 return settings[name]
114
117
115
118
116 class VCS(object):
119 class VCS(object):
117 def __init__(self, locale=None, cache_config=None):
120 def __init__(self, locale_conf=None, cache_config=None):
118 self.locale = locale
121 self.locale = locale_conf
119 self.cache_config = cache_config
122 self.cache_config = cache_config
120 self._configure_locale()
123 self._configure_locale()
121
124
122 if GitFactory and GitRemote:
125 if GitFactory and GitRemote:
123 git_factory = GitFactory()
126 git_factory = GitFactory()
124 self._git_remote = GitRemote(git_factory)
127 self._git_remote = GitRemote(git_factory)
125 else:
128 else:
126 log.info("Git client import failed")
129 log.info("Git client import failed")
127
130
128 if MercurialFactory and HgRemote:
131 if MercurialFactory and HgRemote:
129 hg_factory = MercurialFactory()
132 hg_factory = MercurialFactory()
130 self._hg_remote = HgRemote(hg_factory)
133 self._hg_remote = HgRemote(hg_factory)
131 else:
134 else:
132 log.info("Mercurial client import failed")
135 log.info("Mercurial client import failed")
133
136
134 if SubversionFactory and SvnRemote:
137 if SubversionFactory and SvnRemote:
135 svn_factory = SubversionFactory()
138 svn_factory = SubversionFactory()
136
139
137 # hg factory is used for svn url validation
140 # hg factory is used for svn url validation
138 hg_factory = MercurialFactory()
141 hg_factory = MercurialFactory()
139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
142 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
140 else:
143 else:
141 log.info("Subversion client import failed")
144 log.info("Subversion client import failed")
142
145
143 self._vcsserver = VcsServer()
146 self._vcsserver = VcsServer()
144
147
145 def _configure_locale(self):
148 def _configure_locale(self):
146 if self.locale:
149 if self.locale:
147 log.info('Setting locale `LC_ALL` to %s', self.locale)
150 log.info('Setting locale `LC_ALL` to %s', self.locale)
148 else:
151 else:
149 log.info(
152 log.info(
150 'Configuring locale subsystem based on environment variables')
153 'Configuring locale subsystem based on environment variables')
151 try:
154 try:
152 # If self.locale is the empty string, then the locale
155 # If self.locale is the empty string, then the locale
153 # module will use the environment variables. See the
156 # module will use the environment variables. See the
154 # documentation of the package `locale`.
157 # documentation of the package `locale`.
155 locale.setlocale(locale.LC_ALL, self.locale)
158 locale.setlocale(locale.LC_ALL, self.locale)
156
159
157 language_code, encoding = locale.getlocale()
160 language_code, encoding = locale.getlocale()
158 log.info(
161 log.info(
159 'Locale set to language code "%s" with encoding "%s".',
162 'Locale set to language code "%s" with encoding "%s".',
160 language_code, encoding)
163 language_code, encoding)
161 except locale.Error:
164 except locale.Error:
162 log.exception(
165 log.exception(
163 'Cannot set locale, not configuring the locale system')
166 'Cannot set locale, not configuring the locale system')
164
167
165
168
166 class WsgiProxy(object):
169 class WsgiProxy(object):
167 def __init__(self, wsgi):
170 def __init__(self, wsgi):
168 self.wsgi = wsgi
171 self.wsgi = wsgi
169
172
170 def __call__(self, environ, start_response):
173 def __call__(self, environ, start_response):
171 input_data = environ['wsgi.input'].read()
174 input_data = environ['wsgi.input'].read()
172 input_data = msgpack.unpackb(input_data)
175 input_data = msgpack.unpackb(input_data)
173
176
174 error = None
177 error = None
175 try:
178 try:
176 data, status, headers = self.wsgi.handle(
179 data, status, headers = self.wsgi.handle(
177 input_data['environment'], input_data['input_data'],
180 input_data['environment'], input_data['input_data'],
178 *input_data['args'], **input_data['kwargs'])
181 *input_data['args'], **input_data['kwargs'])
179 except Exception as e:
182 except Exception as e:
180 data, status, headers = [], None, None
183 data, status, headers = [], None, None
181 error = {
184 error = {
182 'message': str(e),
185 'message': str(e),
183 '_vcs_kind': getattr(e, '_vcs_kind', None)
186 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 }
187 }
185
188
186 start_response('200 OK', [])
189 start_response('200 OK', [])
187 return self._iterator(error, status, headers, data)
190 return self._iterator(error, status, headers, data)
188
191
189 def _iterator(self, error, status, headers, data):
192 def _iterator(self, error, status, headers, data):
190 initial_data = [
193 initial_data = [
191 error,
194 error,
192 status,
195 status,
193 headers,
196 headers,
194 ]
197 ]
195
198
196 for d in chain(initial_data, data):
199 for d in chain(initial_data, data):
197 yield msgpack.packb(d)
200 yield msgpack.packb(d)
198
201
199
202
200 def not_found(request):
203 def not_found(request):
201 return {'status': '404 NOT FOUND'}
204 return {'status': '404 NOT FOUND'}
202
205
203
206
204 class VCSViewPredicate(object):
207 class VCSViewPredicate(object):
205 def __init__(self, val, config):
208 def __init__(self, val, config):
206 self.remotes = val
209 self.remotes = val
207
210
208 def text(self):
211 def text(self):
209 return 'vcs view method = %s' % (self.remotes.keys(),)
212 return 'vcs view method = %s' % (self.remotes.keys(),)
210
213
211 phash = text
214 phash = text
212
215
213 def __call__(self, context, request):
216 def __call__(self, context, request):
214 """
217 """
215 View predicate that returns true if given backend is supported by
218 View predicate that returns true if given backend is supported by
216 defined remotes.
219 defined remotes.
217 """
220 """
218 backend = request.matchdict.get('backend')
221 backend = request.matchdict.get('backend')
219 return backend in self.remotes
222 return backend in self.remotes
220
223
221
224
222 class HTTPApplication(object):
225 class HTTPApplication(object):
223 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
226 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
224
227
225 remote_wsgi = remote_wsgi
228 remote_wsgi = remote_wsgi
226 _use_echo_app = False
229 _use_echo_app = False
227
230
228 def __init__(self, settings=None, global_config=None):
231 def __init__(self, settings=None, global_config=None):
229 self._sanitize_settings_and_apply_defaults(settings)
232 self._sanitize_settings_and_apply_defaults(settings)
230
233
231 self.config = Configurator(settings=settings)
234 self.config = Configurator(settings=settings)
232 self.global_config = global_config
235 self.global_config = global_config
233 self.config.include('vcsserver.lib.rc_cache')
236 self.config.include('vcsserver.lib.rc_cache')
234
237
235 locale = settings.get('locale', '') or 'en_US.UTF-8'
238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
236 vcs = VCS(locale=locale, cache_config=settings)
239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
237 self._remotes = {
240 self._remotes = {
238 'hg': vcs._hg_remote,
241 'hg': vcs._hg_remote,
239 'git': vcs._git_remote,
242 'git': vcs._git_remote,
240 'svn': vcs._svn_remote,
243 'svn': vcs._svn_remote,
241 'server': vcs._vcsserver,
244 'server': vcs._vcsserver,
242 }
245 }
243 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
244 self._use_echo_app = True
247 self._use_echo_app = True
245 log.warning("Using EchoApp for VCS operations.")
248 log.warning("Using EchoApp for VCS operations.")
246 self.remote_wsgi = remote_wsgi_stub
249 self.remote_wsgi = remote_wsgi_stub
247
250
248 self._configure_settings(global_config, settings)
251 self._configure_settings(global_config, settings)
249 self._configure()
252 self._configure()
250
253
251 def _configure_settings(self, global_config, app_settings):
254 def _configure_settings(self, global_config, app_settings):
252 """
255 """
253 Configure the settings module.
256 Configure the settings module.
254 """
257 """
255 settings_merged = global_config.copy()
258 settings_merged = global_config.copy()
256 settings_merged.update(app_settings)
259 settings_merged.update(app_settings)
257
260
258 git_path = app_settings.get('git_path', None)
261 git_path = app_settings.get('git_path', None)
259 if git_path:
262 if git_path:
260 settings.GIT_EXECUTABLE = git_path
263 settings.GIT_EXECUTABLE = git_path
261 binary_dir = app_settings.get('core.binary_dir', None)
264 binary_dir = app_settings.get('core.binary_dir', None)
262 if binary_dir:
265 if binary_dir:
263 settings.BINARY_DIR = binary_dir
266 settings.BINARY_DIR = binary_dir
264
267
265 # Store the settings to make them available to other modules.
268 # Store the settings to make them available to other modules.
266 vcsserver.PYRAMID_SETTINGS = settings_merged
269 vcsserver.PYRAMID_SETTINGS = settings_merged
267 vcsserver.CONFIG = settings_merged
270 vcsserver.CONFIG = settings_merged
268
271
269 def _sanitize_settings_and_apply_defaults(self, settings):
272 def _sanitize_settings_and_apply_defaults(self, settings):
270 temp_store = tempfile.gettempdir()
273 temp_store = tempfile.gettempdir()
271 default_cache_dir = os.path.join(temp_store, 'rc_cache')
274 default_cache_dir = os.path.join(temp_store, 'rc_cache')
272
275
273 # save the default cache dir and use it for all backends later
276 # save the default cache dir and use it for all backends later
274 default_cache_dir = _string_setting(
277 default_cache_dir = _string_setting(
275 settings,
278 settings,
276 'cache_dir',
279 'cache_dir',
277 default_cache_dir, lower=False, default_when_empty=True)
280 default_cache_dir, lower=False, default_when_empty=True)
278
281
279 # ensure we have our dir created
282 # ensure we have our dir created
280 if not os.path.isdir(default_cache_dir):
283 if not os.path.isdir(default_cache_dir):
281 os.makedirs(default_cache_dir, mode=0o755)
284 os.makedirs(default_cache_dir, mode=0o755)
282
285
283 # exception store cache
286 # exception store cache
284 _string_setting(
287 _string_setting(
285 settings,
288 settings,
286 'exception_tracker.store_path',
289 'exception_tracker.store_path',
287 temp_store, lower=False, default_when_empty=True)
290 temp_store, lower=False, default_when_empty=True)
288
291
289 # repo_object cache
292 # repo_object cache
290 _string_setting(
293 _string_setting(
291 settings,
294 settings,
292 'rc_cache.repo_object.backend',
295 'rc_cache.repo_object.backend',
293 'dogpile.cache.rc.memory_lru')
296 'dogpile.cache.rc.file_namespace', lower=False)
294 _int_setting(
297 _int_setting(
295 settings,
298 settings,
296 'rc_cache.repo_object.expiration_time',
299 'rc_cache.repo_object.expiration_time',
297 300)
300 30 * 24 * 60 * 60)
298 _int_setting(
301 _string_setting(
299 settings,
302 settings,
300 'rc_cache.repo_object.max_size',
303 'rc_cache.repo_object.arguments.filename',
301 1024)
304 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
302
305
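The new defaults move the `repo_object` region from the in-memory LRU backend to the file-namespace backend with a 30-day expiration. For orientation, the equivalent settings written out as a plain dict, before sanitization runs (paths are illustrative):

settings = {
    'cache_dir': '/tmp/rc_cache',
    'rc_cache.repo_object.backend': 'dogpile.cache.rc.file_namespace',
    'rc_cache.repo_object.expiration_time': 30 * 24 * 60 * 60,  # seconds, i.e. 30 days
    'rc_cache.repo_object.arguments.filename': '/tmp/rc_cache/vcsserver_cache_1',
}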
303 def _configure(self):
306 def _configure(self):
304 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
307 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
305
308
306 self.config.add_route('service', '/_service')
309 self.config.add_route('service', '/_service')
307 self.config.add_route('status', '/status')
310 self.config.add_route('status', '/status')
308 self.config.add_route('hg_proxy', '/proxy/hg')
311 self.config.add_route('hg_proxy', '/proxy/hg')
309 self.config.add_route('git_proxy', '/proxy/git')
312 self.config.add_route('git_proxy', '/proxy/git')
313
314 # rpc methods
310 self.config.add_route('vcs', '/{backend}')
315 self.config.add_route('vcs', '/{backend}')
316
317 # streaming rpc remote methods
318 self.config.add_route('vcs_stream', '/{backend}/stream')
319
320 # vcs operations clone/push as streaming
311 self.config.add_route('stream_git', '/stream/git/*repo_name')
321 self.config.add_route('stream_git', '/stream/git/*repo_name')
312 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
322 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
313
323
314 self.config.add_view(self.status_view, route_name='status', renderer='json')
324 self.config.add_view(self.status_view, route_name='status', renderer='json')
315 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
325 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
316
326
317 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
327 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
318 self.config.add_view(self.git_proxy(), route_name='git_proxy')
328 self.config.add_view(self.git_proxy(), route_name='git_proxy')
319 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
329 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
320 vcs_view=self._remotes)
330 vcs_view=self._remotes)
331 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
332 vcs_view=self._remotes)
321
333
322 self.config.add_view(self.hg_stream(), route_name='stream_hg')
334 self.config.add_view(self.hg_stream(), route_name='stream_hg')
323 self.config.add_view(self.git_stream(), route_name='stream_git')
335 self.config.add_view(self.git_stream(), route_name='stream_git')
324
336
325 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
337 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
326
338
327 self.config.add_notfound_view(not_found, renderer='json')
339 self.config.add_notfound_view(not_found, renderer='json')
328
340
329 self.config.add_view(self.handle_vcs_exception, context=Exception)
341 self.config.add_view(self.handle_vcs_exception, context=Exception)
330
342
331 self.config.add_tween(
343 self.config.add_tween(
332 'vcsserver.tweens.RequestWrapperTween',
344 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
333 )
345 )
346 self.config.add_request_method(
347 'vcsserver.lib.request_counter.get_request_counter',
348 'request_count')
334
349
335 def wsgi_app(self):
350 def wsgi_app(self):
336 return self.config.make_wsgi_app()
351 return self.config.make_wsgi_app()
337
352
338 def vcs_view(self, request):
353 def _vcs_view_params(self, request):
339 remote = self._remotes[request.matchdict['backend']]
354 remote = self._remotes[request.matchdict['backend']]
340 payload = msgpack.unpackb(request.body, use_list=True)
355 payload = msgpack.unpackb(request.body, use_list=True)
341 method = payload.get('method')
356 method = payload.get('method')
342 params = payload.get('params')
357 params = payload['params']
343 wire = params.get('wire')
358 wire = params.get('wire')
344 args = params.get('args')
359 args = params.get('args')
345 kwargs = params.get('kwargs')
360 kwargs = params.get('kwargs')
346 context_uid = None
361 context_uid = None
347
362
348 if wire:
363 if wire:
349 try:
364 try:
350 wire['context'] = context_uid = uuid.UUID(wire['context'])
365 wire['context'] = context_uid = uuid.UUID(wire['context'])
351 except KeyError:
366 except KeyError:
352 pass
367 pass
353 args.insert(0, wire)
368 args.insert(0, wire)
369 repo_state_uid = wire.get('repo_state_uid') if wire else None
354
370
355 log.debug('method called:%s with kwargs:%s context_uid: %s',
371 # NOTE(marcink): trading complexity for slight performance
356 method, kwargs, context_uid)
372 if log.isEnabledFor(logging.DEBUG):
373 no_args_methods = [
374 'archive_repo'
375 ]
376 if method in no_args_methods:
377 call_args = ''
378 else:
379 call_args = args[1:]
380
381 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
382 method, call_args, kwargs, context_uid, repo_state_uid)
383
384 return payload, remote, method, args, kwargs
385
386 def vcs_view(self, request):
387
388 payload, remote, method, args, kwargs = self._vcs_view_params(request)
389 payload_id = payload.get('id')
390
357 try:
391 try:
358 resp = getattr(remote, method)(*args, **kwargs)
392 resp = getattr(remote, method)(*args, **kwargs)
359 except Exception as e:
393 except Exception as e:
360 exc_info = list(sys.exc_info())
394 exc_info = list(sys.exc_info())
361 exc_type, exc_value, exc_traceback = exc_info
395 exc_type, exc_value, exc_traceback = exc_info
362
396
363 org_exc = getattr(e, '_org_exc', None)
397 org_exc = getattr(e, '_org_exc', None)
364 org_exc_name = None
398 org_exc_name = None
365 org_exc_tb = ''
399 org_exc_tb = ''
366 if org_exc:
400 if org_exc:
367 org_exc_name = org_exc.__class__.__name__
401 org_exc_name = org_exc.__class__.__name__
368 org_exc_tb = getattr(e, '_org_exc_tb', '')
402 org_exc_tb = getattr(e, '_org_exc_tb', '')
369 # replace our "faked" exception with our org
403 # replace our "faked" exception with our org
370 exc_info[0] = org_exc.__class__
404 exc_info[0] = org_exc.__class__
371 exc_info[1] = org_exc
405 exc_info[1] = org_exc
372
406
373 store_exception(id(exc_info), exc_info)
407 store_exception(id(exc_info), exc_info)
374
408
375 tb_info = ''.join(
409 tb_info = ''.join(
376 traceback.format_exception(exc_type, exc_value, exc_traceback))
410 traceback.format_exception(exc_type, exc_value, exc_traceback))
377
411
378 type_ = e.__class__.__name__
412 type_ = e.__class__.__name__
379 if type_ not in self.ALLOWED_EXCEPTIONS:
413 if type_ not in self.ALLOWED_EXCEPTIONS:
380 type_ = None
414 type_ = None
381
415
382 resp = {
416 resp = {
383 'id': payload.get('id'),
417 'id': payload_id,
384 'error': {
418 'error': {
385 'message': e.message,
419 'message': e.message,
386 'traceback': tb_info,
420 'traceback': tb_info,
387 'org_exc': org_exc_name,
421 'org_exc': org_exc_name,
388 'org_exc_tb': org_exc_tb,
422 'org_exc_tb': org_exc_tb,
389 'type': type_
423 'type': type_
390 }
424 }
391 }
425 }
392 try:
426 try:
393 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
427 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
394 except AttributeError:
428 except AttributeError:
395 pass
429 pass
396 else:
430 else:
397 resp = {
431 resp = {
398 'id': payload.get('id'),
432 'id': payload_id,
399 'result': resp
433 'result': resp
400 }
434 }
401
435
402 return resp
436 return resp
403
437
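As `_vcs_view_params` implies, a client POSTs a msgpack-encoded body of roughly this shape to `/{backend}`; the method name below is illustrative, not a documented endpoint contract:

import uuid
import msgpack

payload = {
    'id': str(uuid.uuid4()),         # echoed back in the response
    'method': 'some_remote_method',  # any method exposed by the chosen remote
    'params': {
        'wire': {'context': str(uuid.uuid4()), 'repo_id': 'some-repo'},
        'args': [],
        'kwargs': {},
    },
}
body = msgpack.packb(payload)
# POST `body` to e.g. http://vcsserver/git; the reply is msgpack holding
# either {'id': ..., 'result': ...} or {'id': ..., 'error': {...}}.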
438 def vcs_stream_view(self, request):
439 payload, remote, method, args, kwargs = self._vcs_view_params(request)
440 # the method name carries a `stream:` prefix; strip it here
441 method = method.split('stream:')[-1]
442 chunk_size = safe_int(payload.get('chunk_size')) or 4096
443
444 try:
445 resp = getattr(remote, method)(*args, **kwargs)
446 except Exception:
447 raise
448
449 def get_chunked_data(method_resp):
450 stream = StringIO(method_resp)
451 while 1:
452 chunk = stream.read(chunk_size)
453 if not chunk:
454 break
455 yield chunk
456
457 response = Response(app_iter=get_chunked_data(resp))
458 response.content_type = 'application/octet-stream'
459
460 return response
461
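A client of `vcs_stream_view` would prefix the method name with `stream:` and read the `application/octet-stream` body chunk by chunk. A hedged sketch using `requests` (host, method name, and consumer are assumptions):

import msgpack
import requests

payload = msgpack.packb({
    'id': 'req-1',
    'method': 'stream:some_streaming_method',  # 'stream:' is stripped server-side
    'chunk_size': 4096,
    'params': {'wire': {}, 'args': [], 'kwargs': {}},
})
resp = requests.post('http://vcsserver/git/stream', data=payload, stream=True)
for chunk in resp.iter_content(chunk_size=4096):
    handle_chunk(chunk)  # hypothetical consumer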
404 def status_view(self, request):
462 def status_view(self, request):
405 import vcsserver
463 import vcsserver
406 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
464 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
407 'pid': os.getpid()}
465 'pid': os.getpid()}
408
466
409 def service_view(self, request):
467 def service_view(self, request):
410 import vcsserver
468 import vcsserver
411
469
412 payload = msgpack.unpackb(request.body, use_list=True)
470 payload = msgpack.unpackb(request.body, use_list=True)
471 server_config, app_config = {}, {}
413
472
414 try:
473 try:
415 path = self.global_config['__file__']
474 path = self.global_config['__file__']
416 config = configparser.ConfigParser()
475 config = configparser.RawConfigParser()
476
417 config.read(path)
477 config.read(path)
418 parsed_ini = config
478
419 if parsed_ini.has_section('server:main'):
479 if config.has_section('server:main'):
420 parsed_ini = dict(parsed_ini.items('server:main'))
480 server_config = dict(config.items('server:main'))
481 if config.has_section('app:main'):
482 app_config = dict(config.items('app:main'))
483
421 except Exception:
484 except Exception:
422 log.exception('Failed to read .ini file for display')
485 log.exception('Failed to read .ini file for display')
423 parsed_ini = {}
486
487 environ = os.environ.items()
424
488
425 resp = {
489 resp = {
426 'id': payload.get('id'),
490 'id': payload.get('id'),
427 'result': dict(
491 'result': dict(
428 version=vcsserver.__version__,
492 version=vcsserver.__version__,
429 config=parsed_ini,
493 config=server_config,
494 app_config=app_config,
495 environ=environ,
430 payload=payload,
496 payload=payload,
431 )
497 )
432 }
498 }
433 return resp
499 return resp
434
500
435 def _msgpack_renderer_factory(self, info):
501 def _msgpack_renderer_factory(self, info):
436 def _render(value, system):
502 def _render(value, system):
437 value = msgpack.packb(value)
438 request = system.get('request')
503 request = system.get('request')
439 if request is not None:
504 if request is not None:
440 response = request.response
505 response = request.response
441 ct = response.content_type
506 ct = response.content_type
442 if ct == response.default_content_type:
507 if ct == response.default_content_type:
443 response.content_type = 'application/x-msgpack'
508 response.content_type = 'application/x-msgpack'
444 return value
509 return msgpack.packb(value)
445 return _render
510 return _render
446
511
447 def set_env_from_config(self, environ, config):
512 def set_env_from_config(self, environ, config):
448 dict_conf = {}
513 dict_conf = {}
449 try:
514 try:
450 for elem in config:
515 for elem in config:
451 if elem[0] == 'rhodecode':
516 if elem[0] == 'rhodecode':
452 dict_conf = json.loads(elem[2])
517 dict_conf = json.loads(elem[2])
453 break
518 break
454 except Exception:
519 except Exception:
455 log.exception('Failed to fetch SCM CONFIG')
520 log.exception('Failed to fetch SCM CONFIG')
456 return
521 return
457
522
458 username = dict_conf.get('username')
523 username = dict_conf.get('username')
459 if username:
524 if username:
460 environ['REMOTE_USER'] = username
525 environ['REMOTE_USER'] = username
461 # mercurial specific; some extension APIs rely on this
526 # mercurial specific; some extension APIs rely on this
462 environ['HGUSER'] = username
527 environ['HGUSER'] = username
463
528
464 ip = dict_conf.get('ip')
529 ip = dict_conf.get('ip')
465 if ip:
530 if ip:
466 environ['REMOTE_HOST'] = ip
531 environ['REMOTE_HOST'] = ip
467
532
468 if _is_request_chunked(environ):
533 if _is_request_chunked(environ):
469 # set the compatibility flag for webob
534 # set the compatibility flag for webob
470 environ['wsgi.input_terminated'] = True
535 environ['wsgi.input_terminated'] = True
471
536
472 def hg_proxy(self):
537 def hg_proxy(self):
473 @wsgiapp
538 @wsgiapp
474 def _hg_proxy(environ, start_response):
539 def _hg_proxy(environ, start_response):
475 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
540 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
476 return app(environ, start_response)
541 return app(environ, start_response)
477 return _hg_proxy
542 return _hg_proxy
478
543
479 def git_proxy(self):
544 def git_proxy(self):
480 @wsgiapp
545 @wsgiapp
481 def _git_proxy(environ, start_response):
546 def _git_proxy(environ, start_response):
482 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
547 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
483 return app(environ, start_response)
548 return app(environ, start_response)
484 return _git_proxy
549 return _git_proxy
485
550
486 def hg_stream(self):
551 def hg_stream(self):
487 if self._use_echo_app:
552 if self._use_echo_app:
488 @wsgiapp
553 @wsgiapp
489 def _hg_stream(environ, start_response):
554 def _hg_stream(environ, start_response):
490 app = EchoApp('fake_path', 'fake_name', None)
555 app = EchoApp('fake_path', 'fake_name', None)
491 return app(environ, start_response)
556 return app(environ, start_response)
492 return _hg_stream
557 return _hg_stream
493 else:
558 else:
494 @wsgiapp
559 @wsgiapp
495 def _hg_stream(environ, start_response):
560 def _hg_stream(environ, start_response):
496 log.debug('http-app: handling hg stream')
561 log.debug('http-app: handling hg stream')
497 repo_path = environ['HTTP_X_RC_REPO_PATH']
562 repo_path = environ['HTTP_X_RC_REPO_PATH']
498 repo_name = environ['HTTP_X_RC_REPO_NAME']
563 repo_name = environ['HTTP_X_RC_REPO_NAME']
499 packed_config = base64.b64decode(
564 packed_config = base64.b64decode(
500 environ['HTTP_X_RC_REPO_CONFIG'])
565 environ['HTTP_X_RC_REPO_CONFIG'])
501 config = msgpack.unpackb(packed_config)
566 config = msgpack.unpackb(packed_config)
502 app = scm_app.create_hg_wsgi_app(
567 app = scm_app.create_hg_wsgi_app(
503 repo_path, repo_name, config)
568 repo_path, repo_name, config)
504
569
505 # Consistent path information for hgweb
570 # Consistent path information for hgweb
506 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
571 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
507 environ['REPO_NAME'] = repo_name
572 environ['REPO_NAME'] = repo_name
508 self.set_env_from_config(environ, config)
573 self.set_env_from_config(environ, config)
509
574
510 log.debug('http-app: starting app handler '
575 log.debug('http-app: starting app handler '
511 'with %s and process request', app)
576 'with %s and process request', app)
512 return app(environ, ResponseFilter(start_response))
577 return app(environ, ResponseFilter(start_response))
513 return _hg_stream
578 return _hg_stream
514
579
515 def git_stream(self):
580 def git_stream(self):
516 if self._use_echo_app:
581 if self._use_echo_app:
517 @wsgiapp
582 @wsgiapp
518 def _git_stream(environ, start_response):
583 def _git_stream(environ, start_response):
519 app = EchoApp('fake_path', 'fake_name', None)
584 app = EchoApp('fake_path', 'fake_name', None)
520 return app(environ, start_response)
585 return app(environ, start_response)
521 return _git_stream
586 return _git_stream
522 else:
587 else:
523 @wsgiapp
588 @wsgiapp
524 def _git_stream(environ, start_response):
589 def _git_stream(environ, start_response):
525 log.debug('http-app: handling git stream')
590 log.debug('http-app: handling git stream')
526 repo_path = environ['HTTP_X_RC_REPO_PATH']
591 repo_path = environ['HTTP_X_RC_REPO_PATH']
527 repo_name = environ['HTTP_X_RC_REPO_NAME']
592 repo_name = environ['HTTP_X_RC_REPO_NAME']
528 packed_config = base64.b64decode(
593 packed_config = base64.b64decode(
529 environ['HTTP_X_RC_REPO_CONFIG'])
594 environ['HTTP_X_RC_REPO_CONFIG'])
530 config = msgpack.unpackb(packed_config)
595 config = msgpack.unpackb(packed_config)
531
596
532 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
597 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
533 self.set_env_from_config(environ, config)
598 self.set_env_from_config(environ, config)
534
599
535 content_type = environ.get('CONTENT_TYPE', '')
600 content_type = environ.get('CONTENT_TYPE', '')
536
601
537 path = environ['PATH_INFO']
602 path = environ['PATH_INFO']
538 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
603 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
539 log.debug(
604 log.debug(
540 'LFS: Detecting if request `%s` is LFS server path based '
605 'LFS: Detecting if request `%s` is LFS server path based '
541 'on content type:`%s`, is_lfs:%s',
606 'on content type:`%s`, is_lfs:%s',
542 path, content_type, is_lfs_request)
607 path, content_type, is_lfs_request)
543
608
544 if not is_lfs_request:
609 if not is_lfs_request:
545 # fallback detection by path
610 # fallback detection by path
546 if GIT_LFS_PROTO_PAT.match(path):
611 if GIT_LFS_PROTO_PAT.match(path):
547 is_lfs_request = True
612 is_lfs_request = True
548 log.debug(
613 log.debug(
549 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
614 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
550 path, is_lfs_request)
615 path, is_lfs_request)
551
616
552 if is_lfs_request:
617 if is_lfs_request:
553 app = scm_app.create_git_lfs_wsgi_app(
618 app = scm_app.create_git_lfs_wsgi_app(
554 repo_path, repo_name, config)
619 repo_path, repo_name, config)
555 else:
620 else:
556 app = scm_app.create_git_wsgi_app(
621 app = scm_app.create_git_wsgi_app(
557 repo_path, repo_name, config)
622 repo_path, repo_name, config)
558
623
559 log.debug('http-app: starting app handler '
624 log.debug('http-app: starting app handler '
560 'with %s and process request', app)
625 'with %s and process request', app)
561
626
562 return app(environ, start_response)
627 return app(environ, start_response)
563
628
564 return _git_stream
629 return _git_stream
565
630
566 def handle_vcs_exception(self, exception, request):
631 def handle_vcs_exception(self, exception, request):
567 _vcs_kind = getattr(exception, '_vcs_kind', '')
632 _vcs_kind = getattr(exception, '_vcs_kind', '')
568 if _vcs_kind == 'repo_locked':
633 if _vcs_kind == 'repo_locked':
569 # Get custom repo-locked status code if present.
634 # Get custom repo-locked status code if present.
570 status_code = request.headers.get('X-RC-Locked-Status-Code')
635 status_code = request.headers.get('X-RC-Locked-Status-Code')
571 return HTTPRepoLocked(
636 return HTTPRepoLocked(
572 title=exception.message, status_code=status_code)
637 title=exception.message, status_code=status_code)
573
638
574 elif _vcs_kind == 'repo_branch_protected':
639 elif _vcs_kind == 'repo_branch_protected':
575 # Get custom repo-branch-protected status code if present.
640 # Get custom repo-branch-protected status code if present.
576 return HTTPRepoBranchProtected(title=exception.message)
641 return HTTPRepoBranchProtected(title=exception.message)
577
642
578 exc_info = request.exc_info
643 exc_info = request.exc_info
579 store_exception(id(exc_info), exc_info)
644 store_exception(id(exc_info), exc_info)
580
645
581 traceback_info = 'unavailable'
646 traceback_info = 'unavailable'
582 if request.exc_info:
647 if request.exc_info:
583 exc_type, exc_value, exc_tb = request.exc_info
648 exc_type, exc_value, exc_tb = request.exc_info
584 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
649 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
585
650
586 log.error(
651 log.error(
587 'error occurred handling this request for path: %s, \n tb: %s',
652 'error occurred handling this request for path: %s, \n tb: %s',
588 request.path, traceback_info)
653 request.path, traceback_info)
589 raise exception
654 raise exception
590
655
591
656
592 class ResponseFilter(object):
657 class ResponseFilter(object):
593
658
594 def __init__(self, start_response):
659 def __init__(self, start_response):
595 self._start_response = start_response
660 self._start_response = start_response
596
661
597 def __call__(self, status, response_headers, exc_info=None):
662 def __call__(self, status, response_headers, exc_info=None):
598 headers = tuple(
663 headers = tuple(
599 (h, v) for h, v in response_headers
664 (h, v) for h, v in response_headers
600 if not wsgiref.util.is_hop_by_hop(h))
665 if not wsgiref.util.is_hop_by_hop(h))
601 return self._start_response(status, headers, exc_info)
666 return self._start_response(status, headers, exc_info)
602
667
603
668
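`ResponseFilter` exists because WSGI forbids applications from passing hop-by-hop headers (Connection, Transfer-Encoding, Keep-Alive, and friends) on to the server; `wsgiref.util.is_hop_by_hop` is the stdlib check used above. For example:

import wsgiref.util

wsgiref.util.is_hop_by_hop('Transfer-Encoding')  # True  -> header is dropped
wsgiref.util.is_hop_by_hop('Content-Type')       # False -> header passes through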
604 def main(global_config, **settings):
669 def main(global_config, **settings):
605 if MercurialFactory:
670 if MercurialFactory:
606 hgpatches.patch_largefiles_capabilities()
671 hgpatches.patch_largefiles_capabilities()
607 hgpatches.patch_subrepo_type_mapping()
672 hgpatches.patch_subrepo_type_mapping()
608
673
609 app = HTTPApplication(settings=settings, global_config=global_config)
674 app = HTTPApplication(settings=settings, global_config=global_config)
610 return app.wsgi_app()
675 return app.wsgi_app()
@@ -1,60 +1,72 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 from dogpile.cache import register_backend
19 from dogpile.cache import register_backend
20
20
21 register_backend(
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
23 "LRUMemoryBackend")
24
24
25 register_backend(
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
27 "FileNamespaceBackend")
28
29 register_backend(
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
31 "RedisPickleBackend")
32
33 register_backend(
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
35 "RedisMsgPackBackend")
36
37
25 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
26
39
27 from . import region_meta
40 from . import region_meta
28 from .util import key_generator, get_default_cache_settings, make_region
41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
29
42
30
43
31 def configure_dogpile_cache(settings):
44 def configure_dogpile_cache(settings):
32 cache_dir = settings.get('cache_dir')
45 cache_dir = settings.get('cache_dir')
33 if cache_dir:
46 if cache_dir:
34 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
47 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
35
48
36 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
49 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
37
50
38 # inspect available namespaces
51 # inspect available namespaces
39 avail_regions = set()
52 avail_regions = set()
40 for key in rc_cache_data.keys():
53 for key in rc_cache_data.keys():
41 namespace_name = key.split('.', 1)[0]
54 namespace_name = key.split('.', 1)[0]
42 avail_regions.add(namespace_name)
55 avail_regions.add(namespace_name)
43 log.debug('dogpile: found following cache regions: %s', avail_regions)
56 log.debug('dogpile: found following cache regions: %s', avail_regions)
44
57
45 # register them into namespace
58 # register them into namespace
46 for region_name in avail_regions:
59 for region_name in avail_regions:
47 new_region = make_region(
60 new_region = make_region(
48 name=region_name,
61 name=region_name,
49 function_key_generator=key_generator
62 function_key_generator=None
50 )
63 )
51
64
52 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
65 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
53
66 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
54 log.debug('dogpile: registering a new region %s[%s]',
67 log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
55 region_name, new_region.__dict__)
56 region_meta.dogpile_cache_regions[region_name] = new_region
68 region_meta.dogpile_cache_regions[region_name] = new_region
57
69
58
70
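Once `configure_dogpile_cache` has run, every `rc_cache.<name>.*` settings group yields a region in `region_meta.dogpile_cache_regions`. A minimal usage sketch, assuming a `repo_object` region was configured as above (`do_expensive_work` is a hypothetical helper):

from vcsserver.lib.rc_cache import region_meta

region = region_meta.dogpile_cache_regions['repo_object']

@region.cache_on_arguments()
def expensive_lookup(repo_id):
    # computed once per repo_id until the region's expiration_time lapses
    return do_expensive_work(repo_id)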
59 def includeme(config):
71 def includeme(config):
60 configure_dogpile_cache(config.registry.settings)
72 configure_dogpile_cache(config.registry.settings)
@@ -1,51 +1,253 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
19 import errno
18 import logging
20 import logging
19
21
22 import msgpack
23 import redis
24
25 from dogpile.cache.api import CachedValue
20 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
31
21 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
22
33
23
34
24 _default_max_size = 1024
35 _default_max_size = 1024
25
36
26 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
27
38
28
39
29 class LRUMemoryBackend(memory_backend.MemoryBackend):
40 class LRUMemoryBackend(memory_backend.MemoryBackend):
41 key_prefix = 'lru_mem_backend'
30 pickle_values = False
42 pickle_values = False
31
43
32 def __init__(self, arguments):
44 def __init__(self, arguments):
33 max_size = arguments.pop('max_size', _default_max_size)
45 max_size = arguments.pop('max_size', _default_max_size)
34
46
35 LRUDictClass = LRUDict
47 LRUDictClass = LRUDict
36 if arguments.pop('log_key_count', None):
48 if arguments.pop('log_key_count', None):
37 LRUDictClass = LRUDictDebug
49 LRUDictClass = LRUDictDebug
38
50
39 arguments['cache_dict'] = LRUDictClass(max_size)
51 arguments['cache_dict'] = LRUDictClass(max_size)
40 super(LRUMemoryBackend, self).__init__(arguments)
52 super(LRUMemoryBackend, self).__init__(arguments)
41
53
42 def delete(self, key):
54 def delete(self, key):
43 try:
55 try:
44 del self._cache[key]
56 del self._cache[key]
45 except KeyError:
57 except KeyError:
46 # we don't care if key isn't there at deletion
58 # we don't care if key isn't there at deletion
47 pass
59 pass
48
60
49 def delete_multi(self, keys):
61 def delete_multi(self, keys):
50 for key in keys:
62 for key in keys:
51 self.delete(key)
63 self.delete(key)
64
65
66 class PickleSerializer(object):
67
68 def _dumps(self, value, safe=False):
69 try:
70 return compat.pickle.dumps(value)
71 except Exception:
72 if safe:
73 return NO_VALUE
74 else:
75 raise
76
77 def _loads(self, value, safe=True):
78 try:
79 return compat.pickle.loads(value)
80 except Exception:
81 if safe:
82 return NO_VALUE
83 else:
84 raise
85
86
87 class MsgPackSerializer(object):
88
89 def _dumps(self, value, safe=False):
90 try:
91 return msgpack.packb(value)
92 except Exception:
93 if safe:
94 return NO_VALUE
95 else:
96 raise
97
98 def _loads(self, value, safe=True):
99 """
100 pickle maintains the `CachedValue` wrapper around the tuple;
101 msgpack does not, so it must be added back in.
102 """
103 try:
104 value = msgpack.unpackb(value, use_list=False)
105 return CachedValue(*value)
106 except Exception:
107 if safe:
108 return NO_VALUE
109 else:
110 raise
111
112
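The wrapper asymmetry described above is easiest to see in a round trip; a minimal sketch, assuming dogpile's `CachedValue` is a two-element tuple of (payload, metadata):

    serializer = MsgPackSerializer()
    cached = CachedValue('payload', {'ct': 1234567890.0, 'v': 1})

    raw = serializer._dumps(cached)    # packed as a plain msgpack array
    restored = serializer._loads(raw)  # _loads re-wraps it in CachedValue
    assert restored.payload == 'payload'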
113 import fcntl
114 flock_org = fcntl.flock
115
116
117 class CustomLockFactory(FileLock):
118
119 pass
120
121
122 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
123 key_prefix = 'file_backend'
124
125 def __init__(self, arguments):
126 arguments['lock_factory'] = CustomLockFactory
127 super(FileNamespaceBackend, self).__init__(arguments)
128
129 def __repr__(self):
130 return '{} `{}`'.format(self.__class__, self.filename)
131
132 def list_keys(self, prefix=''):
133 prefix = '{}:{}'.format(self.key_prefix, prefix)
134
135 def cond(v):
136 if not prefix:
137 return True
138
139 if v.startswith(prefix):
140 return True
141 return False
142
143 with self._dbm_file(True) as dbm:
144
145 return filter(cond, dbm.keys())
146
147 def get_store(self):
148 return self.filename
149
150 def get(self, key):
151 with self._dbm_file(False) as dbm:
152 if hasattr(dbm, 'get'):
153 value = dbm.get(key, NO_VALUE)
154 else:
155 # gdbm objects lack a .get method
156 try:
157 value = dbm[key]
158 except KeyError:
159 value = NO_VALUE
160 if value is not NO_VALUE:
161 value = self._loads(value)
162 return value
163
164 def set(self, key, value):
165 with self._dbm_file(True) as dbm:
166 dbm[key] = self._dumps(value)
167
168 def set_multi(self, mapping):
169 with self._dbm_file(True) as dbm:
170 for key, value in mapping.items():
171 dbm[key] = self._dumps(value)
172
173
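A usage sketch for the file backend (the filename is hypothetical); stored keys carry the `file_backend` prefix, so `list_keys` filters on `key_prefix:prefix`:

    backend = FileNamespaceBackend({'filename': '/tmp/rc_cache/repo_object.db'})
    backend.set('file_backend:repo_object:rev', CachedValue('abc123', {}))

    backend.list_keys(prefix='repo_object')
    # -> ['file_backend:repo_object:rev']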
174 class BaseRedisBackend(redis_backend.RedisBackend):
175
176 def _create_client(self):
177 args = {}
178
179 if self.url is not None:
180 args.update(url=self.url)
181
182 else:
183 args.update(
184 host=self.host, password=self.password,
185 port=self.port, db=self.db
186 )
187
188 connection_pool = redis.ConnectionPool(**args)
189
190 return redis.StrictRedis(connection_pool=connection_pool)
191
192 def list_keys(self, prefix=''):
193 prefix = '{}:{}*'.format(self.key_prefix, prefix)
194 return self.client.keys(prefix)
195
196 def get_store(self):
197 return self.client.connection_pool
198
199 def get(self, key):
200 value = self.client.get(key)
201 if value is None:
202 return NO_VALUE
203 return self._loads(value)
204
205 def get_multi(self, keys):
206 if not keys:
207 return []
208 values = self.client.mget(keys)
209 loads = self._loads
210 return [
211 loads(v) if v is not None else NO_VALUE
212 for v in values]
213
214 def set(self, key, value):
215 if self.redis_expiration_time:
216 self.client.setex(key, self.redis_expiration_time,
217 self._dumps(value))
218 else:
219 self.client.set(key, self._dumps(value))
220
221 def set_multi(self, mapping):
222 dumps = self._dumps
223 mapping = dict(
224 (k, dumps(v))
225 for k, v in mapping.items()
226 )
227
228 if not self.redis_expiration_time:
229 self.client.mset(mapping)
230 else:
231 pipe = self.client.pipeline()
232 for key, value in mapping.items():
233 pipe.setex(key, self.redis_expiration_time, value)
234 pipe.execute()
235
236 def get_mutex(self, key):
237 u = redis_backend.u
238 if self.distributed_lock:
239 lock_key = u('_lock_{0}').format(key)
240 log.debug('Trying to acquire Redis lock for key %s', lock_key)
241 return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
242 else:
243 return None
244
245
246 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
247 key_prefix = 'redis_pickle_backend'
248 pass
249
250
251 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
252 key_prefix = 'redis_msgpack_backend'
253 pass
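A configuration sketch for the new Redis msgpack backend, assuming the register_backend calls shown earlier have been executed (the arguments map onto dogpile's stock RedisBackend options):

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.rc.redis_msgpack',
        expiration_time=300,
        arguments={
            'host': 'localhost',
            'port': 6379,
            'db': 0,
            'redis_expiration_time': 600,  # server-side TTL, kept above expiration_time
            'distributed_lock': True,      # makes get_mutex() return a Redis lock
        },
    )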
@@ -1,136 +1,153 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import functools
20 import functools
21 from decorator import decorate
22
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
21
25
22 from vcsserver.utils import safe_str, sha1
26 from vcsserver.utils import safe_str, sha1
23 from dogpile.cache import CacheRegion
27
24 from dogpile.cache.util import compat
25
28
26 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
27
30
28
31
29 class RhodeCodeCacheRegion(CacheRegion):
32 class RhodeCodeCacheRegion(CacheRegion):
30
33
31 def conditional_cache_on_arguments(
34 def conditional_cache_on_arguments(
32 self, namespace=None,
35 self, namespace=None,
33 expiration_time=None,
36 expiration_time=None,
34 should_cache_fn=None,
37 should_cache_fn=None,
35 to_str=compat.string_type,
38 to_str=compat.string_type,
36 function_key_generator=None,
39 function_key_generator=None,
37 condition=True):
40 condition=True):
38 """
41 """
39 Custom conditional decorator that will not touch any dogpile internals if
42 Custom conditional decorator that will not touch any dogpile internals if
40 the condition isn't met. This works a bit differently than should_cache_fn,
43 the condition isn't met. This works a bit differently than should_cache_fn,
41 and it's faster in cases where we never want to compute cached values.
44 and it's faster in cases where we never want to compute cached values.
42 """
45 """
43 expiration_time_is_callable = compat.callable(expiration_time)
46 expiration_time_is_callable = compat.callable(expiration_time)
44
47
45 if function_key_generator is None:
48 if function_key_generator is None:
46 function_key_generator = self.function_key_generator
49 function_key_generator = self.function_key_generator
47
50
48 def decorator(fn):
51 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
52
53 if not condition:
54 log.debug('Calling un-cached func:%s', user_func.func_name)
55 return user_func(*arg, **kw)
56
57 key = key_generator(*arg, **kw)
58
59 timeout = expiration_time() if expiration_time_is_callable \
60 else expiration_time
61
62 log.debug('Calling cached fn:%s', user_func.func_name)
63 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
64
65 def cache_decorator(user_func):
49 if to_str is compat.string_type:
66 if to_str is compat.string_type:
50 # backwards compatible
67 # backwards compatible
51 key_generator = function_key_generator(namespace, fn)
68 key_generator = function_key_generator(namespace, user_func)
52 else:
69 else:
53 key_generator = function_key_generator(namespace, fn, to_str=to_str)
70 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
54
55 @functools.wraps(fn)
56 def decorate(*arg, **kw):
57 key = key_generator(*arg, **kw)
58
71
59 @functools.wraps(fn)
72 def refresh(*arg, **kw):
60 def creator():
73 """
61 return fn(*arg, **kw)
74 Like invalidate, but regenerates the value instead
62
75 """
63 if not condition:
76 key = key_generator(*arg, **kw)
64 return creator()
77 value = user_func(*arg, **kw)
65
78 self.set(key, value)
66 timeout = expiration_time() if expiration_time_is_callable \
79 return value
67 else expiration_time
68
69 return self.get_or_create(key, creator, timeout, should_cache_fn)
70
80
71 def invalidate(*arg, **kw):
81 def invalidate(*arg, **kw):
72 key = key_generator(*arg, **kw)
82 key = key_generator(*arg, **kw)
73 self.delete(key)
83 self.delete(key)
74
84
75 def set_(value, *arg, **kw):
85 def set_(value, *arg, **kw):
76 key = key_generator(*arg, **kw)
86 key = key_generator(*arg, **kw)
77 self.set(key, value)
87 self.set(key, value)
78
88
79 def get(*arg, **kw):
89 def get(*arg, **kw):
80 key = key_generator(*arg, **kw)
90 key = key_generator(*arg, **kw)
81 return self.get(key)
91 return self.get(key)
82
92
83 def refresh(*arg, **kw):
93 user_func.set = set_
84 key = key_generator(*arg, **kw)
94 user_func.invalidate = invalidate
85 value = fn(*arg, **kw)
95 user_func.get = get
86 self.set(key, value)
96 user_func.refresh = refresh
87 return value
97 user_func.key_generator = key_generator
98 user_func.original = user_func
88
99
89 decorate.set = set_
100 # Use `decorate` to preserve the signature of :param:`user_func`.
90 decorate.invalidate = invalidate
91 decorate.refresh = refresh
92 decorate.get = get
93 decorate.original = fn
94 decorate.key_generator = key_generator
95
101
96 return decorate
102 return decorate(user_func, functools.partial(
103 get_or_create_for_user_func, key_generator))
97
104
98 return decorator
105 return cache_decorator
99
106
100
107
101 def make_region(*arg, **kw):
108 def make_region(*arg, **kw):
102 return RhodeCodeCacheRegion(*arg, **kw)
109 return RhodeCodeCacheRegion(*arg, **kw)
103
110
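A usage sketch of the rewritten decorator, mirroring how configure_dogpile_cache wires the key generator; `load_commit` and `expensive_lookup` are hypothetical:

    region = make_region(name='repo_object', function_key_generator=None)
    region.configure('dogpile.cache.memory')
    region.function_key_generator = backend_key_generator(region.actual_backend)

    @region.conditional_cache_on_arguments(condition=True)
    def load_commit(commit_id):
        return expensive_lookup(commit_id)  # hypothetical helper

    load_commit('deadbeef')             # computed once, then served from cache
    load_commit.invalidate('deadbeef')  # drop the cached entry
    load_commit.refresh('deadbeef')     # recompute and store the fresh value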
104
111
105 def get_default_cache_settings(settings, prefixes=None):
112 def get_default_cache_settings(settings, prefixes=None):
106 prefixes = prefixes or []
113 prefixes = prefixes or []
107 cache_settings = {}
114 cache_settings = {}
108 for key in settings.keys():
115 for key in settings.keys():
109 for prefix in prefixes:
116 for prefix in prefixes:
110 if key.startswith(prefix):
117 if key.startswith(prefix):
111 name = key.split(prefix)[1].strip()
118 name = key.split(prefix)[1].strip()
112 val = settings[key]
119 val = settings[key]
113 if isinstance(val, basestring):
120 if isinstance(val, compat.string_types):
114 val = val.strip()
121 val = val.strip()
115 cache_settings[name] = val
122 cache_settings[name] = val
116 return cache_settings
123 return cache_settings
117
124
118
125
119 def compute_key_from_params(*args):
126 def compute_key_from_params(*args):
120 """
127 """
121 Helper to compute key from given params to be used in cache manager
128 Helper to compute key from given params to be used in cache manager
122 """
129 """
123 return sha1("_".join(map(safe_str, args)))
130 return sha1("_".join(map(safe_str, args)))
124
131
125
132
126 def key_generator(namespace, fn):
133 def backend_key_generator(backend):
134 """
135 Special wrapper that also sends over the backend to the key generator
136 """
137 def wrapper(namespace, fn):
138 return key_generator(backend, namespace, fn)
139 return wrapper
140
141
142 def key_generator(backend, namespace, fn):
127 fname = fn.__name__
143 fname = fn.__name__
128
144
129 def generate_key(*args):
145 def generate_key(*args):
130 namespace_pref = namespace or 'default'
146 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
147 namespace_pref = namespace or 'default_namespace'
131 arg_key = compute_key_from_params(*args)
148 arg_key = compute_key_from_params(*args)
132 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
149 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
133
150
134 return final_key
151 return final_key
135
152
136 return generate_key
153 return generate_key
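The generated keys now take the shape `<backend.key_prefix>:<namespace>:<funcname>_<sha1-of-args>`; a small sketch using the LRUMemoryBackend defined earlier:

    backend = LRUMemoryBackend({})  # key_prefix = 'lru_mem_backend'

    def load(repo_id):
        return repo_id

    keygen = backend_key_generator(backend)('repo_object', load)
    keygen('repo-1')
    # -> 'lru_mem_backend:repo_object:load_<sha1 of "repo-1">'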
@@ -1,523 +1,519 b''
1 """
1 """
2 Module provides a class allowing to wrap communication over subprocess.Popen
2 Module provides a class allowing to wrap communication over subprocess.Popen
3 input, output, error streams into a meaningful, non-blocking, concurrent
3 input, output, error streams into a meaningful, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fitting to be a
4 stream processor exposing the output data as an iterator fitting to be a
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 """
24 """
25 import os
25 import os
26 import logging
26 import logging
27 import subprocess32 as subprocess
27 import subprocess32 as subprocess
28 from collections import deque
28 from collections import deque
29 from threading import Event, Thread
29 from threading import Event, Thread
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class StreamFeeder(Thread):
34 class StreamFeeder(Thread):
35 """
35 """
36 Normal writing into a pipe-like object blocks once the buffer is filled.
36 Normal writing into a pipe-like object blocks once the buffer is filled.
37 This thread feeds data from a file-like object into a pipe
37 This thread feeds data from a file-like object into a pipe
38 without blocking the main thread.
38 without blocking the main thread.
39 We close inpipe once the end of the source stream is reached.
39 We close inpipe once the end of the source stream is reached.
40 """
40 """
41
41
42 def __init__(self, source):
42 def __init__(self, source):
43 super(StreamFeeder, self).__init__()
43 super(StreamFeeder, self).__init__()
44 self.daemon = True
44 self.daemon = True
45 filelike = False
45 filelike = False
46 self.bytes = bytes()
46 self.bytes = bytes()
47 if type(source) in (type(''), bytes, bytearray): # string-like
47 if type(source) in (type(''), bytes, bytearray): # string-like
48 self.bytes = bytes(source)
48 self.bytes = bytes(source)
49 else: # can be either file pointer or file-like
49 else: # can be either file pointer or file-like
50 if type(source) in (int, long): # file pointer it is
50 if type(source) in (int, long): # file pointer it is
51 # converting file descriptor (int) stdin into file-like
51 # converting file descriptor (int) stdin into file-like
52 try:
52 try:
53 source = os.fdopen(source, 'rb', 16384)
53 source = os.fdopen(source, 'rb', 16384)
54 except Exception:
54 except Exception:
55 pass
55 pass
56 # let's see if source is file-like by now
56 # let's see if source is file-like by now
57 try:
57 try:
58 filelike = source.read
58 filelike = source.read
59 except Exception:
59 except Exception:
60 pass
60 pass
61 if not filelike and not self.bytes:
61 if not filelike and not self.bytes:
62 raise TypeError("StreamFeeder's source object must be a readable "
62 raise TypeError("StreamFeeder's source object must be a readable "
63 "file-like, a file descriptor, or a string-like.")
63 "file-like, a file descriptor, or a string-like.")
64 self.source = source
64 self.source = source
65 self.readiface, self.writeiface = os.pipe()
65 self.readiface, self.writeiface = os.pipe()
66
66
67 def run(self):
67 def run(self):
68 t = self.writeiface
68 t = self.writeiface
69 try:
69 try:
70 if self.bytes:
70 if self.bytes:
71 os.write(t, self.bytes)
71 os.write(t, self.bytes)
72 else:
72 else:
73 s = self.source
73 s = self.source
74 b = s.read(4096)
74 b = s.read(4096)
75 while b:
75 while b:
76 os.write(t, b)
76 os.write(t, b)
77 b = s.read(4096)
77 b = s.read(4096)
78 finally:
78 finally:
79 os.close(t)
79 os.close(t)
80
80
81 @property
81 @property
82 def output(self):
82 def output(self):
83 return self.readiface
83 return self.readiface
84
84
85
85
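A feeding sketch: the worker thread pushes the payload into the pipe while a subprocess consumes it (the `cat` command is only an illustrative consumer):

    feeder = StreamFeeder('some payload bytes')
    feeder.start()

    proc = subprocess.Popen(['cat'], stdin=feeder.output,
                            stdout=subprocess.PIPE, shell=False)
    data = proc.stdout.read()  # -> 'some payload bytes'
    feeder.join()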
86 class InputStreamChunker(Thread):
86 class InputStreamChunker(Thread):
87 def __init__(self, source, target, buffer_size, chunk_size):
87 def __init__(self, source, target, buffer_size, chunk_size):
88
88
89 super(InputStreamChunker, self).__init__()
89 super(InputStreamChunker, self).__init__()
90
90
91 self.daemon = True # die die die.
91 self.daemon = True # die die die.
92
92
93 self.source = source
93 self.source = source
94 self.target = target
94 self.target = target
95 self.chunk_count_max = int(buffer_size / chunk_size) + 1
95 self.chunk_count_max = int(buffer_size / chunk_size) + 1
96 self.chunk_size = chunk_size
96 self.chunk_size = chunk_size
97
97
98 self.data_added = Event()
98 self.data_added = Event()
99 self.data_added.clear()
99 self.data_added.clear()
100
100
101 self.keep_reading = Event()
101 self.keep_reading = Event()
102 self.keep_reading.set()
102 self.keep_reading.set()
103
103
104 self.EOF = Event()
104 self.EOF = Event()
105 self.EOF.clear()
105 self.EOF.clear()
106
106
107 self.go = Event()
107 self.go = Event()
108 self.go.set()
108 self.go.set()
109
109
110 def stop(self):
110 def stop(self):
111 self.go.clear()
111 self.go.clear()
112 self.EOF.set()
112 self.EOF.set()
113 try:
113 try:
114 # this is not proper, but is done to force the reader thread let
114 # this is not proper, but is done to force the reader thread let
115 # go of the input because, if successful, .close() will send EOF
115 # go of the input because, if successful, .close() will send EOF
116 # down the pipe.
116 # down the pipe.
117 self.source.close()
117 self.source.close()
118 except:
118 except:
119 pass
119 pass
120
120
121 def run(self):
121 def run(self):
122 s = self.source
122 s = self.source
123 t = self.target
123 t = self.target
124 cs = self.chunk_size
124 cs = self.chunk_size
125 chunk_count_max = self.chunk_count_max
125 chunk_count_max = self.chunk_count_max
126 keep_reading = self.keep_reading
126 keep_reading = self.keep_reading
127 da = self.data_added
127 da = self.data_added
128 go = self.go
128 go = self.go
129
129
130 try:
130 try:
131 b = s.read(cs)
131 b = s.read(cs)
132 except ValueError:
132 except ValueError:
133 b = ''
133 b = ''
134
134
135 timeout_input = 20
135 timeout_input = 20
136 while b and go.is_set():
136 while b and go.is_set():
137 if len(t) > chunk_count_max:
137 if len(t) > chunk_count_max:
138 keep_reading.clear()
138 keep_reading.clear()
139 keep_reading.wait(timeout_input)
139 keep_reading.wait(timeout_input)
140 if len(t) > chunk_count_max + timeout_input:
140 if len(t) > chunk_count_max + timeout_input:
141 log.error("Timed out while waiting for input from subprocess.")
141 log.error("Timed out while waiting for input from subprocess.")
142 os._exit(-1) # this will cause the worker to recycle itself
142 os._exit(-1) # this will cause the worker to recycle itself
143
143
144 t.append(b)
144 t.append(b)
145 da.set()
145 da.set()
146
146
147 try:
147 try:
148 b = s.read(cs)
148 b = s.read(cs)
149 except ValueError:
149 except ValueError:
150 b = ''
150 b = ''
151
151
152 self.EOF.set()
152 self.EOF.set()
153 da.set() # for cases when done but there was no input.
153 da.set() # for cases when done but there was no input.
154
154
155
155
156 class BufferedGenerator(object):
156 class BufferedGenerator(object):
157 """
157 """
158 Class behaves as a non-blocking, buffered pipe reader.
158 Class behaves as a non-blocking, buffered pipe reader.
159 Reads chunks of data (through a thread)
159 Reads chunks of data (through a thread)
160 from a blocking pipe, and attaches these to an array (Deque) of chunks.
160 from a blocking pipe, and attaches these to an array (Deque) of chunks.
161 Reading is halted in the thread when max chunks is internally buffered.
161 Reading is halted in the thread when max chunks is internally buffered.
162 The .next() may operate in blocking or non-blocking fashion by yielding
162 The .next() may operate in blocking or non-blocking fashion by yielding
163 '' if no data is ready
163 '' if no data is ready
164 to be sent, or by not returning until there is some data to send.
164 to be sent, or by not returning until there is some data to send.
165 When we get EOF from the underlying source pipe we raise the marker to raise
165 When we get EOF from the underlying source pipe we raise the marker to raise
166 StopIteration after the last chunk of data is yielded.
166 StopIteration after the last chunk of data is yielded.
167 """
167 """
168
168
169 def __init__(self, source, buffer_size=65536, chunk_size=4096,
169 def __init__(self, source, buffer_size=65536, chunk_size=4096,
170 starting_values=None, bottomless=False):
170 starting_values=None, bottomless=False):
171 starting_values = starting_values or []
171 starting_values = starting_values or []
172
172
173 if bottomless:
173 if bottomless:
174 maxlen = int(buffer_size / chunk_size)
174 maxlen = int(buffer_size / chunk_size)
175 else:
175 else:
176 maxlen = None
176 maxlen = None
177
177
178 self.data = deque(starting_values, maxlen)
178 self.data = deque(starting_values, maxlen)
179 self.worker = InputStreamChunker(source, self.data, buffer_size,
179 self.worker = InputStreamChunker(source, self.data, buffer_size,
180 chunk_size)
180 chunk_size)
181 if starting_values:
181 if starting_values:
182 self.worker.data_added.set()
182 self.worker.data_added.set()
183 self.worker.start()
183 self.worker.start()
184
184
185 ####################
185 ####################
186 # Generator's methods
186 # Generator's methods
187 ####################
187 ####################
188
188
189 def __iter__(self):
189 def __iter__(self):
190 return self
190 return self
191
191
192 def next(self):
192 def next(self):
193 while not len(self.data) and not self.worker.EOF.is_set():
193 while not len(self.data) and not self.worker.EOF.is_set():
194 self.worker.data_added.clear()
194 self.worker.data_added.clear()
195 self.worker.data_added.wait(0.2)
195 self.worker.data_added.wait(0.2)
196 if len(self.data):
196 if len(self.data):
197 self.worker.keep_reading.set()
197 self.worker.keep_reading.set()
198 return bytes(self.data.popleft())
198 return bytes(self.data.popleft())
199 elif self.worker.EOF.is_set():
199 elif self.worker.EOF.is_set():
200 raise StopIteration
200 raise StopIteration
201
201
202 def throw(self, exc_type, value=None, traceback=None):
202 def throw(self, exc_type, value=None, traceback=None):
203 if not self.worker.EOF.is_set():
203 if not self.worker.EOF.is_set():
204 raise exc_type(value)
204 raise exc_type(value)
205
205
206 def start(self):
206 def start(self):
207 self.worker.start()
207 self.worker.start()
208
208
209 def stop(self):
209 def stop(self):
210 self.worker.stop()
210 self.worker.stop()
211
211
212 def close(self):
212 def close(self):
213 try:
213 try:
214 self.worker.stop()
214 self.worker.stop()
215 self.throw(GeneratorExit)
215 self.throw(GeneratorExit)
216 except (GeneratorExit, StopIteration):
216 except (GeneratorExit, StopIteration):
217 pass
217 pass
218
218
219 def __del__(self):
220 self.close()
221
222 ####################
219 ####################
223 # Threaded reader's infrastructure.
220 # Threaded reader's infrastructure.
224 ####################
221 ####################
225 @property
222 @property
226 def input(self):
223 def input(self):
227 return self.worker.w
224 return self.worker.w
228
225
229 @property
226 @property
230 def data_added_event(self):
227 def data_added_event(self):
231 return self.worker.data_added
228 return self.worker.data_added
232
229
233 @property
230 @property
234 def data_added(self):
231 def data_added(self):
235 return self.worker.data_added.is_set()
232 return self.worker.data_added.is_set()
236
233
237 @property
234 @property
238 def reading_paused(self):
235 def reading_paused(self):
239 return not self.worker.keep_reading.is_set()
236 return not self.worker.keep_reading.is_set()
240
237
241 @property
238 @property
242 def done_reading_event(self):
239 def done_reading_event(self):
243 """
240 """
244 Done reading does not mean that the iterator's buffer is empty.
241 Done reading does not mean that the iterator's buffer is empty.
245 The iterator might be done reading from the underlying source, but the read
242 The iterator might be done reading from the underlying source, but the read
246 chunks might still be available for serving through the .next() method.
243 chunks might still be available for serving through the .next() method.
247
244
248 :returns: An Event class instance.
245 :returns: An Event class instance.
249 """
246 """
250 return self.worker.EOF
247 return self.worker.EOF
251
248
252 @property
249 @property
253 def done_reading(self):
250 def done_reading(self):
254 """
251 """
255 Done reading does not mean that the iterator's buffer is empty.
252 Done reading does not mean that the iterator's buffer is empty.
256 The iterator might be done reading from the underlying source, but the read
253 The iterator might be done reading from the underlying source, but the read
257 chunks might still be available for serving through the .next() method.
254 chunks might still be available for serving through the .next() method.
258
255
259 :returns: A bool value.
256 :returns: A bool value.
260 """
257 """
261 return self.worker.EOF.is_set()
258 return self.worker.EOF.is_set()
262
259
263 @property
260 @property
264 def length(self):
261 def length(self):
265 """
262 """
266 Returns an int.
263 Returns an int.
267
264
268 This is the length of the queue of chunks, not the length of
265 This is the length of the queue of chunks, not the length of
269 the combined contents in those chunks.
266 the combined contents in those chunks.
270
267
271 __len__() cannot be meaningfully implemented because this
268 __len__() cannot be meaningfully implemented because this
272 reader is just flying through a bottomless pit of content and
269 reader is just flying through a bottomless pit of content and
273 can only know the length of what it has already seen.
270 can only know the length of what it has already seen.
274
271
275 If __len__() on WSGI server per PEP 3333 returns a value,
272 If __len__() on WSGI server per PEP 3333 returns a value,
276 the response's length will be set to that. In order not to
273 the response's length will be set to that. In order not to
277 confuse WSGI PEP3333 servers, we will not implement __len__
274 confuse WSGI PEP3333 servers, we will not implement __len__
278 at all.
275 at all.
279 """
276 """
280 return len(self.data)
277 return len(self.data)
281
278
282 def prepend(self, x):
279 def prepend(self, x):
283 self.data.appendleft(x)
280 self.data.appendleft(x)
284
281
285 def append(self, x):
282 def append(self, x):
286 self.data.append(x)
283 self.data.append(x)
287
284
288 def extend(self, o):
285 def extend(self, o):
289 self.data.extend(o)
286 self.data.extend(o)
290
287
291 def __getitem__(self, i):
288 def __getitem__(self, i):
292 return self.data[i]
289 return self.data[i]
293
290
294
291
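A draining sketch: the worker thread chunks the pipe while the caller iterates, blocking only while no chunk is buffered yet (`ls` is an arbitrary example command):

    proc = subprocess.Popen(['ls'], stdout=subprocess.PIPE, shell=False)
    out = BufferedGenerator(proc.stdout, buffer_size=65536, chunk_size=4096)

    data = ''.join(out)  # iterates until the worker signals EOF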
295 class SubprocessIOChunker(object):
292 class SubprocessIOChunker(object):
296 """
293 """
297 Processor class wrapping handling of subprocess IO.
294 Processor class wrapping handling of subprocess IO.
298
295
299 .. important::
296 .. important::
300
297
301 Watch out for the method `__del__` on this class. If this object
298 Watch out for the method `__del__` on this class. If this object
302 is deleted, it will kill the subprocess, so avoid returning
299 is deleted, it will kill the subprocess, so avoid returning
303 the `output` attribute or using it as in the following
300 the `output` attribute or using it as in the following
304 example::
301 example::
305
302
306 # `args` expected to run a program that produces a lot of output
303 # `args` expected to run a program that produces a lot of output
307 output = ''.join(SubprocessIOChunker(
304 output = ''.join(SubprocessIOChunker(
308 args, shell=False, inputstream=inputstream, env=environ).output)
305 args, shell=False, inputstream=inputstream, env=environ).output)
309
306
310 # `output` will not contain all the data, because the __del__ method
307 # `output` will not contain all the data, because the __del__ method
311 # has already killed the subprocess in this case before all output
308 # has already killed the subprocess in this case before all output
312 # has been consumed.
309 # has been consumed.
313
310
314
311
315
312
316 In a way, this is a "communicate()" replacement with a twist.
313 In a way, this is a "communicate()" replacement with a twist.
317
314
318 - We are multithreaded. Writing in and reading out/err each happen in separate threads.
315 - We are multithreaded. Writing in and reading out/err each happen in separate threads.
319 - We support concurrent (in and out) stream processing.
316 - We support concurrent (in and out) stream processing.
320 - The output is not a stream. It's a queue of read string (bytes, not unicode)
317 - The output is not a stream. It's a queue of read string (bytes, not unicode)
321 chunks. The object behaves as an iterable. You can iterate over it with "for chunk in obj:".
318 chunks. The object behaves as an iterable. You can iterate over it with "for chunk in obj:".
322 - We are non-blocking in more respects than communicate()
319 - We are non-blocking in more respects than communicate()
323 (reading from subprocess out pauses when internal buffer is full, but
320 (reading from subprocess out pauses when internal buffer is full, but
324 does not block the parent calling code. On the flip side, reading from
321 does not block the parent calling code. On the flip side, reading from
325 slow-yielding subprocess may block the iteration until data shows up. This
322 slow-yielding subprocess may block the iteration until data shows up. This
326 does not block the parallel inpipe reading occurring in a parallel thread.)
323 does not block the parallel inpipe reading occurring in a parallel thread.)
327
324
328 The purpose of the object is to allow us to wrap subprocess interactions into
325 The purpose of the object is to allow us to wrap subprocess interactions into
329 an iterable that can be passed to a WSGI server as the application's return
326 an iterable that can be passed to a WSGI server as the application's return
330 value. Because of stream-processing-ability, WSGI does not have to read ALL
327 value. Because of stream-processing-ability, WSGI does not have to read ALL
331 of the subprocess's output and buffer it, before handing it to WSGI server for
328 of the subprocess's output and buffer it, before handing it to WSGI server for
332 HTTP response. Instead, the class initializer reads just a bit of the stream
329 HTTP response. Instead, the class initializer reads just a bit of the stream
333 to figure out if an error occurred or is likely to occur and, if not, hands the
330 to figure out if an error occurred or is likely to occur and, if not, hands the
334 further iteration over subprocess output to the server for completion of HTTP
331 further iteration over subprocess output to the server for completion of HTTP
335 response.
332 response.
336
333
337 The real or perceived subprocess error is trapped and raised as one of
334 The real or perceived subprocess error is trapped and raised as one of
338 EnvironmentError family of exceptions
335 EnvironmentError family of exceptions
339
336
340 Example usage:
337 Example usage:
341 # try:
338 # try:
342 # answer = SubprocessIOChunker(
339 # answer = SubprocessIOChunker(
343 # cmd,
340 # cmd,
344 # input,
341 # input,
345 # buffer_size = 65536,
342 # buffer_size = 65536,
346 # chunk_size = 4096
343 # chunk_size = 4096
347 # )
344 # )
348 # except (EnvironmentError) as e:
345 # except (EnvironmentError) as e:
349 # print str(e)
346 # print str(e)
350 # raise e
347 # raise e
351 #
348 #
352 # return answer
349 # return answer
353
350
354
351
355 """
352 """
356
353
357 # TODO: johbo: This is used to make sure that the open end of the PIPE
354 # TODO: johbo: This is used to make sure that the open end of the PIPE
358 # is closed in the end. It would be way better to wrap this into an
355 # is closed in the end. It would be way better to wrap this into an
359 # object, so that it is closed automatically once it is consumed or
356 # object, so that it is closed automatically once it is consumed or
360 # something similar.
357 # something similar.
361 _close_input_fd = None
358 _close_input_fd = None
362
359
363 _closed = False
360 _closed = False
364
361
365 def __init__(self, cmd, inputstream=None, buffer_size=65536,
362 def __init__(self, cmd, inputstream=None, buffer_size=65536,
366 chunk_size=4096, starting_values=None, fail_on_stderr=True,
363 chunk_size=4096, starting_values=None, fail_on_stderr=True,
367 fail_on_return_code=True, **kwargs):
364 fail_on_return_code=True, **kwargs):
368 """
365 """
369 Initializes SubprocessIOChunker
366 Initializes SubprocessIOChunker
370
367
371 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
368 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
372 :param inputstream: (Default: None) A file-like, string, or file pointer.
369 :param inputstream: (Default: None) A file-like, string, or file pointer.
373 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
370 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
374 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
371 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
375 :param starting_values: (Default: []) An array of strings to put in front of output que.
372 :param starting_values: (Default: []) An array of strings to put in front of output que.
376 :param fail_on_stderr: (Default: True) Whether to raise an exception in
373 :param fail_on_stderr: (Default: True) Whether to raise an exception in
377 case something is written to stderr.
374 case something is written to stderr.
378 :param fail_on_return_code: (Default: True) Whether to raise an
375 :param fail_on_return_code: (Default: True) Whether to raise an
379 exception if the return code is not 0.
376 exception if the return code is not 0.
380 """
377 """
381
378
382 starting_values = starting_values or []
379 starting_values = starting_values or []
383 if inputstream:
380 if inputstream:
384 input_streamer = StreamFeeder(inputstream)
381 input_streamer = StreamFeeder(inputstream)
385 input_streamer.start()
382 input_streamer.start()
386 inputstream = input_streamer.output
383 inputstream = input_streamer.output
387 self._close_input_fd = inputstream
384 self._close_input_fd = inputstream
388
385
389 self._fail_on_stderr = fail_on_stderr
386 self._fail_on_stderr = fail_on_stderr
390 self._fail_on_return_code = fail_on_return_code
387 self._fail_on_return_code = fail_on_return_code
391
388
392 _shell = kwargs.get('shell', True)
389 _shell = kwargs.get('shell', True)
393 kwargs['shell'] = _shell
390 kwargs['shell'] = _shell
394
391
395 _p = subprocess.Popen(cmd, bufsize=-1,
392 _p = subprocess.Popen(cmd, bufsize=-1,
396 stdin=inputstream,
393 stdin=inputstream,
397 stdout=subprocess.PIPE,
394 stdout=subprocess.PIPE,
398 stderr=subprocess.PIPE,
395 stderr=subprocess.PIPE,
399 **kwargs)
396 **kwargs)
400
397
401 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
398 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
402 starting_values)
399 starting_values)
403 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
400 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
404
401
405 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
402 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
406 # doing this until we reach either end of file, or end of buffer.
403 # doing this until we reach either end of file, or end of buffer.
407 bg_out.data_added_event.wait(1)
404 bg_out.data_added_event.wait(1)
408 bg_out.data_added_event.clear()
405 bg_out.data_added_event.clear()
409
406
410 # at this point it's still ambiguous if we are done reading or just full buffer.
407 # at this point it's still ambiguous if we are done reading or just full buffer.
411 # Either way, if error (returned by ended process, or implied based on
408 # Either way, if error (returned by ended process, or implied based on
412 # presence of stuff in stderr output) we error out.
409 # presence of stuff in stderr output) we error out.
413 # Else, we are happy.
410 # Else, we are happy.
414 _returncode = _p.poll()
411 _returncode = _p.poll()
415
412
416 if ((_returncode and fail_on_return_code) or
413 if ((_returncode and fail_on_return_code) or
417 (fail_on_stderr and _returncode is None and bg_err.length)):
414 (fail_on_stderr and _returncode is None and bg_err.length)):
418 try:
415 try:
419 _p.terminate()
416 _p.terminate()
420 except Exception:
417 except Exception:
421 pass
418 pass
422 bg_out.stop()
419 bg_out.stop()
423 bg_err.stop()
420 bg_err.stop()
424 if fail_on_stderr:
421 if fail_on_stderr:
425 err = ''.join(bg_err)
422 err = ''.join(bg_err)
426 raise EnvironmentError(
423 raise EnvironmentError(
427 "Subprocess exited due to an error:\n" + err)
424 "Subprocess exited due to an error:\n" + err)
428 if _returncode and fail_on_return_code:
425 if _returncode and fail_on_return_code:
429 err = ''.join(bg_err)
426 err = ''.join(bg_err)
430 if not err:
427 if not err:
431 # maybe get empty stderr, try stdout instead
428 # maybe get empty stderr, try stdout instead
432 # in many cases git reports the errors on stdout too
429 # in many cases git reports the errors on stdout too
433 err = ''.join(bg_out)
430 err = ''.join(bg_out)
434 raise EnvironmentError(
431 raise EnvironmentError(
435 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
432 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
436 _returncode, err))
433 _returncode, err))
437
434
438 self.process = _p
435 self.process = _p
439 self.output = bg_out
436 self.output = bg_out
440 self.error = bg_err
437 self.error = bg_err
441 self.inputstream = inputstream
438 self.inputstream = inputstream
442
439
443 def __iter__(self):
440 def __iter__(self):
444 return self
441 return self
445
442
446 def next(self):
443 def next(self):
447 # Note: mikhail: We need to be sure that we are checking the return
444 # Note: mikhail: We need to be sure that we are checking the return
448 # code after the stdout stream is closed. Some processes, e.g. git
445 # code after the stdout stream is closed. Some processes, e.g. git
449 # are doing some magic in between closing stdout and terminating the
446 # are doing some magic in between closing stdout and terminating the
450 # process and, as a result, we are not getting return code on "slow"
447 # process and, as a result, we are not getting return code on "slow"
451 # systems.
448 # systems.
452 result = None
449 result = None
453 stop_iteration = None
450 stop_iteration = None
454 try:
451 try:
455 result = self.output.next()
452 result = self.output.next()
456 except StopIteration as e:
453 except StopIteration as e:
457 stop_iteration = e
454 stop_iteration = e
458
455
459 if self.process.poll() and self._fail_on_return_code:
456 if self.process.poll() and self._fail_on_return_code:
460 err = '%s' % ''.join(self.error)
457 err = '%s' % ''.join(self.error)
461 raise EnvironmentError(
458 raise EnvironmentError(
462 "Subprocess exited due to an error:\n" + err)
459 "Subprocess exited due to an error:\n" + err)
463
460
464 if stop_iteration:
461 if stop_iteration:
465 raise stop_iteration
462 raise stop_iteration
466 return result
463 return result
467
464
468 def throw(self, type, value=None, traceback=None):
465 def throw(self, type, value=None, traceback=None):
469 if self.output.length or not self.output.done_reading:
466 if self.output.length or not self.output.done_reading:
470 raise type(value)
467 raise type(value)
471
468
472 def close(self):
469 def close(self):
473 if self._closed:
470 if self._closed:
474 return
471 return
475 self._closed = True
472 self._closed = True
476 try:
473 try:
477 self.process.terminate()
474 self.process.terminate()
478 except:
475 except Exception:
479 pass
476 pass
480 if self._close_input_fd:
477 if self._close_input_fd:
481 os.close(self._close_input_fd)
478 os.close(self._close_input_fd)
482 try:
479 try:
483 self.output.close()
480 self.output.close()
484 except:
481 except Exception:
485 pass
482 pass
486 try:
483 try:
487 self.error.close()
484 self.error.close()
488 except:
485 except Exception:
489 pass
486 pass
490 try:
487 try:
491 os.close(self.inputstream)
488 os.close(self.inputstream)
492 except:
489 except Exception:
493 pass
490 pass
494
491
495 def __del__(self):
496 self.close()
497
498
492
499 def run_command(arguments, env=None):
493 def run_command(arguments, env=None):
500 """
494 """
501 Run the specified command and return the stdout.
495 Run the specified command and return the stdout.
502
496
503 :param arguments: sequence of program arguments (including the program name)
497 :param arguments: sequence of program arguments (including the program name)
504 :type arguments: list[str]
498 :type arguments: list[str]
505 """
499 """
506
500
507 cmd = arguments
501 cmd = arguments
508 log.debug('Running subprocessio command %s', cmd)
502 log.debug('Running subprocessio command %s', cmd)
503 proc = None
509 try:
504 try:
510 _opts = {'shell': False, 'fail_on_stderr': False}
505 _opts = {'shell': False, 'fail_on_stderr': False}
511 if env:
506 if env:
512 _opts.update({'env': env})
507 _opts.update({'env': env})
513 p = SubprocessIOChunker(cmd, **_opts)
508 proc = SubprocessIOChunker(cmd, **_opts)
514 stdout = ''.join(p)
509 return ''.join(proc), ''.join(proc.error)
515 stderr = ''.join(''.join(p.error))
516 except (EnvironmentError, OSError) as err:
510 except (EnvironmentError, OSError) as err:
517 cmd = ' '.join(cmd) # human friendly CMD
511 cmd = ' '.join(cmd) # human friendly CMD
518 tb_err = ("Couldn't run subprocessio command (%s).\n"
512 tb_err = ("Couldn't run subprocessio command (%s).\n"
519 "Original error was:%s\n" % (cmd, err))
513 "Original error was:%s\n" % (cmd, err))
520 log.exception(tb_err)
514 log.exception(tb_err)
521 raise Exception(tb_err)
515 raise Exception(tb_err)
516 finally:
517 if proc:
518 proc.close()
522
519
523 return stdout, stderr
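The reworked run_command now returns both streams and always closes the chunker via the finally block; a call sketch (the commands are illustrative):

    stdout, stderr = run_command(['git', '--version'])
    stdout, stderr = run_command(['svnadmin', '--version'], env={'LANG': 'C'})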
@@ -1,775 +1,799 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.vcs_base import RemoteBase
39
40
40 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
41
42
42
43
43 # Set of svn compatible version flags.
44 # Set of svn compatible version flags.
44 # Compare with subversion/svnadmin/svnadmin.c
45 # Compare with subversion/svnadmin/svnadmin.c
45 svn_compatible_versions = {
46 svn_compatible_versions = {
46 'pre-1.4-compatible',
47 'pre-1.4-compatible',
47 'pre-1.5-compatible',
48 'pre-1.5-compatible',
48 'pre-1.6-compatible',
49 'pre-1.6-compatible',
49 'pre-1.8-compatible',
50 'pre-1.8-compatible',
50 'pre-1.9-compatible'
51 'pre-1.9-compatible'
51 }
52 }
52
53
53 svn_compatible_versions_map = {
54 svn_compatible_versions_map = {
54 'pre-1.4-compatible': '1.3',
55 'pre-1.4-compatible': '1.3',
55 'pre-1.5-compatible': '1.4',
56 'pre-1.5-compatible': '1.4',
56 'pre-1.6-compatible': '1.5',
57 'pre-1.6-compatible': '1.5',
57 'pre-1.8-compatible': '1.7',
58 'pre-1.8-compatible': '1.7',
58 'pre-1.9-compatible': '1.8',
59 'pre-1.9-compatible': '1.8',
59 }
60 }
60
61
61
62
62 def reraise_safe_exceptions(func):
63 def reraise_safe_exceptions(func):
63 """Decorator for converting svn exceptions to something neutral."""
64 """Decorator for converting svn exceptions to something neutral."""
64 def wrapper(*args, **kwargs):
65 def wrapper(*args, **kwargs):
65 try:
66 try:
66 return func(*args, **kwargs)
67 return func(*args, **kwargs)
67 except Exception as e:
68 except Exception as e:
68 if not hasattr(e, '_vcs_kind'):
69 if not hasattr(e, '_vcs_kind'):
69 log.exception("Unhandled exception in svn remote call")
70 log.exception("Unhandled exception in svn remote call")
70 raise_from_original(exceptions.UnhandledException(e))
71 raise_from_original(exceptions.UnhandledException(e))
71 raise
72 raise
72 return wrapper
73 return wrapper
73
74
74
75
75 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
76 repo_type = 'svn'
77 repo_type = 'svn'
77
78
78 def _create_repo(self, wire, create, compatible_version):
79 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
81 if create:
81 fs_config = {'compatible-version': '1.9'}
82 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
83 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
84 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
85 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
86 .format(compatible_version))
86 fs_config['compatible-version'] = \
87 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
88 svn_compatible_versions_map[compatible_version]
88
89
89 log.debug('Create SVN repo with config "%s"', fs_config)
90 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
91 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
92 else:
92 repo = svn.repos.open(path)
93 repo = svn.repos.open(path)
93
94
94 log.debug('Got SVN object: %s', repo)
95 log.debug('Got SVN object: %s', repo)
95 return repo
96 return repo
96
97
97 def repo(self, wire, create=False, compatible_version=None):
98 def repo(self, wire, create=False, compatible_version=None):
98 """
99 """
99 Get a repository instance for the given path.
100 Get a repository instance for the given path.
100
101 Uses internally the low level beaker API since the decorators introduce
102 significant overhead.
103 """
101 """
104 region = self._cache_region
102 return self._create_repo(wire, create, compatible_version)
105 context = wire.get('context', None)
106 repo_path = wire.get('path', '')
107 context_uid = '{}'.format(context)
108 cache = wire.get('cache', True)
109 cache_on = context and cache
110
111 @region.conditional_cache_on_arguments(condition=cache_on)
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
113 return self._create_repo(wire, create, compatible_version)
114
115 return create_new_repo(self.repo_type, repo_path, context_uid,
116 compatible_version)
117
103
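The new caching path in repo() memoizes the repository object per (repo_type, path, context); a call sketch with a hypothetical, already-constructed factory:

    wire = {'path': '/srv/svn/my-repo', 'context': 'request-uuid', 'cache': True}
    factory.repo(wire)   # first call creates and caches the repo object
    factory.repo(wire)   # same context -> served from the region

    factory.repo({'path': '/srv/svn/my-repo', 'context': None})
    # no context -> cache_on is falsy and _create_repo runs every time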
118
104
119 NODE_TYPE_MAPPING = {
105 NODE_TYPE_MAPPING = {
120 svn.core.svn_node_file: 'file',
106 svn.core.svn_node_file: 'file',
121 svn.core.svn_node_dir: 'dir',
107 svn.core.svn_node_dir: 'dir',
122 }
108 }
123
109
124
110
125 class SvnRemote(object):
111 class SvnRemote(RemoteBase):
126
112
127 def __init__(self, factory, hg_factory=None):
113 def __init__(self, factory, hg_factory=None):
128 self._factory = factory
114 self._factory = factory
129 # TODO: Remove once we do not use internal Mercurial objects anymore
115 # TODO: Remove once we do not use internal Mercurial objects anymore
130 # for subversion
116 # for subversion
131 self._hg_factory = hg_factory
117 self._hg_factory = hg_factory
132
118
133 @reraise_safe_exceptions
119 @reraise_safe_exceptions
134 def discover_svn_version(self):
120 def discover_svn_version(self):
135 try:
121 try:
136 import svn.core
122 import svn.core
137 svn_ver = svn.core.SVN_VERSION
123 svn_ver = svn.core.SVN_VERSION
138 except ImportError:
124 except ImportError:
139 svn_ver = None
125 svn_ver = None
140 return svn_ver
126 return svn_ver
141
127
142 @reraise_safe_exceptions
128 @reraise_safe_exceptions
143 def is_empty(self, wire):
129 def is_empty(self, wire):
144 repo = self._factory.repo(wire)
145
130
146 try:
131 try:
147 return self.lookup(wire, -1) == 0
132 return self.lookup(wire, -1) == 0
148 except Exception:
133 except Exception:
149 log.exception("failed to read object_store")
134 log.exception("failed to read object_store")
150 return False
135 return False
151
136
152 def check_url(self, url, config_items):
137 def check_url(self, url, config_items):
153 # this can throw exception if not installed, but we detect this
138 # this can throw exception if not installed, but we detect this
154 from hgsubversion import svnrepo
139 from hgsubversion import svnrepo
155
140
156 baseui = self._hg_factory._create_config(config_items)
141 baseui = self._hg_factory._create_config(config_items)
157 # the uuid function gets a valid UUID only from a proper repo, else
142 # the uuid function gets a valid UUID only from a proper repo, else
158 # it throws an exception
143 # it throws an exception
159 try:
144 try:
160 svnrepo.svnremoterepo(baseui, url).svn.uuid
145 svnrepo.svnremoterepo(baseui, url).svn.uuid
161 except Exception:
146 except Exception:
162 tb = traceback.format_exc()
147 tb = traceback.format_exc()
163 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
148 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
164 raise URLError(
149 raise URLError(
165 '"%s" is not a valid Subversion source url.' % (url, ))
150 '"%s" is not a valid Subversion source url.' % (url, ))
166 return True
151 return True
167
152
168 def is_path_valid_repository(self, wire, path):
153 def is_path_valid_repository(self, wire, path):
169
154
170 # NOTE(marcink): short-circuit the check for an SVN repo
155 # NOTE(marcink): short-circuit the check for an SVN repo
171 # repos.open might be expensive to call, but we have one cheap
156 # repos.open might be expensive to call, but we have one cheap
172 # precondition we can use: check for the 'format' file
157 # precondition we can use: check for the 'format' file
173
158
174 if not os.path.isfile(os.path.join(path, 'format')):
159 if not os.path.isfile(os.path.join(path, 'format')):
175 return False
160 return False
176
161
177 try:
162 try:
178 svn.repos.open(path)
163 svn.repos.open(path)
179 except svn.core.SubversionException:
164 except svn.core.SubversionException:
180 tb = traceback.format_exc()
165 tb = traceback.format_exc()
181 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
166 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
182 return False
167 return False
183 return True
168 return True
184
169
185 @reraise_safe_exceptions
170 @reraise_safe_exceptions
186 def verify(self, wire):
171 def verify(self, wire):
187 repo_path = wire['path']
172 repo_path = wire['path']
188 if not self.is_path_valid_repository(wire, repo_path):
173 if not self.is_path_valid_repository(wire, repo_path):
189 raise Exception(
174 raise Exception(
190 "Path %s is not a valid Subversion repository." % repo_path)
175 "Path %s is not a valid Subversion repository." % repo_path)
191
176
192 cmd = ['svnadmin', 'info', repo_path]
177 cmd = ['svnadmin', 'info', repo_path]
193 stdout, stderr = subprocessio.run_command(cmd)
178 stdout, stderr = subprocessio.run_command(cmd)
194 return stdout
179 return stdout
195
180
196 def lookup(self, wire, revision):
181 def lookup(self, wire, revision):
197 if revision not in [-1, None, 'HEAD']:
182 if revision not in [-1, None, 'HEAD']:
198 raise NotImplementedError
183 raise NotImplementedError
199 repo = self._factory.repo(wire)
184 repo = self._factory.repo(wire)
200 fs_ptr = svn.repos.fs(repo)
185 fs_ptr = svn.repos.fs(repo)
201 head = svn.fs.youngest_rev(fs_ptr)
186 head = svn.fs.youngest_rev(fs_ptr)
202 return head
187 return head
203
188
204 def lookup_interval(self, wire, start_ts, end_ts):
189 def lookup_interval(self, wire, start_ts, end_ts):
205 repo = self._factory.repo(wire)
190 repo = self._factory.repo(wire)
206 fsobj = svn.repos.fs(repo)
191 fsobj = svn.repos.fs(repo)
207 start_rev = None
192 start_rev = None
208 end_rev = None
193 end_rev = None
209 if start_ts:
194 if start_ts:
210 start_ts_svn = apr_time_t(start_ts)
195 start_ts_svn = apr_time_t(start_ts)
211 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
196 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
212 else:
197 else:
213 start_rev = 1
198 start_rev = 1
214 if end_ts:
199 if end_ts:
215 end_ts_svn = apr_time_t(end_ts)
200 end_ts_svn = apr_time_t(end_ts)
216 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
201 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
217 else:
202 else:
218 end_rev = svn.fs.youngest_rev(fsobj)
203 end_rev = svn.fs.youngest_rev(fsobj)
219 return start_rev, end_rev
204 return start_rev, end_rev
220
205
221 def revision_properties(self, wire, revision):
206 def revision_properties(self, wire, revision):
222 repo = self._factory.repo(wire)
207
223 fs_ptr = svn.repos.fs(repo)
208 cache_on, context_uid, repo_id = self._cache_on(wire)
224 return svn.fs.revision_proplist(fs_ptr, revision)
209 @self.region.conditional_cache_on_arguments(condition=cache_on)
210 def _revision_properties(_repo_id, _revision):
211 repo = self._factory.repo(wire)
212 fs_ptr = svn.repos.fs(repo)
213 return svn.fs.revision_proplist(fs_ptr, revision)
214 return _revision_properties(repo_id, revision)
225
215
226 def revision_changes(self, wire, revision):
216 def revision_changes(self, wire, revision):
227
217
228 repo = self._factory.repo(wire)
218 repo = self._factory.repo(wire)
229 fsobj = svn.repos.fs(repo)
219 fsobj = svn.repos.fs(repo)
230 rev_root = svn.fs.revision_root(fsobj, revision)
220 rev_root = svn.fs.revision_root(fsobj, revision)
231
221
232 editor = svn.repos.ChangeCollector(fsobj, rev_root)
222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
233 editor_ptr, editor_baton = svn.delta.make_editor(editor)
223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
234 base_dir = ""
224 base_dir = ""
235 send_deltas = False
225 send_deltas = False
236 svn.repos.replay2(
226 svn.repos.replay2(
237 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
238 editor_ptr, editor_baton, None)
228 editor_ptr, editor_baton, None)
239
229
240 added = []
230 added = []
241 changed = []
231 changed = []
242 removed = []
232 removed = []
243
233
244 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
245 for path, change in editor.changes.iteritems():
235 for path, change in editor.changes.iteritems():
246 # TODO: Decide what to do with directory nodes. Subversion can add
236 # TODO: Decide what to do with directory nodes. Subversion can add
247 # empty directories.
237 # empty directories.
248
238
249 if change.item_kind == svn.core.svn_node_dir:
239 if change.item_kind == svn.core.svn_node_dir:
250 continue
240 continue
251 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
252 added.append(path)
242 added.append(path)
253 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
254 svn.repos.CHANGE_ACTION_REPLACE]:
244 svn.repos.CHANGE_ACTION_REPLACE]:
255 changed.append(path)
245 changed.append(path)
256 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
257 removed.append(path)
247 removed.append(path)
258 else:
248 else:
259 raise NotImplementedError(
249 raise NotImplementedError(
260 "Action %s not supported on path %s" % (
250 "Action %s not supported on path %s" % (
261 change.action, path))
251 change.action, path))
262
252
263 changes = {
253 changes = {
264 'added': added,
254 'added': added,
265 'changed': changed,
255 'changed': changed,
266 'removed': removed,
256 'removed': removed,
267 }
257 }
268 return changes
258 return changes
269
259
260 @reraise_safe_exceptions
270 def node_history(self, wire, path, revision, limit):
261 def node_history(self, wire, path, revision, limit):
271 cross_copies = False
262 cache_on, context_uid, repo_id = self._cache_on(wire)
272 repo = self._factory.repo(wire)
263 @self.region.conditional_cache_on_arguments(condition=cache_on)
273 fsobj = svn.repos.fs(repo)
264 def _node_history(_context_uid, _repo_id, _path, _revision, _limit):
274 rev_root = svn.fs.revision_root(fsobj, revision)
265 cross_copies = False
266 repo = self._factory.repo(wire)
267 fsobj = svn.repos.fs(repo)
268 rev_root = svn.fs.revision_root(fsobj, revision)
275
269
276 history_revisions = []
270 history_revisions = []
277 history = svn.fs.node_history(rev_root, path)
271 history = svn.fs.node_history(rev_root, path)
278 history = svn.fs.history_prev(history, cross_copies)
279 while history:
280 __, node_revision = svn.fs.history_location(history)
281 history_revisions.append(node_revision)
282 if limit and len(history_revisions) >= limit:
283 break
284 history = svn.fs.history_prev(history, cross_copies)
272 history = svn.fs.history_prev(history, cross_copies)
285 return history_revisions
273 while history:
274 __, node_revision = svn.fs.history_location(history)
275 history_revisions.append(node_revision)
276 if limit and len(history_revisions) >= limit:
277 break
278 history = svn.fs.history_prev(history, cross_copies)
279 return history_revisions
280 return _node_history(context_uid, repo_id, path, revision, limit)
286
281
287 def node_properties(self, wire, path, revision):
282 def node_properties(self, wire, path, revision):
288 repo = self._factory.repo(wire)
283 cache_on, context_uid, repo_id = self._cache_on(wire)
289 fsobj = svn.repos.fs(repo)
284 @self.region.conditional_cache_on_arguments(condition=cache_on)
290 rev_root = svn.fs.revision_root(fsobj, revision)
285 def _node_properties(_repo_id, _path, _revision):
291 return svn.fs.node_proplist(rev_root, path)
286 repo = self._factory.repo(wire)
287 fsobj = svn.repos.fs(repo)
288 rev_root = svn.fs.revision_root(fsobj, revision)
289 return svn.fs.node_proplist(rev_root, path)
290 return _node_properties(repo_id, path, revision)
292
291
293 def file_annotate(self, wire, path, revision):
292 def file_annotate(self, wire, path, revision):
294 abs_path = 'file://' + urllib.pathname2url(
293 abs_path = 'file://' + urllib.pathname2url(
295 vcspath.join(wire['path'], path))
294 vcspath.join(wire['path'], path))
296 file_uri = svn.core.svn_path_canonicalize(abs_path)
295 file_uri = svn.core.svn_path_canonicalize(abs_path)
297
296
298 start_rev = svn_opt_revision_value_t(0)
297 start_rev = svn_opt_revision_value_t(0)
299 peg_rev = svn_opt_revision_value_t(revision)
298 peg_rev = svn_opt_revision_value_t(revision)
300 end_rev = peg_rev
299 end_rev = peg_rev
301
300
302 annotations = []
301 annotations = []
303
302
304 def receiver(line_no, revision, author, date, line, pool):
303 def receiver(line_no, revision, author, date, line, pool):
305 annotations.append((line_no, revision, line))
304 annotations.append((line_no, revision, line))
306
305
307 # TODO: Cannot use blame5, missing typemap function in the swig code
306 # TODO: Cannot use blame5, missing typemap function in the swig code
308 try:
307 try:
309 svn.client.blame2(
308 svn.client.blame2(
310 file_uri, peg_rev, start_rev, end_rev,
309 file_uri, peg_rev, start_rev, end_rev,
311 receiver, svn.client.create_context())
310 receiver, svn.client.create_context())
312 except svn.core.SubversionException as exc:
311 except svn.core.SubversionException as exc:
313 log.exception("Error during blame operation.")
312 log.exception("Error during blame operation.")
314 raise Exception(
313 raise Exception(
315 "Blame not supported or file does not exist at path %s. "
314 "Blame not supported or file does not exist at path %s. "
316 "Error %s." % (path, exc))
315 "Error %s." % (path, exc))
317
316
318 return annotations
317 return annotations
319
318
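
file_annotate collects blame lines through a receiver callback rather than a return value. The shape of that pattern, detached from the svn bindings (walk and its arguments are made up for illustration):

def walk(lines, receiver):
    # the library drives the callback once per line, like svn.client.blame2
    for line_no, line in enumerate(lines, 1):
        receiver(line_no, line)

annotations = []
walk(['first\n', 'second\n'],
     lambda line_no, line: annotations.append((line_no, line)))
assert annotations == [(1, 'first\n'), (2, 'second\n')]
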
320 def get_node_type(self, wire, path, rev=None):
319 def get_node_type(self, wire, path, revision=None):
321 repo = self._factory.repo(wire)
320
322 fs_ptr = svn.repos.fs(repo)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
323 if rev is None:
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
324 rev = svn.fs.youngest_rev(fs_ptr)
323 def _get_node_type(_repo_id, _path, _revision):
325 root = svn.fs.revision_root(fs_ptr, rev)
324 repo = self._factory.repo(wire)
326 node = svn.fs.check_path(root, path)
325 fs_ptr = svn.repos.fs(repo)
327 return NODE_TYPE_MAPPING.get(node, None)
326 if _revision is None:
327 _revision = svn.fs.youngest_rev(fs_ptr)
328 root = svn.fs.revision_root(fs_ptr, _revision)
329 node = svn.fs.check_path(root, path)
330 return NODE_TYPE_MAPPING.get(node, None)
331 return _get_node_type(repo_id, path, revision)
328
332
329 def get_nodes(self, wire, path, revision=None):
333 def get_nodes(self, wire, path, revision=None):
330 repo = self._factory.repo(wire)
334
331 fsobj = svn.repos.fs(repo)
335 cache_on, context_uid, repo_id = self._cache_on(wire)
332 if revision is None:
336 @self.region.conditional_cache_on_arguments(condition=cache_on)
333 revision = svn.fs.youngest_rev(fsobj)
337 def _get_nodes(_repo_id, _path, _revision):
334 root = svn.fs.revision_root(fsobj, revision)
338 repo = self._factory.repo(wire)
335 entries = svn.fs.dir_entries(root, path)
339 fsobj = svn.repos.fs(repo)
336 result = []
340 if _revision is None:
337 for entry_path, entry_info in entries.iteritems():
341 _revision = svn.fs.youngest_rev(fsobj)
338 result.append(
342 root = svn.fs.revision_root(fsobj, _revision)
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 entries = svn.fs.dir_entries(root, path)
340 return result
344 result = []
345 for entry_path, entry_info in entries.iteritems():
346 result.append(
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 return result
349 return _get_nodes(repo_id, path, revision)
341
350
342 def get_file_content(self, wire, path, rev=None):
351 def get_file_content(self, wire, path, rev=None):
343 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
344 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
345 if rev is None:
354 if rev is None:
346 rev = svn.fs.youngest_revision(fsobj)
355 rev = svn.fs.youngest_revision(fsobj)
347 root = svn.fs.revision_root(fsobj, rev)
356 root = svn.fs.revision_root(fsobj, rev)
348 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
349 return content.read()
358 return content.read()
350
359
351 def get_file_size(self, wire, path, revision=None):
360 def get_file_size(self, wire, path, revision=None):
352 repo = self._factory.repo(wire)
361
353 fsobj = svn.repos.fs(repo)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
354 if revision is None:
363 @self.region.conditional_cache_on_arguments(condition=cache_on)
355 revision = svn.fs.youngest_revision(fsobj)
364 def _get_file_size(_repo_id, _path, _revision):
356 root = svn.fs.revision_root(fsobj, revision)
365 repo = self._factory.repo(wire)
357 size = svn.fs.file_length(root, path)
366 fsobj = svn.repos.fs(repo)
358 return size
367 if _revision is None:
368 _revision = svn.fs.youngest_revision(fsobj)
369 root = svn.fs.revision_root(fsobj, _revision)
370 size = svn.fs.file_length(root, path)
371 return size
372 return _get_file_size(repo_id, path, revision)
359
373
360 def create_repository(self, wire, compatible_version=None):
374 def create_repository(self, wire, compatible_version=None):
361 log.info('Creating Subversion repository in path "%s"', wire['path'])
375 log.info('Creating Subversion repository in path "%s"', wire['path'])
362 self._factory.repo(wire, create=True,
376 self._factory.repo(wire, create=True,
363 compatible_version=compatible_version)
377 compatible_version=compatible_version)
364
378
365 def get_url_and_credentials(self, src_url):
379 def get_url_and_credentials(self, src_url):
366 obj = urlparse.urlparse(src_url)
380 obj = urlparse.urlparse(src_url)
367 username = obj.username or None
381 username = obj.username or None
368 password = obj.password or None
382 password = obj.password or None
369 return username, password, src_url
383 return username, password, src_url
370
384
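
get_url_and_credentials relies on urlparse exposing userinfo embedded in the URL. A concrete example of what it extracts (the URL is made up):

import urlparse  # Python 2 stdlib, as used by this module

obj = urlparse.urlparse('svn://alice:secret@host.example/repo')
assert obj.username == 'alice'
assert obj.password == 'secret'
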
371 def import_remote_repository(self, wire, src_url):
385 def import_remote_repository(self, wire, src_url):
372 repo_path = wire['path']
386 repo_path = wire['path']
373 if not self.is_path_valid_repository(wire, repo_path):
387 if not self.is_path_valid_repository(wire, repo_path):
374 raise Exception(
388 raise Exception(
375 "Path %s is not a valid Subversion repository." % repo_path)
389 "Path %s is not a valid Subversion repository." % repo_path)
376
390
377 username, password, src_url = self.get_url_and_credentials(src_url)
391 username, password, src_url = self.get_url_and_credentials(src_url)
378 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
392 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
379 '--trust-server-cert-failures=unknown-ca']
393 '--trust-server-cert-failures=unknown-ca']
380 if username and password:
394 if username and password:
381 rdump_cmd += ['--username', username, '--password', password]
395 rdump_cmd += ['--username', username, '--password', password]
382 rdump_cmd += [src_url]
396 rdump_cmd += [src_url]
383
397
384 rdump = subprocess.Popen(
398 rdump = subprocess.Popen(
385 rdump_cmd,
399 rdump_cmd,
386 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
400 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
387 load = subprocess.Popen(
401 load = subprocess.Popen(
388 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
402 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
389
403
390 # TODO: johbo: This can be a very long operation, might be better
404 # TODO: johbo: This can be a very long operation, might be better
391 # to track some kind of status and provide an api to check if the
405 # to track some kind of status and provide an api to check if the
392 # import is done.
406 # import is done.
393 rdump.wait()
407 rdump.wait()
394 load.wait()
408 load.wait()
395
409
396 log.debug('svnrdump dump process ended with code: %s', rdump.returncode)
410 log.debug('svnrdump dump process ended with code: %s', rdump.returncode)
397 if rdump.returncode != 0:
411 if rdump.returncode != 0:
398 errors = rdump.stderr.read()
412 errors = rdump.stderr.read()
399 log.error('svnrdump dump failed: status code %s, message: %s',
413 log.error('svnrdump dump failed: status code %s, message: %s',
400 rdump.returncode, errors)
414 rdump.returncode, errors)
401 reason = 'UNKNOWN'
415 reason = 'UNKNOWN'
402 if 'svnrdump: E230001:' in errors:
416 if 'svnrdump: E230001:' in errors:
403 reason = 'INVALID_CERTIFICATE'
417 reason = 'INVALID_CERTIFICATE'
404
418
405 if reason == 'UNKNOWN':
419 if reason == 'UNKNOWN':
406 reason = 'UNKNOWN:{}'.format(errors)
420 reason = 'UNKNOWN:{}'.format(errors)
407 raise Exception(
421 raise Exception(
408 'Failed to dump the remote repository from %s. Reason: %s' % (
422 'Failed to dump the remote repository from %s. Reason: %s' % (
409 src_url, reason))
423 src_url, reason))
410 if load.returncode != 0:
424 if load.returncode != 0:
411 raise Exception(
425 raise Exception(
412 'Failed to load the dump of remote repository from %s.' %
426 'Failed to load the dump of remote repository from %s.' %
413 (src_url, ))
427 (src_url, ))
414
428
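
import_remote_repository pipes svnrdump straight into svnadmin load. A self-contained sketch of that producer/consumer wiring with placeholder commands; note the explicit close of the parent's copy of the pipe, a commonly recommended extra step that the code above omits:

import subprocess

producer = subprocess.Popen(
    ['echo', 'dump-payload'],
    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
consumer = subprocess.Popen(['cat'], stdin=producer.stdout)
# closing our handle lets the producer receive SIGPIPE if the consumer dies
producer.stdout.close()
producer.wait()
consumer.wait()
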
415 def commit(self, wire, message, author, timestamp, updated, removed):
429 def commit(self, wire, message, author, timestamp, updated, removed):
416 assert isinstance(message, str)
430 assert isinstance(message, str)
417 assert isinstance(author, str)
431 assert isinstance(author, str)
418
432
419 repo = self._factory.repo(wire)
433 repo = self._factory.repo(wire)
420 fsobj = svn.repos.fs(repo)
434 fsobj = svn.repos.fs(repo)
421
435
422 rev = svn.fs.youngest_rev(fsobj)
436 rev = svn.fs.youngest_rev(fsobj)
423 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
437 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
424 txn_root = svn.fs.txn_root(txn)
438 txn_root = svn.fs.txn_root(txn)
425
439
426 for node in updated:
440 for node in updated:
427 TxnNodeProcessor(node, txn_root).update()
441 TxnNodeProcessor(node, txn_root).update()
428 for node in removed:
442 for node in removed:
429 TxnNodeProcessor(node, txn_root).remove()
443 TxnNodeProcessor(node, txn_root).remove()
430
444
431 commit_id = svn.repos.fs_commit_txn(repo, txn)
445 commit_id = svn.repos.fs_commit_txn(repo, txn)
432
446
433 if timestamp:
447 if timestamp:
434 apr_time = apr_time_t(timestamp)
448 apr_time = apr_time_t(timestamp)
435 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
449 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
436 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
450 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
437
451
438 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
452 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
439 return commit_id
453 return commit_id
440
454
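
An illustrative call of the commit method above; remote and wire are assumed to exist already, and the node dicts follow what TxnNodeProcessor expects further down ('path' and 'content' as str, optional 'properties'):

updated = [{'path': 'docs/readme.txt',
            'content': 'hello\n',
            'properties': {'svn:eol-style': 'native'}}]
removed = []
new_rev = remote.commit(
    wire, message='add readme', author='editor',
    timestamp=1546300800, updated=updated, removed=removed)
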
441 def diff(self, wire, rev1, rev2, path1=None, path2=None,
455 def diff(self, wire, rev1, rev2, path1=None, path2=None,
442 ignore_whitespace=False, context=3):
456 ignore_whitespace=False, context=3):
443
457
444 wire.update(cache=False)
458 wire.update(cache=False)
445 repo = self._factory.repo(wire)
459 repo = self._factory.repo(wire)
446 diff_creator = SvnDiffer(
460 diff_creator = SvnDiffer(
447 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
461 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
448 try:
462 try:
449 return diff_creator.generate_diff()
463 return diff_creator.generate_diff()
450 except svn.core.SubversionException as e:
464 except svn.core.SubversionException as e:
451 log.exception(
465 log.exception(
452 "Error during diff operation operation. "
466 "Error during diff operation operation. "
453 "Path might not exist %s, %s" % (path1, path2))
467 "Path might not exist %s, %s" % (path1, path2))
454 return ""
468 return ""
455
469
456 @reraise_safe_exceptions
470 @reraise_safe_exceptions
457 def is_large_file(self, wire, path):
471 def is_large_file(self, wire, path):
458 return False
472 return False
459
473
460 @reraise_safe_exceptions
474 @reraise_safe_exceptions
475 def is_binary(self, wire, rev, path):
476 cache_on, context_uid, repo_id = self._cache_on(wire)
477
478 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 def _is_binary(_repo_id, _rev, _path):
480 raw_bytes = self.get_file_content(wire, path, rev)
481 return raw_bytes and '\0' in raw_bytes
482
483 return _is_binary(repo_id, rev, path)
484
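
The NUL-byte test in _is_binary above is a cheap, widely used heuristic (git applies a similar one). Isolated below, with the truthiness subtlety normalized via bool() so the helper always returns a boolean; looks_binary is an illustrative name:

def looks_binary(raw_bytes):
    # empty content is treated as text; any NUL byte flags binary
    return bool(raw_bytes) and '\0' in raw_bytes

assert looks_binary('\x89PNG\r\n\x1a\n\x00')
assert not looks_binary('plain text\n')
assert looks_binary('') is False
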
485 @reraise_safe_exceptions
461 def run_svn_command(self, wire, cmd, **opts):
486 def run_svn_command(self, wire, cmd, **opts):
462 path = wire.get('path', None)
487 path = wire.get('path', None)
463
488
464 if path and os.path.isdir(path):
489 if path and os.path.isdir(path):
465 opts['cwd'] = path
490 opts['cwd'] = path
466
491
467 safe_call = False
492 safe_call = False
468 if '_safe' in opts:
493 if '_safe' in opts:
469 safe_call = True
494 safe_call = True
470
495
471 svnenv = os.environ.copy()
496 svnenv = os.environ.copy()
472 svnenv.update(opts.pop('extra_env', {}))
497 svnenv.update(opts.pop('extra_env', {}))
473
498
474 _opts = {'env': svnenv, 'shell': False}
499 _opts = {'env': svnenv, 'shell': False}
475
500
476 try:
501 try:
477 _opts.update(opts)
502 _opts.update(opts)
478 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
479
504
480 return ''.join(p), ''.join(p.error)
505 return ''.join(p), ''.join(p.error)
481 except (EnvironmentError, OSError) as err:
506 except (EnvironmentError, OSError) as err:
482 cmd = ' '.join(cmd) # human friendly CMD
507 cmd = ' '.join(cmd) # human friendly CMD
483 tb_err = ("Couldn't run svn command (%s).\n"
508 tb_err = ("Couldn't run svn command (%s).\n"
484 "Original error was:%s\n"
509 "Original error was:%s\n"
485 "Call options:%s\n"
510 "Call options:%s\n"
486 % (cmd, err, _opts))
511 % (cmd, err, _opts))
487 log.exception(tb_err)
512 log.exception(tb_err)
488 if safe_call:
513 if safe_call:
489 return '', err
514 return '', err
490 else:
515 else:
491 raise exceptions.VcsException()(tb_err)
516 raise exceptions.VcsException()(tb_err)
492
517
493 @reraise_safe_exceptions
518 @reraise_safe_exceptions
494 def install_hooks(self, wire, force=False):
519 def install_hooks(self, wire, force=False):
495 from vcsserver.hook_utils import install_svn_hooks
520 from vcsserver.hook_utils import install_svn_hooks
496 repo_path = wire['path']
521 repo_path = wire['path']
497 binary_dir = settings.BINARY_DIR
522 binary_dir = settings.BINARY_DIR
498 executable = None
523 executable = None
499 if binary_dir:
524 if binary_dir:
500 executable = os.path.join(binary_dir, 'python')
525 executable = os.path.join(binary_dir, 'python')
501 return install_svn_hooks(
526 return install_svn_hooks(
502 repo_path, executable=executable, force_create=force)
527 repo_path, executable=executable, force_create=force)
503
528
504 @reraise_safe_exceptions
529 @reraise_safe_exceptions
505 def get_hooks_info(self, wire):
530 def get_hooks_info(self, wire):
506 from vcsserver.hook_utils import (
531 from vcsserver.hook_utils import (
507 get_svn_pre_hook_version, get_svn_post_hook_version)
532 get_svn_pre_hook_version, get_svn_post_hook_version)
508 repo_path = wire['path']
533 repo_path = wire['path']
509 return {
534 return {
510 'pre_version': get_svn_pre_hook_version(repo_path),
535 'pre_version': get_svn_pre_hook_version(repo_path),
511 'post_version': get_svn_post_hook_version(repo_path),
536 'post_version': get_svn_post_hook_version(repo_path),
512 }
537 }
513
538
514
539
515 class SvnDiffer(object):
540 class SvnDiffer(object):
516 """
541 """
517 Utility to create diffs based on difflib and the Subversion api
542 Utility to create diffs based on difflib and the Subversion api
518 """
543 """
519
544
520 binary_content = False
545 binary_content = False
521
546
522 def __init__(
547 def __init__(
523 self, repo, src_rev, src_path, tgt_rev, tgt_path,
548 self, repo, src_rev, src_path, tgt_rev, tgt_path,
524 ignore_whitespace, context):
549 ignore_whitespace, context):
525 self.repo = repo
550 self.repo = repo
526 self.ignore_whitespace = ignore_whitespace
551 self.ignore_whitespace = ignore_whitespace
527 self.context = context
552 self.context = context
528
553
529 fsobj = svn.repos.fs(repo)
554 fsobj = svn.repos.fs(repo)
530
555
531 self.tgt_rev = tgt_rev
556 self.tgt_rev = tgt_rev
532 self.tgt_path = tgt_path or ''
557 self.tgt_path = tgt_path or ''
533 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
558 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
534 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
559 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
535
560
536 self.src_rev = src_rev
561 self.src_rev = src_rev
537 self.src_path = src_path or self.tgt_path
562 self.src_path = src_path or self.tgt_path
538 self.src_root = svn.fs.revision_root(fsobj, src_rev)
563 self.src_root = svn.fs.revision_root(fsobj, src_rev)
539 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
564 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
540
565
541 self._validate()
566 self._validate()
542
567
543 def _validate(self):
568 def _validate(self):
544 if (self.tgt_kind != svn.core.svn_node_none and
569 if (self.tgt_kind != svn.core.svn_node_none and
545 self.src_kind != svn.core.svn_node_none and
570 self.src_kind != svn.core.svn_node_none and
546 self.src_kind != self.tgt_kind):
571 self.src_kind != self.tgt_kind):
547 # TODO: johbo: proper error handling
572 # TODO: johbo: proper error handling
548 raise Exception(
573 raise Exception(
549 "Source and target are not compatible for diff generation. "
574 "Source and target are not compatible for diff generation. "
550 "Source type: %s, target type: %s" %
575 "Source type: %s, target type: %s" %
551 (self.src_kind, self.tgt_kind))
576 (self.src_kind, self.tgt_kind))
552
577
553 def generate_diff(self):
578 def generate_diff(self):
554 buf = StringIO.StringIO()
579 buf = StringIO.StringIO()
555 if self.tgt_kind == svn.core.svn_node_dir:
580 if self.tgt_kind == svn.core.svn_node_dir:
556 self._generate_dir_diff(buf)
581 self._generate_dir_diff(buf)
557 else:
582 else:
558 self._generate_file_diff(buf)
583 self._generate_file_diff(buf)
559 return buf.getvalue()
584 return buf.getvalue()
560
585
561 def _generate_dir_diff(self, buf):
586 def _generate_dir_diff(self, buf):
562 editor = DiffChangeEditor()
587 editor = DiffChangeEditor()
563 editor_ptr, editor_baton = svn.delta.make_editor(editor)
588 editor_ptr, editor_baton = svn.delta.make_editor(editor)
564 svn.repos.dir_delta2(
589 svn.repos.dir_delta2(
565 self.src_root,
590 self.src_root,
566 self.src_path,
591 self.src_path,
567 '', # src_entry
592 '', # src_entry
568 self.tgt_root,
593 self.tgt_root,
569 self.tgt_path,
594 self.tgt_path,
570 editor_ptr, editor_baton,
595 editor_ptr, editor_baton,
571 authorization_callback_allow_all,
596 authorization_callback_allow_all,
572 False, # text_deltas
597 False, # text_deltas
573 svn.core.svn_depth_infinity, # depth
598 svn.core.svn_depth_infinity, # depth
574 False, # entry_props
599 False, # entry_props
575 False, # ignore_ancestry
600 False, # ignore_ancestry
576 )
601 )
577
602
578 for path, __, change in sorted(editor.changes):
603 for path, __, change in sorted(editor.changes):
579 self._generate_node_diff(
604 self._generate_node_diff(
580 buf, change, path, self.tgt_path, path, self.src_path)
605 buf, change, path, self.tgt_path, path, self.src_path)
581
606
582 def _generate_file_diff(self, buf):
607 def _generate_file_diff(self, buf):
583 change = None
608 change = None
584 if self.src_kind == svn.core.svn_node_none:
609 if self.src_kind == svn.core.svn_node_none:
585 change = "add"
610 change = "add"
586 elif self.tgt_kind == svn.core.svn_node_none:
611 elif self.tgt_kind == svn.core.svn_node_none:
587 change = "delete"
612 change = "delete"
588 tgt_base, tgt_path = vcspath.split(self.tgt_path)
613 tgt_base, tgt_path = vcspath.split(self.tgt_path)
589 src_base, src_path = vcspath.split(self.src_path)
614 src_base, src_path = vcspath.split(self.src_path)
590 self._generate_node_diff(
615 self._generate_node_diff(
591 buf, change, tgt_path, tgt_base, src_path, src_base)
616 buf, change, tgt_path, tgt_base, src_path, src_base)
592
617
593 def _generate_node_diff(
618 def _generate_node_diff(
594 self, buf, change, tgt_path, tgt_base, src_path, src_base):
619 self, buf, change, tgt_path, tgt_base, src_path, src_base):
595
620
596 if self.src_rev == self.tgt_rev and tgt_base == src_base:
621 if self.src_rev == self.tgt_rev and tgt_base == src_base:
597 # for consistency with git/hg, return an empty diff when
622 # for consistency with git/hg, return an empty diff when
598 # comparing the same revisions
623 # comparing the same revisions
599 return
624 return
600
625
601 tgt_full_path = vcspath.join(tgt_base, tgt_path)
626 tgt_full_path = vcspath.join(tgt_base, tgt_path)
602 src_full_path = vcspath.join(src_base, src_path)
627 src_full_path = vcspath.join(src_base, src_path)
603
628
604 self.binary_content = False
629 self.binary_content = False
605 mime_type = self._get_mime_type(tgt_full_path)
630 mime_type = self._get_mime_type(tgt_full_path)
606
631
607 if mime_type and not mime_type.startswith('text'):
632 if mime_type and not mime_type.startswith('text'):
608 self.binary_content = True
633 self.binary_content = True
609 buf.write("=" * 67 + '\n')
634 buf.write("=" * 67 + '\n')
610 buf.write("Cannot display: file marked as a binary type.\n")
635 buf.write("Cannot display: file marked as a binary type.\n")
611 buf.write("svn:mime-type = %s\n" % mime_type)
636 buf.write("svn:mime-type = %s\n" % mime_type)
612 buf.write("Index: %s\n" % (tgt_path, ))
637 buf.write("Index: %s\n" % (tgt_path, ))
613 buf.write("=" * 67 + '\n')
638 buf.write("=" * 67 + '\n')
614 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
639 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
615 'tgt_path': tgt_path})
640 'tgt_path': tgt_path})
616
641
617 if change == 'add':
642 if change == 'add':
618 # TODO: johbo: SVN is missing a zero here compared to git
643 # TODO: johbo: SVN is missing a zero here compared to git
619 buf.write("new file mode 10644\n")
644 buf.write("new file mode 10644\n")
620
645
621 # TODO(marcink): introduce binary detection of svn patches
646 # TODO(marcink): introduce binary detection of svn patches
622 # if self.binary_content:
647 # if self.binary_content:
623 # buf.write('GIT binary patch\n')
648 # buf.write('GIT binary patch\n')
624
649
625 buf.write("--- /dev/null\t(revision 0)\n")
650 buf.write("--- /dev/null\t(revision 0)\n")
626 src_lines = []
651 src_lines = []
627 else:
652 else:
628 if change == 'delete':
653 if change == 'delete':
629 buf.write("deleted file mode 10644\n")
654 buf.write("deleted file mode 10644\n")
630
655
631 # TODO(marcink): introduce binary detection of svn patches
656 # TODO(marcink): introduce binary detection of svn patches
632 # if self.binary_content:
657 # if self.binary_content:
633 # buf.write('GIT binary patch\n')
658 # buf.write('GIT binary patch\n')
634
659
635 buf.write("--- a/%s\t(revision %s)\n" % (
660 buf.write("--- a/%s\t(revision %s)\n" % (
636 src_path, self.src_rev))
661 src_path, self.src_rev))
637 src_lines = self._svn_readlines(self.src_root, src_full_path)
662 src_lines = self._svn_readlines(self.src_root, src_full_path)
638
663
639 if change == 'delete':
664 if change == 'delete':
640 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
665 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
641 tgt_lines = []
666 tgt_lines = []
642 else:
667 else:
643 buf.write("+++ b/%s\t(revision %s)\n" % (
668 buf.write("+++ b/%s\t(revision %s)\n" % (
644 tgt_path, self.tgt_rev))
669 tgt_path, self.tgt_rev))
645 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
670 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
646
671
647 if not self.binary_content:
672 if not self.binary_content:
648 udiff = svn_diff.unified_diff(
673 udiff = svn_diff.unified_diff(
649 src_lines, tgt_lines, context=self.context,
674 src_lines, tgt_lines, context=self.context,
650 ignore_blank_lines=self.ignore_whitespace,
675 ignore_blank_lines=self.ignore_whitespace,
651 ignore_case=False,
676 ignore_case=False,
652 ignore_space_changes=self.ignore_whitespace)
677 ignore_space_changes=self.ignore_whitespace)
653 buf.writelines(udiff)
678 buf.writelines(udiff)
654
679
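
_generate_node_diff assembles a git-style header followed by unified hunks. A difflib-based analogue for illustration (the internal svn_diff helper and its ignore_* switches are not reproduced, and simple_git_diff is a made-up name):

import difflib

def simple_git_diff(src_lines, tgt_lines, path):
    header = 'diff --git a/%(p)s b/%(p)s\n' % {'p': path}
    hunks = difflib.unified_diff(
        src_lines, tgt_lines,
        fromfile='a/' + path, tofile='b/' + path)
    return header + ''.join(hunks)

print(simple_git_diff(['a\n', 'b\n'], ['a\n', 'c\n'], 'file.txt'))
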
655 def _get_mime_type(self, path):
680 def _get_mime_type(self, path):
656 try:
681 try:
657 mime_type = svn.fs.node_prop(
682 mime_type = svn.fs.node_prop(
658 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
683 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
659 except svn.core.SubversionException:
684 except svn.core.SubversionException:
660 mime_type = svn.fs.node_prop(
685 mime_type = svn.fs.node_prop(
661 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
686 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
662 return mime_type
687 return mime_type
663
688
664 def _svn_readlines(self, fs_root, node_path):
689 def _svn_readlines(self, fs_root, node_path):
665 if self.binary_content:
690 if self.binary_content:
666 return []
691 return []
667 node_kind = svn.fs.check_path(fs_root, node_path)
692 node_kind = svn.fs.check_path(fs_root, node_path)
668 if node_kind not in (
693 if node_kind not in (
669 svn.core.svn_node_file, svn.core.svn_node_symlink):
694 svn.core.svn_node_file, svn.core.svn_node_symlink):
670 return []
695 return []
671 content = svn.core.Stream(
696 content = svn.core.Stream(
672 svn.fs.file_contents(fs_root, node_path)).read()
697 svn.fs.file_contents(fs_root, node_path)).read()
673 return content.splitlines(True)
698 return content.splitlines(True)
674
699
675
700
676
677 class DiffChangeEditor(svn.delta.Editor):
701 class DiffChangeEditor(svn.delta.Editor):
678 """
702 """
679 Records changes between two given revisions
703 Records changes between two given revisions
680 """
704 """
681
705
682 def __init__(self):
706 def __init__(self):
683 self.changes = []
707 self.changes = []
684
708
685 def delete_entry(self, path, revision, parent_baton, pool=None):
709 def delete_entry(self, path, revision, parent_baton, pool=None):
686 self.changes.append((path, None, 'delete'))
710 self.changes.append((path, None, 'delete'))
687
711
688 def add_file(
712 def add_file(
689 self, path, parent_baton, copyfrom_path, copyfrom_revision,
713 self, path, parent_baton, copyfrom_path, copyfrom_revision,
690 file_pool=None):
714 file_pool=None):
691 self.changes.append((path, 'file', 'add'))
715 self.changes.append((path, 'file', 'add'))
692
716
693 def open_file(self, path, parent_baton, base_revision, file_pool=None):
717 def open_file(self, path, parent_baton, base_revision, file_pool=None):
694 self.changes.append((path, 'file', 'change'))
718 self.changes.append((path, 'file', 'change'))
695
719
696
720
697 def authorization_callback_allow_all(root, path, pool):
721 def authorization_callback_allow_all(root, path, pool):
698 return True
722 return True
699
723
700
724
701 class TxnNodeProcessor(object):
725 class TxnNodeProcessor(object):
702 """
726 """
703 Utility to process the change of one node within a transaction root.
727 Utility to process the change of one node within a transaction root.
704
728
705 It encapsulates the knowledge of how to add, update or remove
729 It encapsulates the knowledge of how to add, update or remove
706 a node for a given transaction root. The purpose is to support the method
730 a node for a given transaction root. The purpose is to support the method
707 `SvnRemote.commit`.
731 `SvnRemote.commit`.
708 """
732 """
709
733
710 def __init__(self, node, txn_root):
734 def __init__(self, node, txn_root):
711 assert isinstance(node['path'], str)
735 assert isinstance(node['path'], str)
712
736
713 self.node = node
737 self.node = node
714 self.txn_root = txn_root
738 self.txn_root = txn_root
715
739
716 def update(self):
740 def update(self):
717 self._ensure_parent_dirs()
741 self._ensure_parent_dirs()
718 self._add_file_if_node_does_not_exist()
742 self._add_file_if_node_does_not_exist()
719 self._update_file_content()
743 self._update_file_content()
720 self._update_file_properties()
744 self._update_file_properties()
721
745
722 def remove(self):
746 def remove(self):
723 svn.fs.delete(self.txn_root, self.node['path'])
747 svn.fs.delete(self.txn_root, self.node['path'])
724 # TODO: Clean up directory if empty
748 # TODO: Clean up directory if empty
725
749
726 def _ensure_parent_dirs(self):
750 def _ensure_parent_dirs(self):
727 curdir = vcspath.dirname(self.node['path'])
751 curdir = vcspath.dirname(self.node['path'])
728 dirs_to_create = []
752 dirs_to_create = []
729 while not self._svn_path_exists(curdir):
753 while not self._svn_path_exists(curdir):
730 dirs_to_create.append(curdir)
754 dirs_to_create.append(curdir)
731 curdir = vcspath.dirname(curdir)
755 curdir = vcspath.dirname(curdir)
732
756
733 for curdir in reversed(dirs_to_create):
757 for curdir in reversed(dirs_to_create):
734 log.debug('Creating missing directory "%s"', curdir)
758 log.debug('Creating missing directory "%s"', curdir)
735 svn.fs.make_dir(self.txn_root, curdir)
759 svn.fs.make_dir(self.txn_root, curdir)
736
760
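
_ensure_parent_dirs walks up until it finds an existing ancestor, then creates the missing directories top-down. The same traversal with plain os.path and an injected exists() predicate, for illustration; the extra curdir guard here simply stops at the path root:

import os.path

def missing_parents(path, exists):
    curdir = os.path.dirname(path)
    todo = []
    while curdir and not exists(curdir):
        todo.append(curdir)
        curdir = os.path.dirname(curdir)
    # create shallowest first, exactly like the reversed() loop above
    return list(reversed(todo))

assert missing_parents('a/b/c/f.txt', lambda p: p == 'a') == ['a/b', 'a/b/c']
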
737 def _svn_path_exists(self, path):
761 def _svn_path_exists(self, path):
738 path_status = svn.fs.check_path(self.txn_root, path)
762 path_status = svn.fs.check_path(self.txn_root, path)
739 return path_status != svn.core.svn_node_none
763 return path_status != svn.core.svn_node_none
740
764
741 def _add_file_if_node_does_not_exist(self):
765 def _add_file_if_node_does_not_exist(self):
742 kind = svn.fs.check_path(self.txn_root, self.node['path'])
766 kind = svn.fs.check_path(self.txn_root, self.node['path'])
743 if kind == svn.core.svn_node_none:
767 if kind == svn.core.svn_node_none:
744 svn.fs.make_file(self.txn_root, self.node['path'])
768 svn.fs.make_file(self.txn_root, self.node['path'])
745
769
746 def _update_file_content(self):
770 def _update_file_content(self):
747 assert isinstance(self.node['content'], str)
771 assert isinstance(self.node['content'], str)
748 handler, baton = svn.fs.apply_textdelta(
772 handler, baton = svn.fs.apply_textdelta(
749 self.txn_root, self.node['path'], None, None)
773 self.txn_root, self.node['path'], None, None)
750 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
774 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
751
775
752 def _update_file_properties(self):
776 def _update_file_properties(self):
753 properties = self.node.get('properties', {})
777 properties = self.node.get('properties', {})
754 for key, value in properties.iteritems():
778 for key, value in properties.iteritems():
755 svn.fs.change_node_prop(
779 svn.fs.change_node_prop(
756 self.txn_root, self.node['path'], key, value)
780 self.txn_root, self.node['path'], key, value)
757
781
758
782
759 def apr_time_t(timestamp):
783 def apr_time_t(timestamp):
760 """
784 """
761 Convert a Python timestamp into APR timestamp type apr_time_t
785 Convert a Python timestamp into APR timestamp type apr_time_t
762 """
786 """
763 return timestamp * 1E6
787 return timestamp * 1E6
764
788
765
789
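
apr_time_t counts microseconds since the Unix epoch, hence the 1E6 factor above. Note that the multiplication yields a float; a defensive variant with an int() cast is sketched here (to_apr_time is illustrative, not part of the module):

import time

def to_apr_time(timestamp):
    # APR timestamps are microseconds since the Unix epoch
    return int(timestamp * 1e6)

assert to_apr_time(1.5) == 1500000
now_usec = to_apr_time(time.time())
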
766 def svn_opt_revision_value_t(num):
790 def svn_opt_revision_value_t(num):
767 """
791 """
768 Put `num` into a `svn_opt_revision_value_t` structure.
792 Put `num` into a `svn_opt_revision_value_t` structure.
769 """
793 """
770 value = svn.core.svn_opt_revision_value_t()
794 value = svn.core.svn_opt_revision_value_t()
771 value.number = num
795 value.number = num
772 revision = svn.core.svn_opt_revision_t()
796 revision = svn.core.svn_opt_revision_t()
773 revision.kind = svn.core.svn_opt_revision_number
797 revision.kind = svn.core.svn_opt_revision_number
774 revision.value = value
798 revision.value = value
775 return revision
799 return revision
@@ -1,165 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.pull(
81 self.remote_git.pull(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire={}, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
107
108
102
109 class TestReraiseSafeExceptions(object):
103 class TestReraiseSafeExceptions(object):
104
110 def test_method_decorated_with_reraise_safe_exceptions(self):
105 def test_method_decorated_with_reraise_safe_exceptions(self):
111 factory = Mock()
106 factory = Mock()
112 git_remote = git.GitRemote(factory)
107 git_remote = git.GitRemote(factory)
113
108
114 def fake_function():
109 def fake_function():
115 return None
110 return None
116
111
117 decorator = git.reraise_safe_exceptions(fake_function)
112 decorator = git.reraise_safe_exceptions(fake_function)
118
113
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 for method_name, method in methods:
115 for method_name, method in methods:
121 if not method_name.startswith('_'):
116 if not method_name.startswith('_'):
122 assert method.im_func.__code__ == decorator.__code__
117 assert method.im_func.__code__ == decorator.__code__
123
118
124 @pytest.mark.parametrize('side_effect, expected_type', [
119 @pytest.mark.parametrize('side_effect, expected_type', [
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.HangupException(), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 ])
126 ])
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 @git.reraise_safe_exceptions
128 @git.reraise_safe_exceptions
134 def fake_method():
129 def fake_method():
135 raise side_effect
130 raise side_effect
136
131
137 with pytest.raises(Exception) as exc_info:
132 with pytest.raises(Exception) as exc_info:
138 fake_method()
133 fake_method()
139 assert type(exc_info.value) == Exception
134 assert type(exc_info.value) == Exception
140 assert exc_info.value._vcs_kind == expected_type
135 assert exc_info.value._vcs_kind == expected_type
141
136
142
137
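
The decorator-coverage tests above compare each method's __code__ against the wrapper produced by reraise_safe_exceptions. The trick works because every call to a decorator returns a fresh function object that shares one compiled code object; in isolation (noisy is made up):

def noisy(func):
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

@noisy
def decorated():
    pass

# all wrappers share wrapper's single compiled code object
assert decorated.__code__ == noisy(lambda: None).__code__
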
143 class TestDulwichRepoWrapper(object):
138 class TestDulwichRepoWrapper(object):
144 def test_calls_close_on_delete(self):
139 def test_calls_close_on_delete(self):
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 with isdir_patcher:
141 with isdir_patcher:
147 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 del repo
144 del repo
150 close_mock.assert_called_once_with()
145 close_mock.assert_called_once_with()
151
146
152
147
153 class TestGitFactory(object):
148 class TestGitFactory(object):
154 def test_create_repo_returns_dulwich_wrapper(self):
149 def test_create_repo_returns_dulwich_wrapper(self):
155
150
156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 mock.side_effect = {'repo_objects': ''}
152 mock.side_effect = {'repo_objects': ''}
158 factory = git.GitFactory()
153 factory = git.GitFactory()
159 wire = {
154 wire = {
160 'path': '/tmp/abcde'
155 'path': '/tmp/abcde'
161 }
156 }
162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 with isdir_patcher:
158 with isdir_patcher:
164 result = factory._create_repo(wire, True)
159 result = factory._create_repo(wire, True)
165 assert isinstance(result, git.Repo)
160 assert isinstance(result, git.Repo)
@@ -1,127 +1,108 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, MagicMock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hg, hgcompat
27
27
28
28
29 class TestHGLookup(object):
30 def setup(self):
31 self.mock_repo = MagicMock()
32 self.mock_repo.__getitem__.side_effect = LookupError(
33 'revision_or_commit_id', 'index', 'message')
34 factory = Mock()
35 factory.repo = Mock(return_value=self.mock_repo)
36 self.remote_hg = hg.HgRemote(factory)
37
38 def test_fail_lookup_hg(self):
39 with pytest.raises(Exception) as exc_info:
40 self.remote_hg.lookup(
41 wire=None, revision='revision_or_commit_id', both=True)
42
43 assert exc_info.value._vcs_kind == 'lookup'
44 assert 'revision_or_commit_id' in exc_info.value.args
45
46
29 class TestDiff(object):
30     def test_raising_safe_exception_when_lookup_failed(self):
49         repo = Mock()
31
32         factory = Mock()
51         factory.repo = Mock(return_value=repo)
33         hg_remote = hg.HgRemote(factory)
34         with patch('mercurial.patch.diff') as diff_mock:
35             diff_mock.side_effect = LookupError(
36                 'deadbeef', 'index', 'message')
37             with pytest.raises(Exception) as exc_info:
38                 hg_remote.diff(
58                     wire=None, rev1='deadbeef', rev2='deadbee1',
39                     wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40                     file_filter=None, opt_git=True, opt_ignorews=True,
41                     context=3)
42             assert type(exc_info.value) == Exception
43             assert exc_info.value._vcs_kind == 'lookup'
44
45
46 class TestReraiseSafeExceptions(object):
47     def test_method_decorated_with_reraise_safe_exceptions(self):
48         factory = Mock()
49         hg_remote = hg.HgRemote(factory)
50         methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
51         decorator = hg.reraise_safe_exceptions(None)
52         for method_name, method in methods:
53             if not method_name.startswith('_'):
54                 assert method.im_func.__code__ == decorator.__code__
55
56     @pytest.mark.parametrize('side_effect, expected_type', [
57         (hgcompat.Abort(), 'abort'),
58         (hgcompat.InterventionRequired(), 'abort'),
59         (hgcompat.RepoLookupError(), 'lookup'),
60         (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
61         (hgcompat.RepoError(), 'error'),
62         (hgcompat.RequirementError(), 'requirement'),
63     ])
64     def test_safe_exceptions_reraised(self, side_effect, expected_type):
65         @hg.reraise_safe_exceptions
66         def fake_method():
67             raise side_effect
68
69         with pytest.raises(Exception) as exc_info:
70             fake_method()
71         assert type(exc_info.value) == Exception
72         assert exc_info.value._vcs_kind == expected_type
73
74     def test_keeps_original_traceback(self):
75         @hg.reraise_safe_exceptions
76         def fake_method():
77             try:
78                 raise hgcompat.Abort()
79             except:
80                 self.original_traceback = traceback.format_tb(
81                     sys.exc_info()[2])
82                 raise
83
84         try:
85             fake_method()
86         except Exception:
87             new_traceback = traceback.format_tb(sys.exc_info()[2])
88
89         new_traceback_tail = new_traceback[-len(self.original_traceback):]
90         assert new_traceback_tail == self.original_traceback
91
92     def test_maps_unknow_exceptions_to_unhandled(self):
93         @hg.reraise_safe_exceptions
94         def stub_method():
95             raise ValueError('stub')
96
97         with pytest.raises(Exception) as exc_info:
98             stub_method()
99         assert exc_info.value._vcs_kind == 'unhandled'
100
101     def test_does_not_map_known_exceptions(self):
102         @hg.reraise_safe_exceptions
103         def stub_method():
104             raise exceptions.LookupException()('stub')
105
106         with pytest.raises(Exception) as exc_info:
107             stub_method()
108         assert exc_info.value._vcs_kind == 'lookup'
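
Taken together, these tests pin down the contract of hg.reraise_safe_exceptions: every backend error must surface as a plain Exception tagged with a _vcs_kind marker, with the original traceback intact, and exceptions that already carry _vcs_kind must pass through unmapped. A minimal sketch of a decorator satisfying that contract (inferred from the tests above, not the vcsserver implementation; Python 2 syntax, matching the im_func usage in these tests):

    import functools
    import sys

    # Exception-class name -> _vcs_kind marker, mirroring the parametrize table.
    KIND_MAP = {
        'Abort': 'abort',
        'InterventionRequired': 'abort',
        'RepoLookupError': 'lookup',
        'LookupError': 'lookup',
        'RepoError': 'error',
        'RequirementError': 'requirement',
    }

    def reraise_safe_exceptions(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                # Known vcsserver exceptions already carry _vcs_kind; anything
                # else is mapped by class name, defaulting to 'unhandled'.
                kind = getattr(e, '_vcs_kind', None) or KIND_MAP.get(
                    type(e).__name__, 'unhandled')
                new_exc = Exception(*e.args)
                new_exc._vcs_kind = kind
                raise new_exc, None, sys.exc_info()[2]  # py2: keep the traceback
        return wrapper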
@@ -1,82 +1,87 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18 import io
19 import mock
20 import pytest
21 import sys
22
23
24 class MockPopen(object):
25     def __init__(self, stderr):
26         self.stdout = io.BytesIO('')
27         self.stderr = io.BytesIO(stderr)
28         self.returncode = 1
29
30     def wait(self):
31         pass
32
33
34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35     'svnrdump: E230001: Unable to connect to a repository at URL url',
36     'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 ])
38
39
40 @pytest.mark.parametrize('stderr,expected_reason', [
41     (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42     ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 @pytest.mark.xfail(sys.platform == "cygwin",
45                    reason="SVN not packaged for Cygwin")
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47     from vcsserver import svn
48     factory = mock.Mock()
49     factory.repo = mock.Mock(return_value=mock.Mock())
50
49     remote = svn.SvnRemote(None)
51     remote = svn.SvnRemote(factory)
52     remote.is_path_valid_repository = lambda wire, path: True
53
54     with mock.patch('subprocess.Popen',
55                     return_value=MockPopen(stderr)):
56         with pytest.raises(Exception) as excinfo:
57             remote.import_remote_repository({'path': 'path'}, 'url')
58
59     expected_error_args = (
60         'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
61
62     assert excinfo.value.args == expected_error_args
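
The expected_reason values imply a small stderr-classification step between the svnrdump output and the raised message. A hypothetical helper consistent with both parametrized cases above (the real parsing lives in vcsserver.svn and may differ):

    def reason_from_stderr(stderr):
        # Hypothetical sketch: classify svnrdump stderr into a reason string.
        if 'E230001' in stderr and 'certificate verification failed' in stderr:
            return 'INVALID_CERTIFICATE'
        return 'UNKNOWN:{}'.format(stderr)

    assert reason_from_stderr('svnrdump: E123456') == 'UNKNOWN:svnrdump: E123456'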
63
64
65 def test_svn_libraries_can_be_imported():
66     import svn
67     import svn.client
68     assert svn.client is not None
69
70
71 @pytest.mark.parametrize('example_url, parts', [
72     ('http://server.com', (None, None, 'http://server.com')),
73     ('http://user@server.com', ('user', None, 'http://user@server.com')),
74     ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75     ('<script>', (None, None, '<script>')),
76     ('http://', (None, None, 'http://')),
77 ])
78 def test_username_password_extraction_from_url(example_url, parts):
79     from vcsserver import svn
80
79     remote = svn.SvnRemote(None)
81     factory = mock.Mock()
82     factory.repo = mock.Mock(return_value=mock.Mock())
83
84     remote = svn.SvnRemote(factory)
85     remote.is_path_valid_repository = lambda wire, path: True
86
87     assert remote.get_url_and_credentials(example_url) == parts
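
The parametrized table above doubles as a specification for get_url_and_credentials. A sketch that satisfies every row using only the Python 2 standard library (assumed behaviour inferred from the test cases, not necessarily vcsserver's own implementation):

    import urlparse  # Python 2 stdlib module, matching this codebase

    def get_url_and_credentials(url):
        # Pull username/password out of the URL; the URL itself is returned
        # unchanged, as the test table expects.
        parsed = urlparse.urlparse(url)
        return parsed.username, parsed.password, url

    assert get_url_and_credentials('http://user:pass@server.com') == (
        'user', 'pass', 'http://user:pass@server.com')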
@@ -1,58 +1,64 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19
18 import time
19 import logging
20
23
21 import vcsserver
22 from vcsserver.utils import safe_str
23
24
25 log = logging.getLogger(__name__)
26
27
28 def get_access_path(request):
29     environ = request.environ
30     return environ.get('PATH_INFO')
31
32
33 def get_user_agent(environ):
34     return environ.get('HTTP_USER_AGENT')
35
36
37 class RequestWrapperTween(object):
38     def __init__(self, handler, registry):
39         self.handler = handler
40         self.registry = registry
41
42         # one-time configuration code goes here
43
44     def __call__(self, request):
45         start = time.time()
46         try:
47             response = self.handler(request)
48         finally:
49             end = time.time()
48
50             total = end - start
49             log.info('IP: %s Request to path: `%s` time: %.3fs',
51             count = request.request_count()
50                      '127.0.0.1', safe_str(get_access_path(request)), end - start)
52             _ver_ = vcsserver.__version__
53             log.info(
54                 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
55                 count, '127.0.0.1', request.environ.get('REQUEST_METHOD'),
56                 safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_)
57
58         return response
59
60
61 def includeme(config):
62     config.add_tween(
57         'vcsserver.tweens.RequestWrapperTween',
63         'vcsserver.tweens.request_wrapper.RequestWrapperTween',
64     )
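
The reworked tween now logs the request method, a per-process request counter, the user agent, and the VCSServer version. request.request_count() is assumed to be registered elsewhere as a Pyramid request method; a minimal sketch of such wiring (hypothetical bootstrap, not the actual vcsserver setup):

    from pyramid.config import Configurator

    _counter = [0]  # process-wide counter; a plain module global works too

    def get_request_counter(request):
        # Hypothetical counterpart to the request.request_count() call above.
        _counter[0] += 1
        return _counter[0]

    def main(global_config, **settings):
        config = Configurator(settings=settings)
        # Exposes get_request_counter as request.request_count().
        config.add_request_method(get_request_counter, 'request_count')
        config.include('vcsserver.tweens.request_wrapper')  # registers the tween
        return config.make_wsgi_app()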