release: Merge default into stable for release preparation
marcink
r810:8ec05ec7 merge stable
@@ -0,0 +1,8 b''
1 ## special libraries that requirements.txt can be extended with,
2 ## useful for debugging and memory tracing
3
4 ## uncomment the inclusion of this file in requirements.txt, then run make generate-pkgs and nix-shell
5
6 objgraph
7 memory-profiler
8 pympler
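These three packages are only useful for ad-hoc debugging of a running worker. A minimal sketch of how they might be combined to inspect memory (illustrative only; the helper below is not part of the repository):

    import objgraph
    from pympler import muppy, summary

    def dump_memory_stats(limit=10):
        # most common live object types, via objgraph
        objgraph.show_most_common_types(limit=limit)
        # aggregate size per object type, via pympler
        summary.print_(summary.summarize(muppy.get_objects()))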
@@ -0,0 +1,27 b''
1 # -*- coding: utf-8 -*-
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20
21 counter = 0
22
23
24 def get_request_counter(request):
25 global counter
26 counter += 1
27 return counter
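A usage note: in a Pyramid application a helper like this is typically exposed on the request object via config.add_request_method. The registration below is an assumption for illustration; the actual wiring lives elsewhere in the code base:

    from pyramid.config import Configurator

    config = Configurator()
    # exposes the counter as request.request_count (attribute name chosen here)
    config.add_request_method(get_request_counter, 'request_count')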
@@ -0,0 +1,19 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,32 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19 class RemoteBase(object):
20 EMPTY_COMMIT = '0' * 40
21
22 @property
23 def region(self):
24 return self._factory._cache_region
25
26 def _cache_on(self, wire):
27 context = wire.get('context', '')
28 context_uid = '{}'.format(context)
29 repo_id = wire.get('repo_id', '')
30 cache = wire.get('cache', True)
31 cache_on = context and cache
32 return cache_on, context_uid, repo_id
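Callers use the returned triple to decide whether a VCS call should go through the dogpile cache region. A minimal sketch of the call pattern inside a RemoteBase subclass, assuming the stock dogpile.cache decorator API (RhodeCode's actual wrappers may differ; expensive_vcs_call is a placeholder):

    def branches(self, wire):  # inside a RemoteBase subclass
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.cache_on_arguments(namespace='repo.{}'.format(repo_id))
        def _branches(_context_uid, _repo_id):
            return expensive_vcs_call(wire)  # placeholder for the real backend call

        if cache_on:
            return _branches(context_uid, repo_id)
        return expensive_vcs_call(wire)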
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.17.4
2 current_version = 4.18.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.17.4
12 state = in_progress
13 version = 4.18.0
16 14
@@ -1,87 +1,237 b''
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 ################################################################################
1 ## -*- coding: utf-8 -*-
4 2
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5 6
6 7 [server:main]
7 ## COMMON ##
8 ; COMMON HOST/IP CONFIG
8 9 host = 0.0.0.0
9 10 port = 9900
10 11
12 ; ##################################################
13 ; WAITRESS WSGI SERVER - Recommended for Development
14 ; ##################################################
15
16 ; use server type
11 17 use = egg:waitress#main
12 18
19 ; number of worker threads
20 threads = 5
21
22 ; MAX BODY SIZE 100GB
23 max_request_body_size = 107374182400
24
25 ; Use poll instead of select; fixes file descriptor limit problems.
26 ; May not work on old Windows systems.
27 asyncore_use_poll = true
28
29
30 ; ###########################
31 ; GUNICORN APPLICATION SERVER
32 ; ###########################
33
34 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
35
36 ; Module to use; this setting shouldn't be changed
37 #use = egg:gunicorn#main
38
39 ; Sets the number of process workers. More workers means more concurrent connections
40 ; RhodeCode can handle at the same time. Each additional worker also increases
41 ; memory usage, as each has its own set of caches.
42 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2CPU = 5 workers, but no more
43 ; than 8-10, except for really big deployments, e.g. 700-1000 users.
44 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
45 ; when using more than 1 worker.
46 #workers = 2
47
48 ; Gunicorn access log level
49 #loglevel = info
50
51 ; Process name visible in process list
52 #proc_name = rhodecode_vcsserver
53
54 ; Type of worker class, one of `sync`, `gevent`
55 ; currently `sync` is the only option allowed.
56 #worker_class = sync
57
58 ; The maximum number of simultaneous clients. Valid only for gevent
59 #worker_connections = 10
60
61 ; Max number of requests that worker will handle before being gracefully restarted.
62 ; Prevents memory leaks; jitter adds variability so not all workers are restarted at once.
63 #max_requests = 1000
64 #max_requests_jitter = 30
65
66 ; Amount of time a worker can spend handling a request before it
67 ; gets killed and restarted. By default set to 21600 (6hrs)
68 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
69 #timeout = 21600
70
71 ; The maximum size of HTTP request line in bytes.
72 ; 0 for unlimited
73 #limit_request_line = 0
74
75 ; Limit the number of HTTP headers fields in a request.
76 ; By default this value is 100 and can't be larger than 32768.
77 #limit_request_fields = 32768
78
79 ; Limit the allowed size of an HTTP request header field.
80 ; Value is a positive number or 0.
81 ; Setting it to 0 will allow unlimited header field sizes.
82 #limit_request_field_size = 0
83
84 ; Timeout for graceful workers restart.
85 ; After receiving a restart signal, workers have this much time to finish
86 ; serving requests. Workers still alive after the timeout (starting from the
87 ; receipt of the restart signal) are force killed.
88 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
89 #graceful_timeout = 3600
90
91 # The number of seconds to wait for requests on a Keep-Alive connection.
92 # Generally set in the 1-5 seconds range.
93 #keepalive = 2
94
95 ; Maximum memory usage that each worker can use before it will receive a
96 ; graceful restart signal. 0 = memory monitoring is disabled
97 ; Examples: 268435456 (256MB), 536870912 (512MB)
98 ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
99 #memory_max_usage = 0
100
101 ; How often in seconds to check for memory usage for each gunicorn worker
102 #memory_usage_check_interval = 60
103
104 ; Threshold value below which we don't recycle the worker if garbage collection
105 ; frees up enough resources. Before each restart we try to run GC on the worker;
106 ; if enough memory is freed after that, the restart will not happen.
107 #memory_usage_recovery_threshold = 0.8
108
13 109
14 110 [app:main]
111 ; The %(here)s variable will be replaced with the absolute path of the parent
112 ; directory of this file
15 113 use = egg:rhodecode-vcsserver
16 114
17 pyramid.default_locale_name = en
115
116 ; #############
117 ; DEBUG OPTIONS
118 ; #############
119
120 # During development we want to have the debug toolbar enabled
18 121 pyramid.includes =
122 pyramid_debugtoolbar
19 123
20 ## default locale used by VCS systems
124 debugtoolbar.hosts = 0.0.0.0/0
125 debugtoolbar.exclude_prefixes =
126 /css
127 /fonts
128 /images
129 /js
130
131 ; #################
132 ; END DEBUG OPTIONS
133 ; #################
134
135 ; Pyramid default locales, we need this to be set
136 pyramid.default_locale_name = en
137
138 ; default locale used by VCS systems
21 139 locale = en_US.UTF-8
22 140
23
24 ## path to binaries for vcsserver, it should be set by the installer
25 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
141 ; path to binaries for vcsserver, it should be set by the installer
142 ; at installation time, e.g /home/user/vcsserver-1/profile/bin
143 ; it can also be a path to nix-build output in case of development
26 144 core.binary_dir = ""
27 145
28 ## Custom exception store path, defaults to TMPDIR
29 ## This is used to store exception from RhodeCode in shared directory
146 ; Custom exception store path, defaults to TMPDIR
147 ; This is used to store exceptions from RhodeCode in a shared directory
30 148 #exception_tracker.store_path =
31 149
32 ## Default cache dir for caches. Putting this into a ramdisk
33 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
34 ## large amount of space
35 cache_dir = %(here)s/rcdev/data
150 ; #############
151 ; DOGPILE CACHE
152 ; #############
153
154 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
155 ; eg. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
156 cache_dir = %(here)s/data
157
158 ; ***************************************
159 ; `repo_object` cache, default file based
160 ; ***************************************
161
162 ; `repo_object` cache settings for vcs methods for repositories
163 rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
164
165 ; cache auto-expires after N seconds
166 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
167 rc_cache.repo_object.expiration_time = 2592000
168
169 ; file cache store path. Defaults to `cache_dir =` value, or tempdir if neither is set
170 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
36 171
37 ## cache region for storing repo_objects cache
38 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
39 ## cache auto-expires after N seconds
40 rc_cache.repo_object.expiration_time = 300
41 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
42 rc_cache.repo_object.max_size = 100
172 ; ***********************************************************
173 ; `repo_object` cache with redis backend
174 ; recommended for larger instances and for better performance
175 ; ***********************************************************
176
177 ; `repo_object` cache settings for vcs methods for repositories
178 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
179
180 ; cache auto-expires after N seconds
181 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
182 #rc_cache.repo_object.expiration_time = 2592000
183
184 ; redis_expiration_time needs to be greater than expiration_time
185 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
186
187 #rc_cache.repo_object.arguments.host = localhost
188 #rc_cache.repo_object.arguments.port = 6379
189 #rc_cache.repo_object.arguments.db = 5
190 #rc_cache.repo_object.arguments.socket_timeout = 30
191 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
192 #rc_cache.repo_object.arguments.distributed_lock = true
43 193
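For reference, the keys above map onto a dogpile.cache region roughly as follows. This is a minimal sketch using the stock dogpile.cache Redis backend; the `dogpile.cache.rc.*` backends referenced in this file are custom variants registered by the application and are not reproduced here:

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=2592000,  # 30 days, as in rc_cache.repo_object.expiration_time
        arguments={
            'host': 'localhost',
            'port': 6379,
            'db': 5,
            'socket_timeout': 30,
            'redis_expiration_time': 3592000,  # must be greater than expiration_time
            'distributed_lock': True,
        },
    )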
44 194
45 ################################
46 ### LOGGING CONFIGURATION ####
47 ################################
195 ; #####################
196 ; LOGGING CONFIGURATION
197 ; #####################
48 198 [loggers]
49 199 keys = root, vcsserver
50 200
51 201 [handlers]
52 202 keys = console
53 203
54 204 [formatters]
55 205 keys = generic
56 206
57 #############
58 ## LOGGERS ##
59 #############
207 ; #######
208 ; LOGGERS
209 ; #######
60 210 [logger_root]
61 211 level = NOTSET
62 212 handlers = console
63 213
64 214 [logger_vcsserver]
65 215 level = DEBUG
66 216 handlers =
67 217 qualname = vcsserver
68 218 propagate = 1
69 219
70 220
71 ##############
72 ## HANDLERS ##
73 ##############
221 ; ########
222 ; HANDLERS
223 ; ########
74 224
75 225 [handler_console]
76 226 class = StreamHandler
77 args = (sys.stderr,)
227 args = (sys.stderr, )
78 228 level = DEBUG
79 229 formatter = generic
80 230
81 ################
82 ## FORMATTERS ##
83 ################
231 ; ##########
232 ; FORMATTERS
233 ; ##########
84 234
85 235 [formatter_generic]
86 236 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
87 237 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,154 +1,265 b''
1 1 """
2 gunicorn config extension and hooks. Sets additional configuration that is
3 available post the .ini config.
4
5 - workers = ${cpu_number}
6 - threads = 1
7 - proc_name = ${gunicorn_proc_name}
8 - worker_class = sync
9 - worker_connections = 10
10 - max_requests = 1000
11 - max_requests_jitter = 30
12 - timeout = 21600
13
2 Gunicorn config extension and hooks. This config file adds some extra settings and memory management.
3 Gunicorn configuration should be managed by the .ini file entries of RhodeCode or VCSServer
14 4 """
15 5
16 import multiprocessing
6 import gc
7 import os
17 8 import sys
9 import math
18 10 import time
19 import datetime
20 11 import threading
21 12 import traceback
13 import random
22 14 from gunicorn.glogging import Logger
23 15
24 16
17 def get_workers():
18 import multiprocessing
19 return multiprocessing.cpu_count() * 2 + 1
20
25 21 # GLOBAL
26 22 errorlog = '-'
27 23 accesslog = '-'
28 loglevel = 'debug'
29
30 # SECURITY
31
32 # The maximum size of HTTP request line in bytes.
33 # 0 for unlimited
34 limit_request_line = 0
35
36 # Limit the number of HTTP headers fields in a request.
37 # By default this value is 100 and can't be larger than 32768.
38 limit_request_fields = 10240
39
40 # Limit the allowed size of an HTTP request header field.
41 # Value is a positive number or 0.
42 # Setting it to 0 will allow unlimited header field sizes.
43 limit_request_field_size = 0
44
45
46 # Timeout for graceful workers restart.
47 # After receiving a restart signal, workers have this much time to finish
48 # serving requests. Workers still alive after the timeout (starting from the
49 # receipt of the restart signal) are force killed.
50 graceful_timeout = 30
51
52
53 # The number of seconds to wait for requests on a Keep-Alive connection.
54 # Generally set in the 1-5 seconds range.
55 keepalive = 2
56 24
57 25
58 26 # SERVER MECHANICS
59 27 # None == system temp dir
60 28 # worker_tmp_dir is recommended to be set to some tmpfs
61 29 worker_tmp_dir = None
62 30 tmp_upload_dir = None
63 31
64 32 # Custom log format
65 33 access_log_format = (
66 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
34 '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
67 35
68 36 # self-adjust workers based on CPU count
69 # workers = multiprocessing.cpu_count() * 2 + 1
37 # workers = get_workers()
38
39
40 def _get_process_rss(pid=None):
41 try:
42 import psutil
43 if pid:
44 proc = psutil.Process(pid)
45 else:
46 proc = psutil.Process()
47 return proc.memory_info().rss
48 except Exception:
49 return None
70 50
71 51
72 def post_fork(server, worker):
73 server.log.info("[<%-10s>] WORKER spawned", worker.pid)
52 def _get_config(ini_path):
53
54 try:
55 import configparser
56 except ImportError:
57 import ConfigParser as configparser
58 try:
59 config = configparser.RawConfigParser()
60 config.read(ini_path)
61 return config
62 except Exception:
63 return None
64
65
66 def _time_with_offset(memory_usage_check_interval):
67 return time.time() - random.randint(0, memory_usage_check_interval/2.0)
74 68
75 69
76 70 def pre_fork(server, worker):
77 71 pass
78 72
79 73
74 def post_fork(server, worker):
75
76 # memory spec defaults
77 _memory_max_usage = 0
78 _memory_usage_check_interval = 60
79 _memory_usage_recovery_threshold = 0.8
80
81 ini_path = os.path.abspath(server.cfg.paste)
82 conf = _get_config(ini_path)
83
84 section = 'server:main'
85 if conf and conf.has_section(section):
86
87 if conf.has_option(section, 'memory_max_usage'):
88 _memory_max_usage = conf.getint(section, 'memory_max_usage')
89
90 if conf.has_option(section, 'memory_usage_check_interval'):
91 _memory_usage_check_interval = conf.getint(section, 'memory_usage_check_interval')
92
93 if conf.has_option(section, 'memory_usage_recovery_threshold'):
94 _memory_usage_recovery_threshold = conf.getfloat(section, 'memory_usage_recovery_threshold')
95
96 worker._memory_max_usage = _memory_max_usage
97 worker._memory_usage_check_interval = _memory_usage_check_interval
98 worker._memory_usage_recovery_threshold = _memory_usage_recovery_threshold
99
100 # register memory last check time, with some random offset so we don't recycle all
101 # at once
102 worker._last_memory_check_time = _time_with_offset(_memory_usage_check_interval)
103
104 if _memory_max_usage:
105 server.log.info("[%-10s] WORKER spawned with max memory set at %s", worker.pid,
106 _format_data_size(_memory_max_usage))
107 else:
108 server.log.info("[%-10s] WORKER spawned", worker.pid)
109
110
80 111 def pre_exec(server):
81 112 server.log.info("Forked child, re-executing.")
82 113
83 114
84 115 def on_starting(server):
85 server.log.info("Server is starting.")
116 server_lbl = '{} {}'.format(server.proc_name, server.address)
117 server.log.info("Server %s is starting.", server_lbl)
86 118
87 119
88 120 def when_ready(server):
89 server.log.info("Server is ready. Spawning workers")
121 server.log.info("Server %s is ready. Spawning workers", server)
90 122
91 123
92 124 def on_reload(server):
93 125 pass
94 126
95 127
128 def _format_data_size(size, unit="B", precision=1, binary=True):
129 """Format a number using SI units (kilo, mega, etc.).
130
131 ``size``: The number as a float or int.
132
133 ``unit``: The unit name in plural form. Examples: "bytes", "B".
134
135 ``precision``: How many digits to the right of the decimal point. Default
136 is 1. 0 suppresses the decimal point.
137
138 ``binary``: If false, use base-10 decimal prefixes (kilo = K = 1000).
139 If true, use base-2 binary prefixes (kibi = Ki = 1024).
140
145 """
146
147 if not binary:
148 base = 1000
149 multiples = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
150 else:
151 base = 1024
152 multiples = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')
153
154 sign = ""
155 if size > 0:
156 m = int(math.log(size, base))
157 elif size < 0:
158 sign = "-"
159 size = -size
160 m = int(math.log(size, base))
161 else:
162 m = 0
163 if m > 8:
164 m = 8
165
166 if m == 0:
167 precision = '%.0f'
168 else:
169 precision = '%%.%df' % precision
170
171 size = precision % (size / math.pow(base, m))
172
173 return '%s%s %s%s' % (sign, size.strip(), multiples[m], unit)
174
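# For orientation, sample values this helper produces (worked out from the
# logic above; these comment lines are annotations, not original code):
#   _format_data_size(268435456)  -> '256.0 MiB'
#   _format_data_size(536870912)  -> '512.0 MiB'
#   _format_data_size(1073741824) -> '1.0 GiB'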
175
176 def _check_memory_usage(worker):
177 memory_max_usage = worker._memory_max_usage
178 if not memory_max_usage:
179 return
180
181 memory_usage_check_interval = worker._memory_usage_check_interval
182 memory_usage_recovery_threshold = memory_max_usage * worker._memory_usage_recovery_threshold
183
184 elapsed = time.time() - worker._last_memory_check_time
185 if elapsed > memory_usage_check_interval:
186 mem_usage = _get_process_rss()
187 if mem_usage and mem_usage > memory_max_usage:
188 worker.log.info(
189 "memory usage %s > %s, forcing gc",
190 _format_data_size(mem_usage), _format_data_size(memory_max_usage))
191 # Try to clean it up by forcing a full collection.
192 gc.collect()
193 mem_usage = _get_process_rss()
194 if mem_usage > memory_usage_recovery_threshold:
195 # Didn't clean up enough, we'll have to terminate.
196 worker.log.warning(
197 "memory usage %s > %s after gc, quitting",
198 _format_data_size(mem_usage), _format_data_size(memory_max_usage))
199 # This will cause worker to auto-restart itself
200 worker.alive = False
201 worker._last_memory_check_time = time.time()
202
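# Worked example of the check above (illustrative numbers, not from the file):
#   memory_max_usage = 1073741824 (1 GiB), recovery threshold = 0.8
#   -> after the forced gc.collect(), the worker survives only if its RSS fell
#      below 1073741824 * 0.8 = 858993459.2 bytes (~819.2 MiB); otherwise
#      worker.alive is set to False and gunicorn replaces the worker.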
203
96 204 def worker_int(worker):
97 worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)
205 worker.log.info("[%-10s] worker received INT or QUIT signal", worker.pid)
98 206
99 207 # get traceback info, on worker crash
100 208 id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
101 209 code = []
102 210 for thread_id, stack in sys._current_frames().items():
103 211 code.append(
104 212 "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
105 213 for fname, lineno, name, line in traceback.extract_stack(stack):
106 214 code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
107 215 if line:
108 216 code.append(" %s" % (line.strip()))
109 217 worker.log.debug("\n".join(code))
110 218
111 219
112 220 def worker_abort(worker):
113 worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid)
221 worker.log.info("[%-10s] worker received SIGABRT signal", worker.pid)
114 222
115 223
116 224 def worker_exit(server, worker):
117 worker.log.info("[<%-10s>] worker exit", worker.pid)
225 worker.log.info("[%-10s] worker exit", worker.pid)
118 226
119 227
120 228 def child_exit(server, worker):
121 worker.log.info("[<%-10s>] worker child exit", worker.pid)
229 worker.log.info("[%-10s] worker child exit", worker.pid)
122 230
123 231
124 232 def pre_request(worker, req):
125 233 worker.start_time = time.time()
126 234 worker.log.debug(
127 235 "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
128 236
129 237
130 238 def post_request(worker, req, environ, resp):
131 239 total_time = time.time() - worker.start_time
240 # Gunicorn sometimes has problems with reading the status_code
241 status_code = getattr(resp, 'status_code', '')
132 242 worker.log.debug(
133 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
134 worker.nr, req.method, req.path, resp.status_code, total_time)
243 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.4fs",
244 worker.nr, req.method, req.path, status_code, total_time)
245 _check_memory_usage(worker)
135 246
136 247
137 248 class RhodeCodeLogger(Logger):
138 249 """
139 250 Custom Logger that allows some customization that gunicorn doesn't allow
140 251 """
141 252
142 253 datefmt = r"%Y-%m-%d %H:%M:%S"
143 254
144 255 def __init__(self, cfg):
145 256 Logger.__init__(self, cfg)
146 257
147 258 def now(self):
148 259 """ return date in RhodeCode Log format """
149 260 now = time.time()
150 261 msecs = int((now - long(now)) * 1000)
151 262 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
152 263
153 264
154 265 logger_class = RhodeCodeLogger
@@ -1,108 +1,200 b''
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 ################################################################################
1 ## -*- coding: utf-8 -*-
4 2
3 ; #################################
4 ; RHODECODE VCSSERVER CONFIGURATION
5 ; #################################
5 6
6 7 [server:main]
7 ## COMMON ##
8 ; COMMON HOST/IP CONFIG
8 9 host = 127.0.0.1
9 10 port = 9900
10 11
11 12
12 ##########################
13 ## GUNICORN WSGI SERVER ##
14 ##########################
15 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
13 ; ###########################
14 ; GUNICORN APPLICATION SERVER
15 ; ###########################
16
17 ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
18
19 ; Module to use; this setting shouldn't be changed
16 20 use = egg:gunicorn#main
17 ## Sets the number of process workers. Recommended
18 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
21
22 ; Sets the number of process workers. More workers means more concurrent connections
23 ; RhodeCode can handle at the same time. Each additional worker also increases
24 ; memory usage, as each has its own set of caches.
25 ; Recommended value is (2 * NUMBER_OF_CPUS + 1), e.g. 2CPU = 5 workers, but no more
26 ; than 8-10, except for really big deployments, e.g. 700-1000 users.
27 ; `instance_id = *` must be set in the [app:main] section below (which is the default)
28 ; when using more than 1 worker.
19 29 workers = 2
20 ## process name
30
31 ; Gunicorn access log level
32 loglevel = info
33
34 ; Process name visible in process list
21 35 proc_name = rhodecode_vcsserver
22 ## type of worker class, currently `sync` is the only option allowed.
36
37 ; Type of worker class, one of `sync`, `gevent`
38 ; currently `sync` is the only option allowed.
23 39 worker_class = sync
24 ## The maximum number of simultaneous clients. Valid only for Gevent
25 #worker_connections = 10
26 ## max number of requests that worker will handle before being gracefully
27 ## restarted, could prevent memory leaks
40
41 ; The maximum number of simultaneous clients. Valid only for gevent
42 worker_connections = 10
43
44 ; Max number of requests that worker will handle before being gracefully restarted.
45 ; Prevents memory leaks; jitter adds variability so not all workers are restarted at once.
28 46 max_requests = 1000
29 47 max_requests_jitter = 30
30 ## amount of time a worker can spend with handling a request before it
31 ## gets killed and restarted. Set to 6hrs
48
49 ; Amount of time a worker can spend handling a request before it
50 ; gets killed and restarted. By default set to 21600 (6hrs)
51 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
32 52 timeout = 21600
33 53
54 ; The maximum size of HTTP request line in bytes.
55 ; 0 for unlimited
56 limit_request_line = 0
57
58 ; Limit the number of HTTP headers fields in a request.
59 ; By default this value is 100 and can't be larger than 32768.
60 limit_request_fields = 32768
61
62 ; Limit the allowed size of an HTTP request header field.
63 ; Value is a positive number or 0.
64 ; Setting it to 0 will allow unlimited header field sizes.
65 limit_request_field_size = 0
66
67 ; Timeout for graceful workers restart.
68 ; After receiving a restart signal, workers have this much time to finish
69 ; serving requests. Workers still alive after the timeout (starting from the
70 ; receipt of the restart signal) are force killed.
71 ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h)
72 graceful_timeout = 3600
73
74 # The number of seconds to wait for requests on a Keep-Alive connection.
75 # Generally set in the 1-5 seconds range.
76 keepalive = 2
77
78 ; Maximum memory usage that each worker can use before it will receive a
79 ; graceful restart signal. 0 = memory monitoring is disabled
80 ; Examples: 268435456 (256MB), 536870912 (512MB)
81 ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB)
82 memory_max_usage = 0
83
84 ; How often in seconds to check for memory usage for each gunicorn worker
85 memory_usage_check_interval = 60
86
87 ; Threshold value below which we don't recycle the worker if garbage collection
88 ; frees up enough resources. Before each restart we try to run GC on the worker;
89 ; if enough memory is freed after that, the restart will not happen.
90 memory_usage_recovery_threshold = 0.8
91
34 92
35 93 [app:main]
94 ; The %(here)s variable will be replaced with the absolute path of the parent
95 ; directory of this file
36 96 use = egg:rhodecode-vcsserver
37 97
98 ; Pyramid default locales, we need this to be set
38 99 pyramid.default_locale_name = en
39 pyramid.includes =
40 100
41 ## default locale used by VCS systems
101 ; default locale used by VCS systems
42 102 locale = en_US.UTF-8
43 103
44
45 ## path to binaries for vcsserver, it should be set by the installer
46 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
104 ; path to binaries for vcsserver, it should be set by the installer
105 ; at installation time, e.g /home/user/vcsserver-1/profile/bin
106 ; it can also be a path to nix-build output in case of development
47 107 core.binary_dir = ""
48 108
49 ## Custom exception store path, defaults to TMPDIR
50 ## This is used to store exception from RhodeCode in shared directory
109 ; Custom exception store path, defaults to TMPDIR
110 ; This is used to store exceptions from RhodeCode in a shared directory
51 111 #exception_tracker.store_path =
52 112
53 ## Default cache dir for caches. Putting this into a ramdisk
54 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
55 ## large amount of space
56 cache_dir = %(here)s/rcdev/data
113 ; #############
114 ; DOGPILE CACHE
115 ; #############
116
117 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
118 ; eg. /tmpfs/data_ramdisk; however, this directory might require a large amount of space
119 cache_dir = %(here)s/data
120
121 ; ***************************************
122 ; `repo_object` cache, default file based
123 ; ***************************************
124
125 ; `repo_object` cache settings for vcs methods for repositories
126 rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace
127
128 ; cache auto-expires after N seconds
129 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
130 rc_cache.repo_object.expiration_time = 2592000
131
132 ; file cache store path. Defaults to `cache_dir =` value, or tempdir if neither is set
133 #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db
57 134
58 ## cache region for storing repo_objects cache
59 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
60 ## cache auto-expires after N seconds
61 rc_cache.repo_object.expiration_time = 300
62 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
63 rc_cache.repo_object.max_size = 100
135 ; ***********************************************************
136 ; `repo_object` cache with redis backend
137 ; recommended for larger instances and for better performance
138 ; ***********************************************************
139
140 ; `repo_object` cache settings for vcs methods for repositories
141 #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack
142
143 ; cache auto-expires after N seconds
144 ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days)
145 #rc_cache.repo_object.expiration_time = 2592000
146
147 ; redis_expiration_time needs to be greater than expiration_time
148 #rc_cache.repo_object.arguments.redis_expiration_time = 3592000
149
150 #rc_cache.repo_object.arguments.host = localhost
151 #rc_cache.repo_object.arguments.port = 6379
152 #rc_cache.repo_object.arguments.db = 5
153 #rc_cache.repo_object.arguments.socket_timeout = 30
154 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
155 #rc_cache.repo_object.arguments.distributed_lock = true
64 156
65 157
66 ################################
67 ### LOGGING CONFIGURATION ####
68 ################################
158 ; #####################
159 ; LOGGING CONFIGURATION
160 ; #####################
69 161 [loggers]
70 162 keys = root, vcsserver
71 163
72 164 [handlers]
73 165 keys = console
74 166
75 167 [formatters]
76 168 keys = generic
77 169
78 #############
79 ## LOGGERS ##
80 #############
170 ; #######
171 ; LOGGERS
172 ; #######
81 173 [logger_root]
82 174 level = NOTSET
83 175 handlers = console
84 176
85 177 [logger_vcsserver]
86 178 level = DEBUG
87 179 handlers =
88 180 qualname = vcsserver
89 181 propagate = 1
90 182
91 183
92 ##############
93 ## HANDLERS ##
94 ##############
184 ; ########
185 ; HANDLERS
186 ; ########
95 187
96 188 [handler_console]
97 189 class = StreamHandler
98 args = (sys.stderr,)
99 level = DEBUG
190 args = (sys.stderr, )
191 level = INFO
100 192 formatter = generic
101 193
102 ################
103 ## FORMATTERS ##
104 ################
194 ; ##########
195 ; FORMATTERS
196 ; ##########
105 197
106 198 [formatter_generic]
107 199 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
108 200 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,47 +1,71 b''
1 1 self: super: {
2
2 3 # bump GIT version
3 4 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 name = "git-2.19.2";
5 name = "git-2.24.1";
5 6 src = self.fetchurl {
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.19.2.tar.xz";
7 sha256 = "1scbggzghkzzfqg4ky3qh7h9w87c3zya4ls5disz7dbx56is7sgw";
7 url = "https://www.kernel.org/pub/software/scm/git/git-2.24.1.tar.xz";
8 sha256 = "0ql5z31vgl7b785gwrf00m129mg7zi9pa65n12ij3mpxx3f28gvj";
8 9 };
9 10
10 11 # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
11 12 patches = [
12 13 ./patches/git/docbook2texi.patch
13 14 ./patches/git/git-sh-i18n.patch
14 15 ./patches/git/ssh-path.patch
15 16 ./patches/git/git-send-email-honor-PATH.patch
16 17 ./patches/git/installCheck-path.patch
17 18 ];
18 19
19 20 });
20 21
22 libgit2rc = super.lib.overrideDerivation super.libgit2 (oldAttrs: {
23 name = "libgit2-0.28.2";
24 version = "0.28.2";
25
26 src = self.fetchFromGitHub {
27 owner = "libgit2";
28 repo = "libgit2";
29 rev = "v0.28.2";
30 sha256 = "0cm8fvs05rj0baigs2133q5a0sm3pa234y8h6hmwhl2bz9xq3k4b";
31 };
32
33 cmakeFlags = [ "-DTHREADSAFE=ON" "-DUSE_HTTPS=no"];
34
35 buildInputs = [
36 super.zlib
37 super.libssh2
38 super.openssl
39 super.curl
40 ];
41
42
43 });
44
21 45 # Override subversion derivation to
22 46 # - activate python bindings
23 47 subversion =
24 48 let
25 49 subversionWithPython = super.subversion.override {
26 50 httpSupport = true;
27 51 pythonBindings = true;
28 52 python = self.python27Packages.python;
29 53 };
30 54 in
31 55 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
32 name = "subversion-1.10.2";
56 name = "subversion-1.12.2";
33 57 src = self.fetchurl {
34 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
35 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
58 url = "https://archive.apache.org/dist/subversion/subversion-1.12.2.tar.gz";
59 sha256 = "1wr1pklnq67xdzmf237zj6l1hg43yshfkbxvpvd5sv6r0dk7v4pl";
36 60 };
37 61
38 62 ## use internal lz4/utf8proc because it is stable and shipped with SVN
39 63 configureFlags = oldAttrs.configureFlags ++ [
40 64 " --with-lz4=internal"
41 65 " --with-utf8proc=internal"
42 66 ];
43 67
44
45 68 });
46 69
70
47 71 }
@@ -1,37 +1,38 b''
1 1 This patch does two things: (1) use the right name for `docbook2texi',
2 2 and (2) make sure `gitman.info' isn't produced since it's broken (duplicate
3 3 node names).
4 4
5 5 diff --git a/Documentation/Makefile b/Documentation/Makefile
6 index 26a2342bea..ceccd67ebb 100644
6 7 --- a/Documentation/Makefile
7 8 +++ b/Documentation/Makefile
8 @@ -122,7 +122,7 @@
9 @@ -132,7 +132,7 @@ HTML_REPO = ../../git-htmldocs
9 10
10 11 MAKEINFO = makeinfo
11 12 INSTALL_INFO = install-info
12 13 -DOCBOOK2X_TEXI = docbook2x-texi
13 14 +DOCBOOK2X_TEXI = docbook2texi
14 15 DBLATEX = dblatex
15 16 ASCIIDOC_DBLATEX_DIR = /etc/asciidoc/dblatex
16 17 DBLATEX_COMMON = -p $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.xsl -s $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.sty
17 @@ -240,7 +240,7 @@
18 @@ -250,7 +250,7 @@ man1: $(DOC_MAN1)
18 19 man5: $(DOC_MAN5)
19 20 man7: $(DOC_MAN7)
20 21
21 22 -info: git.info gitman.info
22 23 +info: git.info
23 24
24 25 pdf: user-manual.pdf
25 26
26 @@ -256,10 +256,9 @@
27 @@ -266,10 +266,9 @@ install-man: man
27 28
28 29 install-info: info
29 30 $(INSTALL) -d -m 755 $(DESTDIR)$(infodir)
30 31 - $(INSTALL) -m 644 git.info gitman.info $(DESTDIR)$(infodir)
31 32 + $(INSTALL) -m 644 git.info $(DESTDIR)$(infodir)
32 33 if test -r $(DESTDIR)$(infodir)/dir; then \
33 34 $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) git.info ;\
34 35 - $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) gitman.info ;\
35 36 else \
36 37 echo "No directory found in $(DESTDIR)$(infodir)" >&2 ; \
37 38 fi
@@ -1,26 +1,28 b''
1 1 diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt
2 index 1afe9fc858..05dd7c3a90 100644
2 3 --- a/Documentation/git-send-email.txt
3 4 +++ b/Documentation/git-send-email.txt
4 @@ -208,8 +208,7 @@ a password is obtained using 'git-credential'.
5 @@ -215,8 +215,7 @@ a password is obtained using 'git-credential'.
5 6 specify a full pathname of a sendmail-like program instead;
6 7 the program must support the `-i` option. Default value can
7 8 be specified by the `sendemail.smtpServer` configuration
8 9 - option; the built-in default is to search for `sendmail` in
9 10 - `/usr/sbin`, `/usr/lib` and $PATH if such program is
10 11 + option; the built-in default is to search in $PATH if such program is
11 12 available, falling back to `localhost` otherwise.
12 13
13 14 --smtp-server-port=<port>::
14 15 diff --git a/git-send-email.perl b/git-send-email.perl
16 index 8eb63b5a2f..74a61d8213 100755
15 17 --- a/git-send-email.perl
16 18 +++ b/git-send-email.perl
17 @@ -944,8 +944,7 @@ if (defined $reply_to) {
19 @@ -956,8 +956,7 @@ sub expand_one_alias {
18 20 }
19 21
20 22 if (!defined $smtp_server) {
21 23 - my @sendmail_paths = qw( /usr/sbin/sendmail /usr/lib/sendmail );
22 24 - push @sendmail_paths, map {"$_/sendmail"} split /:/, $ENV{PATH};
23 25 + my @sendmail_paths = map {"$_/sendmail"} split /:/, $ENV{PATH};
24 26 foreach (@sendmail_paths) {
25 27 if (-x $_) {
26 28 $smtp_server = $_;
@@ -1,94 +1,23 b''
1 diff --git a/git-sh-i18n.sh b/git-sh-i18n.sh
2 index e1d917fd27..e90f8e1414 100644
1 3 --- a/git-sh-i18n.sh
2 4 +++ b/git-sh-i18n.sh
3 @@ -15,87 +15,11 @@
4 fi
5 export TEXTDOMAINDIR
6
7 -# First decide what scheme to use...
8 -GIT_INTERNAL_GETTEXT_SH_SCHEME=fallthrough
9 -if test -n "$GIT_GETTEXT_POISON"
10 -then
11 - GIT_INTERNAL_GETTEXT_SH_SCHEME=poison
12 -elif test -n "@@USE_GETTEXT_SCHEME@@"
13 -then
14 - GIT_INTERNAL_GETTEXT_SH_SCHEME="@@USE_GETTEXT_SCHEME@@"
15 -elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS"
16 -then
17 - : no probing necessary
5 @@ -26,7 +26,7 @@ then
6 elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS"
7 then
8 : no probing necessary
18 9 -elif type gettext.sh >/dev/null 2>&1
19 -then
20 - # GNU libintl's gettext.sh
21 - GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
22 -elif test "$(gettext -h 2>&1)" = "-h"
23 -then
24 - # gettext binary exists but no gettext.sh. likely to be a gettext
25 - # binary on a Solaris or something that is not GNU libintl and
26 - # lack eval_gettext.
27 - GIT_INTERNAL_GETTEXT_SH_SCHEME=gettext_without_eval_gettext
28 -fi
29 -export GIT_INTERNAL_GETTEXT_SH_SCHEME
30 -
31 -# ... and then follow that decision.
32 -case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in
33 -gnu)
34 - # Use libintl's gettext.sh, or fall back to English if we can't.
10 +elif type @gettext@/bin/gettext.sh >/dev/null 2>&1
11 then
12 # GNU libintl's gettext.sh
13 GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
14 @@ -43,7 +43,8 @@ export GIT_INTERNAL_GETTEXT_SH_SCHEME
15 case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in
16 gnu)
17 # Use libintl's gettext.sh, or fall back to English if we can't.
35 18 - . gettext.sh
36 - ;;
37 -gettext_without_eval_gettext)
38 - # Solaris has a gettext(1) but no eval_gettext(1)
39 - eval_gettext () {
40 - gettext "$1" | (
41 - export PATH $(git sh-i18n--envsubst --variables "$1");
42 - git sh-i18n--envsubst "$1"
43 - )
44 - }
45 -
46 - eval_ngettext () {
47 - ngettext "$1" "$2" "$3" | (
48 - export PATH $(git sh-i18n--envsubst --variables "$2");
49 - git sh-i18n--envsubst "$2"
50 - )
51 - }
52 - ;;
53 -poison)
54 - # Emit garbage so that tests that incorrectly rely on translatable
55 - # strings will fail.
56 - gettext () {
57 - printf "%s" "# GETTEXT POISON #"
58 - }
59 -
60 - eval_gettext () {
61 - printf "%s" "# GETTEXT POISON #"
62 - }
63 -
64 - eval_ngettext () {
65 - printf "%s" "# GETTEXT POISON #"
66 - }
67 - ;;
68 -*)
69 - gettext () {
70 - printf "%s" "$1"
71 - }
72 -
73 - eval_gettext () {
74 - printf "%s" "$1" | (
75 - export PATH $(git sh-i18n--envsubst --variables "$1");
76 - git sh-i18n--envsubst "$1"
77 - )
78 - }
79 +# GNU gettext
80 +export GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
81 +export PATH=@gettext@/bin:$PATH
82
83 - eval_ngettext () {
84 - (test "$3" = 1 && printf "%s" "$1" || printf "%s" "$2") | (
85 - export PATH $(git sh-i18n--envsubst --variables "$2");
86 - git sh-i18n--envsubst "$2"
87 - )
88 - }
89 - ;;
90 -esac
91 +. @gettext@/bin/gettext.sh
92
93 # Git-specific wrapper functions
94 gettextln () {
19 + . @gettext@/bin/gettext.sh
20 + export PATH=@gettext@/bin:$PATH
21 ;;
22 gettext_without_eval_gettext)
23 # Solaris has a gettext(1) but no eval_gettext(1)
@@ -1,12 +1,13 b''
1 1 diff --git a/t/test-lib.sh b/t/test-lib.sh
2 index 8665b0a9b6..8bb892b1af 100644
2 3 --- a/t/test-lib.sh
3 4 +++ b/t/test-lib.sh
4 @@ -923,7 +923,7 @@
5 @@ -1227,7 +1227,7 @@ elif test -n "$GIT_TEST_INSTALLED"
5 6 then
6 7 GIT_EXEC_PATH=$($GIT_TEST_INSTALLED/git --exec-path) ||
7 8 error "Cannot run git from $GIT_TEST_INSTALLED."
8 - PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR:$PATH
9 - PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$PATH
9 10 + PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$GIT_BUILD_DIR:$PATH
10 11 GIT_EXEC_PATH=${GIT_TEST_EXEC_PATH:-$GIT_EXEC_PATH}
11 12 else # normal case, use ../bin-wrappers only unless $with_dashes:
12 git_bin_dir="$GIT_BUILD_DIR/bin-wrappers"
13 if test -n "$no_bin_wrappers"
@@ -1,26 +1,26 b''
1 1 diff --git a/connect.c b/connect.c
2 index c3a014c5b..fbca3262b 100644
2 index 4813f005ab..b3f12f3268 100644
3 3 --- a/connect.c
4 4 +++ b/connect.c
5 @@ -1010,7 +1010,7 @@ static void fill_ssh_args(struct child_process *conn, const char *ssh_host,
5 @@ -1183,7 +1183,7 @@ static void fill_ssh_args(struct child_process *conn, const char *ssh_host,
6 6
7 7 ssh = getenv("GIT_SSH");
8 8 if (!ssh)
9 9 - ssh = "ssh";
10 10 + ssh = "@ssh@";
11 11 variant = determine_ssh_variant(ssh, 0);
12 12 }
13 13
14 14 diff --git a/git-gui/lib/remote_add.tcl b/git-gui/lib/remote_add.tcl
15 index 480a6b30d..781720424 100644
15 index 480a6b30d0..7817204241 100644
16 16 --- a/git-gui/lib/remote_add.tcl
17 17 +++ b/git-gui/lib/remote_add.tcl
18 18 @@ -139,7 +139,7 @@ method _add {} {
19 19 # Parse the location
20 20 if { [regexp {(?:git\+)?ssh://([^/]+)(/.+)} $location xx host path]
21 21 || [regexp {([^:][^:]+):(.+)} $location xx host path]} {
22 22 - set ssh ssh
23 23 + set ssh @ssh@
24 24 if {[info exists env(GIT_SSH)]} {
25 25 set ssh $env(GIT_SSH)
26 26 }
@@ -1,60 +1,72 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 7 { pkgs
8 8 , basePythonPackages
9 9 }:
10 10
11 11 let
12 12 sed = "sed -i";
13 13
14 14 in
15 15
16 16 self: super: {
17 17
18 "cffi" = super."cffi".override (attrs: {
19 buildInputs = [
20 pkgs.libffi
21 ];
22 });
23
18 24 "gevent" = super."gevent".override (attrs: {
19 25 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
20 26 # NOTE: (marcink) odd requirements from gevent aren't set properly,
21 27 # thus we need to inject psutil manually
22 28 self."psutil"
23 29 ];
24 30 });
25 31
26 32 "hgsubversion" = super."hgsubversion".override (attrs: {
27 33 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
28 34 pkgs.sqlite
29 35 #basePythonPackages.sqlite3
30 36 self.mercurial
31 37 ];
32 38 });
33 39
34 40 "subvertpy" = super."subvertpy".override (attrs: {
35 41 SVN_PREFIX = "${pkgs.subversion.dev}";
36 42 propagatedBuildInputs = [
37 43 pkgs.apr.dev
38 44 pkgs.aprutil
39 45 pkgs.subversion
40 46 ];
41 47 });
42 48
43 49 "mercurial" = super."mercurial".override (attrs: {
44 50 propagatedBuildInputs = [
45 51 # self.python.modules.curses
46 52 ];
47 53 });
48 54
49 55 "dulwich" = super."dulwich".override (attrs: {
50 56 patches = [
51 57 ./patches/dulwich/handle-dir-refs.patch
52 58 ];
53 59 });
54 60
61 "pygit2" = super."pygit2".override (attrs: {
62 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
63 pkgs.libffi
64 pkgs.libgit2rc
65 ];
66 });
55 67
56 68 # Avoid that base packages screw up the build process
57 69 inherit (basePythonPackages)
58 70 setuptools;
59 71
60 72 }
@@ -1,948 +1,1090 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.2.1";
8 name = "atomicwrites-1.3.0";
9 9 doCheck = false;
10 10 src = fetchurl {
11 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
12 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 name = "attrs-18.2.0";
19 name = "attrs-19.3.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
23 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 "cffi" = super.buildPythonPackage {
52 name = "cffi-1.12.3";
53 doCheck = false;
54 propagatedBuildInputs = [
55 self."pycparser"
56 ];
57 src = fetchurl {
58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 };
61 meta = {
62 license = [ pkgs.lib.licenses.mit ];
63 };
64 };
51 65 "configobj" = super.buildPythonPackage {
52 66 name = "configobj-5.0.6";
53 67 doCheck = false;
54 68 propagatedBuildInputs = [
55 69 self."six"
56 70 ];
57 71 src = fetchurl {
58 72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
59 73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
60 74 };
61 75 meta = {
62 76 license = [ pkgs.lib.licenses.bsdOriginal ];
63 77 };
64 78 };
79 "configparser" = super.buildPythonPackage {
80 name = "configparser-4.0.2";
81 doCheck = false;
82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 };
86 meta = {
87 license = [ pkgs.lib.licenses.mit ];
88 };
89 };
90 "contextlib2" = super.buildPythonPackage {
91 name = "contextlib2-0.6.0.post1";
92 doCheck = false;
93 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 };
97 meta = {
98 license = [ pkgs.lib.licenses.psfl ];
99 };
100 };
65 101 "cov-core" = super.buildPythonPackage {
66 102 name = "cov-core-1.15.0";
67 103 doCheck = false;
68 104 propagatedBuildInputs = [
69 105 self."coverage"
70 106 ];
71 107 src = fetchurl {
72 108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
73 109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
74 110 };
75 111 meta = {
76 112 license = [ pkgs.lib.licenses.mit ];
77 113 };
78 114 };
79 115 "coverage" = super.buildPythonPackage {
80 name = "coverage-4.5.3";
116 name = "coverage-4.5.4";
81 117 doCheck = false;
82 118 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
84 sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
85 121 };
86 122 meta = {
87 123 license = [ pkgs.lib.licenses.asl20 ];
88 124 };
89 125 };
90 126 "decorator" = super.buildPythonPackage {
91 127 name = "decorator-4.1.2";
92 128 doCheck = false;
93 129 src = fetchurl {
94 130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
95 131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
96 132 };
97 133 meta = {
98 134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
99 135 };
100 136 };
101 137 "dogpile.cache" = super.buildPythonPackage {
102 name = "dogpile.cache-0.7.1";
138 name = "dogpile.cache-0.9.0";
103 139 doCheck = false;
104 140 propagatedBuildInputs = [
105 141 self."decorator"
106 142 ];
107 143 src = fetchurl {
108 url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
109 sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
110 146 };
111 147 meta = {
112 148 license = [ pkgs.lib.licenses.bsdOriginal ];
113 149 };
114 150 };
115 151 "dogpile.core" = super.buildPythonPackage {
116 152 name = "dogpile.core-0.4.1";
117 153 doCheck = false;
118 154 src = fetchurl {
119 155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
120 156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
121 157 };
122 158 meta = {
123 159 license = [ pkgs.lib.licenses.bsdOriginal ];
124 160 };
125 161 };
126 162 "dulwich" = super.buildPythonPackage {
127 163 name = "dulwich-0.13.0";
128 164 doCheck = false;
129 165 src = fetchurl {
130 166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
131 167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
132 168 };
133 169 meta = {
134 170 license = [ pkgs.lib.licenses.gpl2Plus ];
135 171 };
136 172 };
137 173 "enum34" = super.buildPythonPackage {
138 174 name = "enum34-1.1.6";
139 175 doCheck = false;
140 176 src = fetchurl {
141 177 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
142 178 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
143 179 };
144 180 meta = {
145 181 license = [ pkgs.lib.licenses.bsdOriginal ];
146 182 };
147 183 };
148 184 "funcsigs" = super.buildPythonPackage {
149 185 name = "funcsigs-1.0.2";
150 186 doCheck = false;
151 187 src = fetchurl {
152 188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
153 189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
154 190 };
155 191 meta = {
156 192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
157 193 };
158 194 };
159 195 "gevent" = super.buildPythonPackage {
160 196 name = "gevent-1.4.0";
161 197 doCheck = false;
162 198 propagatedBuildInputs = [
163 199 self."greenlet"
164 200 ];
165 201 src = fetchurl {
166 202 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
167 203 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
168 204 };
169 205 meta = {
170 206 license = [ pkgs.lib.licenses.mit ];
171 207 };
172 208 };
173 209 "gprof2dot" = super.buildPythonPackage {
174 210 name = "gprof2dot-2017.9.19";
175 211 doCheck = false;
176 212 src = fetchurl {
177 213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
178 214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
179 215 };
180 216 meta = {
181 217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
182 218 };
183 219 };
184 220 "greenlet" = super.buildPythonPackage {
185 221 name = "greenlet-0.4.15";
186 222 doCheck = false;
187 223 src = fetchurl {
188 224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
189 225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
190 226 };
191 227 meta = {
192 228 license = [ pkgs.lib.licenses.mit ];
193 229 };
194 230 };
195 231 "gunicorn" = super.buildPythonPackage {
196 232 name = "gunicorn-19.9.0";
197 233 doCheck = false;
198 234 src = fetchurl {
199 235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
200 236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
201 237 };
202 238 meta = {
203 239 license = [ pkgs.lib.licenses.mit ];
204 240 };
205 241 };
206 242 "hg-evolve" = super.buildPythonPackage {
207 name = "hg-evolve-8.5.1";
243 name = "hg-evolve-9.1.0";
208 244 doCheck = false;
209 245 src = fetchurl {
210 url = "https://files.pythonhosted.org/packages/e3/ce/6594aa403e3464831d4daf20e45fd2e3ef553d968ac13d2c7fa791d4eedd/hg-evolve-8.5.1.tar.gz";
211 sha256 = "09avqn7c1biz97vb1zw91q6nfzydpcqv43mgpfrj7ywp0fscfgf3";
246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
212 248 };
213 249 meta = {
214 250 license = [ { fullName = "GPLv2+"; } ];
215 251 };
216 252 };
217 253 "hgsubversion" = super.buildPythonPackage {
218 254 name = "hgsubversion-1.9.3";
219 255 doCheck = false;
220 256 propagatedBuildInputs = [
221 257 self."mercurial"
222 258 self."subvertpy"
223 259 ];
224 260 src = fetchurl {
225 261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
226 262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
227 263 };
228 264 meta = {
229 265 license = [ pkgs.lib.licenses.gpl1 ];
230 266 };
231 267 };
232 268 "hupper" = super.buildPythonPackage {
233 name = "hupper-1.6.1";
269 name = "hupper-1.9.1";
234 270 doCheck = false;
235 271 src = fetchurl {
236 url = "https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz";
237 sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy";
272 url = "https://files.pythonhosted.org/packages/09/3a/4f215659f31eeffe364a984dba486bfa3907bfcc54b7013bdfe825cebb5f/hupper-1.9.1.tar.gz";
273 sha256 = "0pyg879fv9mbwlnbzw2a3234qqycqs9l97h5mpkmk0bvxhi2471v";
238 274 };
239 275 meta = {
240 276 license = [ pkgs.lib.licenses.mit ];
241 277 };
242 278 };
279 "importlib-metadata" = super.buildPythonPackage {
280 name = "importlib-metadata-0.23";
281 doCheck = false;
282 propagatedBuildInputs = [
283 self."zipp"
284 self."contextlib2"
285 self."configparser"
286 self."pathlib2"
287 ];
288 src = fetchurl {
289 url = "https://files.pythonhosted.org/packages/5d/44/636bcd15697791943e2dedda0dbe098d8530a38d113b202817133e0b06c0/importlib_metadata-0.23.tar.gz";
290 sha256 = "09mdqdfv5rdrwz80jh9m379gxmvk2vhjfz0fg53hid00icvxf65a";
291 };
292 meta = {
293 license = [ pkgs.lib.licenses.asl20 ];
294 };
295 };
243 296 "ipdb" = super.buildPythonPackage {
244 297 name = "ipdb-0.12";
245 298 doCheck = false;
246 299 propagatedBuildInputs = [
247 300 self."setuptools"
248 301 self."ipython"
249 302 ];
250 303 src = fetchurl {
251 304 url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
252 305 sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
253 306 };
254 307 meta = {
255 308 license = [ pkgs.lib.licenses.bsdOriginal ];
256 309 };
257 310 };
258 311 "ipython" = super.buildPythonPackage {
259 312 name = "ipython-5.1.0";
260 313 doCheck = false;
261 314 propagatedBuildInputs = [
262 315 self."setuptools"
263 316 self."decorator"
264 317 self."pickleshare"
265 318 self."simplegeneric"
266 319 self."traitlets"
267 320 self."prompt-toolkit"
268 321 self."pygments"
269 322 self."pexpect"
270 323 self."backports.shutil-get-terminal-size"
271 324 self."pathlib2"
272 325 self."pexpect"
273 326 ];
274 327 src = fetchurl {
275 328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
276 329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
277 330 };
278 331 meta = {
279 332 license = [ pkgs.lib.licenses.bsdOriginal ];
280 333 };
281 334 };
282 335 "ipython-genutils" = super.buildPythonPackage {
283 336 name = "ipython-genutils-0.2.0";
284 337 doCheck = false;
285 338 src = fetchurl {
286 339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
287 340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
288 341 };
289 342 meta = {
290 343 license = [ pkgs.lib.licenses.bsdOriginal ];
291 344 };
292 345 };
293 346 "mako" = super.buildPythonPackage {
294 name = "mako-1.0.7";
347 name = "mako-1.1.0";
295 348 doCheck = false;
296 349 propagatedBuildInputs = [
297 350 self."markupsafe"
298 351 ];
299 352 src = fetchurl {
300 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
301 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
302 355 };
303 356 meta = {
304 357 license = [ pkgs.lib.licenses.mit ];
305 358 };
306 359 };
307 360 "markupsafe" = super.buildPythonPackage {
308 name = "markupsafe-1.1.0";
361 name = "markupsafe-1.1.1";
309 362 doCheck = false;
310 363 src = fetchurl {
311 url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
312 sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
313 366 };
314 367 meta = {
315 license = [ pkgs.lib.licenses.bsdOriginal ];
368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
316 369 };
317 370 };
318 371 "mercurial" = super.buildPythonPackage {
319 name = "mercurial-4.9.1";
372 name = "mercurial-5.1.1";
320 373 doCheck = false;
321 374 src = fetchurl {
322 url = "https://files.pythonhosted.org/packages/60/58/a1c52d5f5c0b755e231faf7c4f507dc51fe26d979d36346bc9d28f4f8a75/mercurial-4.9.1.tar.gz";
323 sha256 = "0iybbkd9add066729zg01kwz5hhc1s6lhp9rrnsmzq6ihyxj3p8v";
375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
324 377 };
325 378 meta = {
326 379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
327 380 };
328 381 };
329 382 "mock" = super.buildPythonPackage {
330 name = "mock-1.0.1";
383 name = "mock-3.0.5";
331 384 doCheck = false;
385 propagatedBuildInputs = [
386 self."six"
387 self."funcsigs"
388 ];
332 389 src = fetchurl {
333 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
334 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
335 392 };
336 393 meta = {
337 license = [ pkgs.lib.licenses.bsdOriginal ];
394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
338 395 };
339 396 };
340 397 "more-itertools" = super.buildPythonPackage {
341 398 name = "more-itertools-5.0.0";
342 399 doCheck = false;
343 400 propagatedBuildInputs = [
344 401 self."six"
345 402 ];
346 403 src = fetchurl {
347 404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
348 405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
349 406 };
350 407 meta = {
351 408 license = [ pkgs.lib.licenses.mit ];
352 409 };
353 410 };
354 411 "msgpack-python" = super.buildPythonPackage {
355 412 name = "msgpack-python-0.5.6";
356 413 doCheck = false;
357 414 src = fetchurl {
358 415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
359 416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
360 417 };
361 418 meta = {
362 419 license = [ pkgs.lib.licenses.asl20 ];
363 420 };
364 421 };
422 "packaging" = super.buildPythonPackage {
423 name = "packaging-19.2";
424 doCheck = false;
425 propagatedBuildInputs = [
426 self."pyparsing"
427 self."six"
428 ];
429 src = fetchurl {
430 url = "https://files.pythonhosted.org/packages/5a/2f/449ded84226d0e2fda8da9252e5ee7731bdf14cd338f622dfcd9934e0377/packaging-19.2.tar.gz";
431 sha256 = "0izwlz9h0bw171a1chr311g2y7n657zjaf4mq4rgm8pp9lbj9f98";
432 };
433 meta = {
434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 };
436 };
365 437 "pastedeploy" = super.buildPythonPackage {
366 438 name = "pastedeploy-2.0.1";
367 439 doCheck = false;
368 440 src = fetchurl {
369 441 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
370 442 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
371 443 };
372 444 meta = {
373 445 license = [ pkgs.lib.licenses.mit ];
374 446 };
375 447 };
376 448 "pathlib2" = super.buildPythonPackage {
377 name = "pathlib2-2.3.4";
449 name = "pathlib2-2.3.5";
378 450 doCheck = false;
379 451 propagatedBuildInputs = [
380 452 self."six"
381 453 self."scandir"
382 454 ];
383 455 src = fetchurl {
384 url = "https://files.pythonhosted.org/packages/b5/f4/9c7cc726ece2498b6c8b62d3262aa43f59039b953fe23c9964ac5e18d40b/pathlib2-2.3.4.tar.gz";
385 sha256 = "1y0f9rkm1924zrc5dn4bwxlhgdkbml82lkcc28l5rgmr7d918q24";
456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
386 458 };
387 459 meta = {
388 460 license = [ pkgs.lib.licenses.mit ];
389 461 };
390 462 };
391 463 "pexpect" = super.buildPythonPackage {
392 464 name = "pexpect-4.7.0";
393 465 doCheck = false;
394 466 propagatedBuildInputs = [
395 467 self."ptyprocess"
396 468 ];
397 469 src = fetchurl {
398 470 url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
399 471 sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
400 472 };
401 473 meta = {
402 474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
403 475 };
404 476 };
405 477 "pickleshare" = super.buildPythonPackage {
406 478 name = "pickleshare-0.7.5";
407 479 doCheck = false;
408 480 propagatedBuildInputs = [
409 481 self."pathlib2"
410 482 ];
411 483 src = fetchurl {
412 484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
413 485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
414 486 };
415 487 meta = {
416 488 license = [ pkgs.lib.licenses.mit ];
417 489 };
418 490 };
419 491 "plaster" = super.buildPythonPackage {
420 492 name = "plaster-1.0";
421 493 doCheck = false;
422 494 propagatedBuildInputs = [
423 495 self."setuptools"
424 496 ];
425 497 src = fetchurl {
426 498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
427 499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
428 500 };
429 501 meta = {
430 502 license = [ pkgs.lib.licenses.mit ];
431 503 };
432 504 };
433 505 "plaster-pastedeploy" = super.buildPythonPackage {
434 506 name = "plaster-pastedeploy-0.7";
435 507 doCheck = false;
436 508 propagatedBuildInputs = [
437 509 self."pastedeploy"
438 510 self."plaster"
439 511 ];
440 512 src = fetchurl {
441 513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
442 514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
443 515 };
444 516 meta = {
445 517 license = [ pkgs.lib.licenses.mit ];
446 518 };
447 519 };
448 520 "pluggy" = super.buildPythonPackage {
449 name = "pluggy-0.11.0";
521 name = "pluggy-0.13.1";
450 522 doCheck = false;
523 propagatedBuildInputs = [
524 self."importlib-metadata"
525 ];
451 526 src = fetchurl {
452 url = "https://files.pythonhosted.org/packages/0d/a1/862ab336e8128fde20981d2c1aa8506693412daf5083b1911d539412676b/pluggy-0.11.0.tar.gz";
453 sha256 = "10511a54dvafw1jrk75mrhml53c7b7w4yaw7241696lc2hfvr895";
527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
454 529 };
455 530 meta = {
456 531 license = [ pkgs.lib.licenses.mit ];
457 532 };
458 533 };
459 534 "prompt-toolkit" = super.buildPythonPackage {
460 name = "prompt-toolkit-1.0.16";
535 name = "prompt-toolkit-1.0.18";
461 536 doCheck = false;
462 537 propagatedBuildInputs = [
463 538 self."six"
464 539 self."wcwidth"
465 540 ];
466 541 src = fetchurl {
467 url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
468 sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
469 544 };
470 545 meta = {
471 546 license = [ pkgs.lib.licenses.bsdOriginal ];
472 547 };
473 548 };
474 549 "psutil" = super.buildPythonPackage {
475 name = "psutil-5.5.1";
550 name = "psutil-5.6.5";
476 551 doCheck = false;
477 552 src = fetchurl {
478 url = "https://files.pythonhosted.org/packages/c7/01/7c30b247cdc5ba29623faa5c8cf1f1bbf7e041783c340414b0ed7e067c64/psutil-5.5.1.tar.gz";
479 sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj";
553 url = "https://files.pythonhosted.org/packages/03/9a/95c4b3d0424426e5fd94b5302ff74cea44d5d4f53466e1228ac8e73e14b4/psutil-5.6.5.tar.gz";
554 sha256 = "0isil5jxwwd8awz54qk28rpgjg43i5l6yl70g40vxwa4r4m56lfh";
480 555 };
481 556 meta = {
482 557 license = [ pkgs.lib.licenses.bsdOriginal ];
483 558 };
484 559 };
485 560 "ptyprocess" = super.buildPythonPackage {
486 561 name = "ptyprocess-0.6.0";
487 562 doCheck = false;
488 563 src = fetchurl {
489 564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
490 565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
491 566 };
492 567 meta = {
493 568 license = [ ];
494 569 };
495 570 };
496 571 "py" = super.buildPythonPackage {
497 name = "py-1.6.0";
572 name = "py-1.8.0";
498 573 doCheck = false;
499 574 src = fetchurl {
500 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
501 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
502 577 };
503 578 meta = {
504 579 license = [ pkgs.lib.licenses.mit ];
505 580 };
506 581 };
582 "pycparser" = super.buildPythonPackage {
583 name = "pycparser-2.19";
584 doCheck = false;
585 src = fetchurl {
586 url = "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz";
587 sha256 = "1cr5dcj9628lkz1qlwq3fv97c25363qppkmcayqvd05dpy573259";
588 };
589 meta = {
590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 };
592 };
593 "pygit2" = super.buildPythonPackage {
594 name = "pygit2-0.28.2";
595 doCheck = false;
596 propagatedBuildInputs = [
597 self."cffi"
598 self."six"
599 ];
600 src = fetchurl {
601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 };
604 meta = {
605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 };
607 };
507 608 "pygments" = super.buildPythonPackage {
508 609 name = "pygments-2.4.2";
509 610 doCheck = false;
510 611 src = fetchurl {
511 612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
512 613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
513 614 };
514 615 meta = {
515 616 license = [ pkgs.lib.licenses.bsdOriginal ];
516 617 };
517 618 };
619 "pyparsing" = super.buildPythonPackage {
620 name = "pyparsing-2.4.5";
621 doCheck = false;
622 src = fetchurl {
623 url = "https://files.pythonhosted.org/packages/00/32/8076fa13e832bb4dcff379f18f228e5a53412be0631808b9ca2610c0f566/pyparsing-2.4.5.tar.gz";
624 sha256 = "0fk8gsybiw1gm146mkjdjvaajwh20xwvpv4j7syh2zrnpq0j19jc";
625 };
626 meta = {
627 license = [ pkgs.lib.licenses.mit ];
628 };
629 };
518 630 "pyramid" = super.buildPythonPackage {
519 631 name = "pyramid-1.10.4";
520 632 doCheck = false;
521 633 propagatedBuildInputs = [
522 634 self."hupper"
523 635 self."plaster"
524 636 self."plaster-pastedeploy"
525 637 self."setuptools"
526 638 self."translationstring"
527 639 self."venusian"
528 640 self."webob"
529 641 self."zope.deprecation"
530 642 self."zope.interface"
531 643 self."repoze.lru"
532 644 ];
533 645 src = fetchurl {
534 646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
535 647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
536 648 };
537 649 meta = {
538 650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
539 651 };
540 652 };
541 653 "pyramid-mako" = super.buildPythonPackage {
542 name = "pyramid-mako-1.0.2";
654 name = "pyramid-mako-1.1.0";
543 655 doCheck = false;
544 656 propagatedBuildInputs = [
545 657 self."pyramid"
546 658 self."mako"
547 659 ];
548 660 src = fetchurl {
549 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
550 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
551 663 };
552 664 meta = {
553 665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
554 666 };
555 667 };
556 668 "pytest" = super.buildPythonPackage {
557 name = "pytest-3.8.2";
669 name = "pytest-4.6.5";
558 670 doCheck = false;
559 671 propagatedBuildInputs = [
560 672 self."py"
561 673 self."six"
562 self."setuptools"
674 self."packaging"
563 675 self."attrs"
564 self."more-itertools"
565 676 self."atomicwrites"
566 677 self."pluggy"
678 self."importlib-metadata"
679 self."wcwidth"
567 680 self."funcsigs"
568 681 self."pathlib2"
682 self."more-itertools"
569 683 ];
570 684 src = fetchurl {
571 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
572 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
573 687 };
574 688 meta = {
575 689 license = [ pkgs.lib.licenses.mit ];
576 690 };
577 691 };
578 692 "pytest-cov" = super.buildPythonPackage {
579 name = "pytest-cov-2.6.0";
693 name = "pytest-cov-2.7.1";
580 694 doCheck = false;
581 695 propagatedBuildInputs = [
582 696 self."pytest"
583 697 self."coverage"
584 698 ];
585 699 src = fetchurl {
586 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
587 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
588 702 };
589 703 meta = {
590 704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
591 705 };
592 706 };
593 707 "pytest-profiling" = super.buildPythonPackage {
594 name = "pytest-profiling-1.3.0";
708 name = "pytest-profiling-1.7.0";
595 709 doCheck = false;
596 710 propagatedBuildInputs = [
597 711 self."six"
598 712 self."pytest"
599 713 self."gprof2dot"
600 714 ];
601 715 src = fetchurl {
602 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
603 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
604 718 };
605 719 meta = {
606 720 license = [ pkgs.lib.licenses.mit ];
607 721 };
608 722 };
609 723 "pytest-runner" = super.buildPythonPackage {
610 name = "pytest-runner-4.2";
724 name = "pytest-runner-5.1";
611 725 doCheck = false;
612 726 src = fetchurl {
613 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
614 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
615 729 };
616 730 meta = {
617 731 license = [ pkgs.lib.licenses.mit ];
618 732 };
619 733 };
620 734 "pytest-sugar" = super.buildPythonPackage {
621 name = "pytest-sugar-0.9.1";
735 name = "pytest-sugar-0.9.2";
622 736 doCheck = false;
623 737 propagatedBuildInputs = [
624 738 self."pytest"
625 739 self."termcolor"
740 self."packaging"
626 741 ];
627 742 src = fetchurl {
628 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
629 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
630 745 };
631 746 meta = {
632 747 license = [ pkgs.lib.licenses.bsdOriginal ];
633 748 };
634 749 };
635 750 "pytest-timeout" = super.buildPythonPackage {
636 name = "pytest-timeout-1.3.2";
751 name = "pytest-timeout-1.3.3";
637 752 doCheck = false;
638 753 propagatedBuildInputs = [
639 754 self."pytest"
640 755 ];
641 756 src = fetchurl {
642 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
643 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
644 759 };
645 760 meta = {
646 761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
647 762 };
648 763 };
764 "redis" = super.buildPythonPackage {
765 name = "redis-3.3.11";
766 doCheck = false;
767 src = fetchurl {
768 url = "https://files.pythonhosted.org/packages/06/ca/00557c74279d2f256d3c42cabf237631355f3a132e4c74c2000e6647ad98/redis-3.3.11.tar.gz";
769 sha256 = "1hicqbi5xl92hhml82awrr2rxl9jar5fp8nbcycj9qgmsdwc43wd";
770 };
771 meta = {
772 license = [ pkgs.lib.licenses.mit ];
773 };
774 };
649 775 "repoze.lru" = super.buildPythonPackage {
650 776 name = "repoze.lru-0.7";
651 777 doCheck = false;
652 778 src = fetchurl {
653 779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
654 780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
655 781 };
656 782 meta = {
657 783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
658 784 };
659 785 };
660 786 "rhodecode-vcsserver" = super.buildPythonPackage {
661 name = "rhodecode-vcsserver-4.17.4";
787 name = "rhodecode-vcsserver-4.18.0";
662 788 buildInputs = [
663 789 self."pytest"
664 790 self."py"
665 791 self."pytest-cov"
666 792 self."pytest-sugar"
667 793 self."pytest-runner"
668 794 self."pytest-profiling"
669 795 self."pytest-timeout"
670 796 self."gprof2dot"
671 797 self."mock"
672 798 self."cov-core"
673 799 self."coverage"
674 800 self."webtest"
675 801 self."beautifulsoup4"
676 802 self."configobj"
677 803 ];
678 804 doCheck = true;
679 805 propagatedBuildInputs = [
680 806 self."configobj"
681 807 self."dogpile.cache"
682 808 self."dogpile.core"
683 809 self."decorator"
684 810 self."dulwich"
685 811 self."hgsubversion"
686 812 self."hg-evolve"
687 813 self."mako"
688 814 self."markupsafe"
689 815 self."mercurial"
690 816 self."msgpack-python"
691 817 self."pastedeploy"
692 818 self."pyramid"
693 819 self."pyramid-mako"
820 self."pygit2"
694 821 self."repoze.lru"
822 self."redis"
695 823 self."simplejson"
696 824 self."subprocess32"
697 825 self."subvertpy"
698 826 self."six"
699 827 self."translationstring"
700 828 self."webob"
701 829 self."zope.deprecation"
702 830 self."zope.interface"
703 831 self."gevent"
704 832 self."greenlet"
705 833 self."gunicorn"
706 834 self."waitress"
707 835 self."ipdb"
708 836 self."ipython"
709 837 self."pytest"
710 838 self."py"
711 839 self."pytest-cov"
712 840 self."pytest-sugar"
713 841 self."pytest-runner"
714 842 self."pytest-profiling"
715 843 self."pytest-timeout"
716 844 self."gprof2dot"
717 845 self."mock"
718 846 self."cov-core"
719 847 self."coverage"
720 848 self."webtest"
721 849 self."beautifulsoup4"
722 850 ];
723 851 src = ./.;
724 852 meta = {
725 853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
726 854 };
727 855 };
728 856 "scandir" = super.buildPythonPackage {
729 857 name = "scandir-1.10.0";
730 858 doCheck = false;
731 859 src = fetchurl {
732 860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
733 861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
734 862 };
735 863 meta = {
736 864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
737 865 };
738 866 };
739 867 "setproctitle" = super.buildPythonPackage {
740 868 name = "setproctitle-1.1.10";
741 869 doCheck = false;
742 870 src = fetchurl {
743 871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
744 872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
745 873 };
746 874 meta = {
747 875 license = [ pkgs.lib.licenses.bsdOriginal ];
748 876 };
749 877 };
750 878 "setuptools" = super.buildPythonPackage {
751 name = "setuptools-41.0.1";
879 name = "setuptools-44.0.0";
752 880 doCheck = false;
753 881 src = fetchurl {
754 url = "https://files.pythonhosted.org/packages/1d/64/a18a487b4391a05b9c7f938b94a16d80305bf0369c6b0b9509e86165e1d3/setuptools-41.0.1.zip";
755 sha256 = "04sns22y2hhsrwfy1mha2lgslvpjsjsz8xws7h2rh5a7ylkd28m2";
882 url = "https://files.pythonhosted.org/packages/b0/f3/44da7482ac6da3f36f68e253cb04de37365b3dba9036a3c70773b778b485/setuptools-44.0.0.zip";
883 sha256 = "025h5cnxcmda1893l6i12hrwdvs1n8r31qs6q4pkif2v7rrggfp5";
756 884 };
757 885 meta = {
758 886 license = [ pkgs.lib.licenses.mit ];
759 887 };
760 888 };
761 889 "simplegeneric" = super.buildPythonPackage {
762 890 name = "simplegeneric-0.8.1";
763 891 doCheck = false;
764 892 src = fetchurl {
765 893 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
766 894 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
767 895 };
768 896 meta = {
769 897 license = [ pkgs.lib.licenses.zpl21 ];
770 898 };
771 899 };
772 900 "simplejson" = super.buildPythonPackage {
773 901 name = "simplejson-3.16.0";
774 902 doCheck = false;
775 903 src = fetchurl {
776 904 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
777 905 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
778 906 };
779 907 meta = {
780 908 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
781 909 };
782 910 };
783 911 "six" = super.buildPythonPackage {
784 912 name = "six-1.11.0";
785 913 doCheck = false;
786 914 src = fetchurl {
787 915 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
788 916 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
789 917 };
790 918 meta = {
791 919 license = [ pkgs.lib.licenses.mit ];
792 920 };
793 921 };
794 922 "subprocess32" = super.buildPythonPackage {
795 923 name = "subprocess32-3.5.4";
796 924 doCheck = false;
797 925 src = fetchurl {
798 926 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
799 927 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
800 928 };
801 929 meta = {
802 930 license = [ pkgs.lib.licenses.psfl ];
803 931 };
804 932 };
805 933 "subvertpy" = super.buildPythonPackage {
806 934 name = "subvertpy-0.10.1";
807 935 doCheck = false;
808 936 src = fetchurl {
809 937 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
810 938 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
811 939 };
812 940 meta = {
813 941 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
814 942 };
815 943 };
816 944 "termcolor" = super.buildPythonPackage {
817 945 name = "termcolor-1.1.0";
818 946 doCheck = false;
819 947 src = fetchurl {
820 948 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
821 949 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
822 950 };
823 951 meta = {
824 952 license = [ pkgs.lib.licenses.mit ];
825 953 };
826 954 };
827 955 "traitlets" = super.buildPythonPackage {
828 name = "traitlets-4.3.2";
956 name = "traitlets-4.3.3";
829 957 doCheck = false;
830 958 propagatedBuildInputs = [
831 959 self."ipython-genutils"
832 960 self."six"
833 961 self."decorator"
834 962 self."enum34"
835 963 ];
836 964 src = fetchurl {
837 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
838 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
965 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
966 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
839 967 };
840 968 meta = {
841 969 license = [ pkgs.lib.licenses.bsdOriginal ];
842 970 };
843 971 };
844 972 "translationstring" = super.buildPythonPackage {
845 973 name = "translationstring-1.3";
846 974 doCheck = false;
847 975 src = fetchurl {
848 976 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
849 977 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
850 978 };
851 979 meta = {
852 980 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
853 981 };
854 982 };
855 983 "venusian" = super.buildPythonPackage {
856 984 name = "venusian-1.2.0";
857 985 doCheck = false;
858 986 src = fetchurl {
859 987 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
860 988 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
861 989 };
862 990 meta = {
863 991 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
864 992 };
865 993 };
866 994 "waitress" = super.buildPythonPackage {
867 name = "waitress-1.3.0";
995 name = "waitress-1.3.1";
868 996 doCheck = false;
869 997 src = fetchurl {
870 url = "https://files.pythonhosted.org/packages/43/50/9890471320d5ad22761ae46661cf745f487b1c8c4ec49352b99e1078b970/waitress-1.3.0.tar.gz";
871 sha256 = "09j5dzbbcxib7vdskhx39s1qsydlr4n2p2png71d7mjnr9pnwajf";
998 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
999 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
872 1000 };
873 1001 meta = {
874 1002 license = [ pkgs.lib.licenses.zpl21 ];
875 1003 };
876 1004 };
877 1005 "wcwidth" = super.buildPythonPackage {
878 1006 name = "wcwidth-0.1.7";
879 1007 doCheck = false;
880 1008 src = fetchurl {
881 1009 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
882 1010 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
883 1011 };
884 1012 meta = {
885 1013 license = [ pkgs.lib.licenses.mit ];
886 1014 };
887 1015 };
888 1016 "webob" = super.buildPythonPackage {
889 1017 name = "webob-1.8.5";
890 1018 doCheck = false;
891 1019 src = fetchurl {
892 1020 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
893 1021 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
894 1022 };
895 1023 meta = {
896 1024 license = [ pkgs.lib.licenses.mit ];
897 1025 };
898 1026 };
899 1027 "webtest" = super.buildPythonPackage {
900 1028 name = "webtest-2.0.33";
901 1029 doCheck = false;
902 1030 propagatedBuildInputs = [
903 1031 self."six"
904 1032 self."webob"
905 1033 self."waitress"
906 1034 self."beautifulsoup4"
907 1035 ];
908 1036 src = fetchurl {
909 1037 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
910 1038 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
911 1039 };
912 1040 meta = {
913 1041 license = [ pkgs.lib.licenses.mit ];
914 1042 };
915 1043 };
1044 "zipp" = super.buildPythonPackage {
1045 name = "zipp-0.6.0";
1046 doCheck = false;
1047 propagatedBuildInputs = [
1048 self."more-itertools"
1049 ];
1050 src = fetchurl {
1051 url = "https://files.pythonhosted.org/packages/57/dd/585d728479d97d25aeeb9aa470d36a4ad8d0ba5610f84e14770128ce6ff7/zipp-0.6.0.tar.gz";
1052 sha256 = "13ndkf7vklw978a4gdl1yfvn8hch28429a0iam67sg4nrp5v261p";
1053 };
1054 meta = {
1055 license = [ pkgs.lib.licenses.mit ];
1056 };
1057 };
916 1058 "zope.deprecation" = super.buildPythonPackage {
917 1059 name = "zope.deprecation-4.4.0";
918 1060 doCheck = false;
919 1061 propagatedBuildInputs = [
920 1062 self."setuptools"
921 1063 ];
922 1064 src = fetchurl {
923 1065 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
924 1066 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
925 1067 };
926 1068 meta = {
927 1069 license = [ pkgs.lib.licenses.zpl21 ];
928 1070 };
929 1071 };
930 1072 "zope.interface" = super.buildPythonPackage {
931 1073 name = "zope.interface-4.6.0";
932 1074 doCheck = false;
933 1075 propagatedBuildInputs = [
934 1076 self."setuptools"
935 1077 ];
936 1078 src = fetchurl {
937 1079 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
938 1080 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
939 1081 };
940 1082 meta = {
941 1083 license = [ pkgs.lib.licenses.zpl21 ];
942 1084 };
943 1085 };
944 1086
945 1087 ### Test requirements
946 1088
947 1089
948 1090 }
@@ -1,41 +1,42 b''
1 1 { pkgs ? (import <nixpkgs> {})
2 2 , pythonPackages ? "python27Packages"
3 3 }:
4 4
5 5 with pkgs.lib;
6 6
7 7 let _pythonPackages = pythonPackages; in
8 8 let
9 9 pythonPackages = getAttr _pythonPackages pkgs;
10 10
11 11 pip2nix = import ./nix-common/pip2nix.nix {
12 12 inherit
13 13 pkgs
14 14 pythonPackages;
15 15 };
16 16
17 17 in
18 18
19 19 pkgs.stdenv.mkDerivation {
20 20 name = "pip2nix-generated";
21 21 buildInputs = [
22 22 pip2nix.pip2nix
23 23 pythonPackages.pip-tools
24 24 pkgs.apr
25 25 pkgs.aprutil
26 pkgs.libffi
26 27 ];
27 28
28 29 shellHook = ''
29 30 runHook preShellHook
30 31 echo "Setting SVN_* variables"
31 32 export SVN_LIBRARY_PATH=${pkgs.subversion}/lib
32 33 export SVN_HEADER_PATH=${pkgs.subversion.dev}/include
33 34 runHook postShellHook
34 35 '';
35 36
36 37 preShellHook = ''
37 38 echo "Starting Generate Shell"
38 39 # Custom prompt to distinguish from other dev envs.
39 40 export PS1="\n\[\033[1;32m\][Generate-shell:\w]$\[\033[0m\] "
40 41 '';
41 42 }
@@ -1,43 +1,48 b''
1 1 ## dependencies
2 2
3 3 # our custom configobj
4 4 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
5 5
6 dogpile.cache==0.7.1
6 dogpile.cache==0.9.0
7 7 dogpile.core==0.4.1
8 8 decorator==4.1.2
9 9 dulwich==0.13.0
10 10 hgsubversion==1.9.3
11 hg-evolve==8.5.1
12 mako==1.0.7
13 markupsafe==1.1.0
14 mercurial==4.9.1
11 hg-evolve==9.1.0
12 mako==1.1.0
13 markupsafe==1.1.1
14 mercurial==5.1.1
15 15 msgpack-python==0.5.6
16 16
17 17 pastedeploy==2.0.1
18 18 pyramid==1.10.4
19 pyramid-mako==1.0.2
19 pyramid-mako==1.1.0
20 pygit2==0.28.2
20 21
21 22 repoze.lru==0.7
23 redis==3.3.11
22 24 simplejson==3.16.0
23 25 subprocess32==3.5.4
24 26 subvertpy==0.10.1
25 27
26 28 six==1.11.0
27 29 translationstring==1.3
28 30 webob==1.8.5
29 31 zope.deprecation==4.4.0
30 32 zope.interface==4.6.0
31 33
32 34 ## http servers
33 35 gevent==1.4.0
34 36 greenlet==0.4.15
35 37 gunicorn==19.9.0
36 waitress==1.3.0
38 waitress==1.3.1
37 39
38 40 ## debug
39 41 ipdb==0.12.0
40 42 ipython==5.1.0
41 43
42 44 ## test related requirements
43 45 -r requirements_test.txt
46
47 ## uncomment to add the debug libraries
48 #-r requirements_debug.txt
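
The requirements block above adds redis 3.3.11 next to the dogpile.cache 0.9.0 bump, which makes a redis-backed cache region possible. A minimal sketch of wiring the two together with the stock dogpile.cache API follows; the connection arguments and function name are illustrative assumptions, not vcsserver's actual configuration:

    # Illustrative: a dogpile.cache region using the redis backend pinned above.
    # Host/port/db values are example assumptions.
    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=3600,  # cache entries expire after an hour
        arguments={'host': 'localhost', 'port': 6379, 'db': 0},
    )

    @region.cache_on_arguments()
    def expensive_lookup(key):
        # stand-in for a slow computation; results are cached per argument
        return key.upper()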
@@ -1,12 +1,18 b''
1 1 # contains not directly required libraries we want to pin the version.
2 2
3 atomicwrites==1.2.1
4 attrs==18.2.0
5 hupper==1.6.1
6 pathlib2==2.3.4
3 atomicwrites==1.3.0
4 attrs==19.3.0
5 contextlib2==0.6.0.post1
6 cffi==1.12.3
7 hupper==1.9.1
8 importlib-metadata==0.23
9 packaging==19.2.0
10 pathlib2==2.3.5
7 11 pygments==2.4.2
8 psutil==5.5.1
9 pluggy==0.11.0
12 pyparsing==2.4.5
13 psutil==5.6.5
14 pluggy==0.13.1
10 15 scandir==1.10.0
11 16 setproctitle==1.1.10
12 17 venusian==1.2.0
18 wcwidth==0.1.7
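
Several of the new pins here (importlib-metadata together with the contextlib2, cffi and pathlib2 backports) exist because the pluggy 0.13 line pinned above pulls them in for entry-point discovery on Python 2.7. A short sketch of the backported API, assuming only the packages pinned in this file; the queried distribution name is just an example:

    # Sketch of the metadata API the importlib-metadata backport provides.
    from importlib_metadata import version, entry_points

    print(version('pluggy'))  # e.g. '0.13.1'

    # pytest plugins register under the 'pytest11' entry-point group
    for ep in entry_points().get('pytest11', []):
        print(ep.name, ep.value)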
@@ -1,16 +1,16 b''
1 1 # test related requirements
2 pytest==3.8.2
3 py==1.6.0
4 pytest-cov==2.6.0
5 pytest-sugar==0.9.1
6 pytest-runner==4.2.0
7 pytest-profiling==1.3.0
8 pytest-timeout==1.3.2
2 pytest==4.6.5
3 py==1.8.0
4 pytest-cov==2.7.1
5 pytest-sugar==0.9.2
6 pytest-runner==5.1.0
7 pytest-profiling==1.7.0
8 pytest-timeout==1.3.3
9 9 gprof2dot==2017.9.19
10 10
11 mock==1.0.1
11 mock==3.0.5
12 12 cov-core==1.15.0
13 coverage==4.5.3
13 coverage==4.5.4
14 14
15 15 webtest==2.0.33
16 16 beautifulsoup4==4.6.3
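
The test stack moves to pytest 4.6.5, the last pytest series that still supports Python 2, with matching plugin bumps. A small illustrative test module exercising one of the pinned plugins (the test body and timeout value are arbitrary examples):

    # Illustrative test exercising the pinned pytest plugins.
    import pytest

    @pytest.mark.timeout(10)  # pytest-timeout: abort this test after 10 seconds
    def test_fast_enough():
        assert sum(range(10)) == 45

Coverage from pytest-cov can be combined on the command line, e.g. pytest --cov=vcsserver --timeout=60.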
@@ -1,1 +1,1 b''
1 4.17.4 No newline at end of file
1 4.18.0 No newline at end of file
@@ -1,94 +1,76 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 23 from vcsserver.lib.rc_cache import region_meta
24 24 log = logging.getLogger(__name__)
25 25
26 26
27 27 class RepoFactory(object):
28 28 """
29 29 Utility to create instances of a repository
30 30
31 31 It provides internal caching of the `repo` object based on
32 32 the :term:`call context`.
33 33 """
34 34 repo_type = None
35 35
36 36 def __init__(self):
37 37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38 38
39 39 def _create_config(self, path, config):
40 40 config = {}
41 41 return config
42 42
43 43 def _create_repo(self, wire, create):
44 44 raise NotImplementedError()
45 45
46 46 def repo(self, wire, create=False):
47 """
48 Get a repository instance for the given path.
49
50 Uses internally the low level beaker API since the decorators introduce
51 significant overhead.
52 """
53 region = self._cache_region
54 context = wire.get('context', None)
55 repo_path = wire.get('path', '')
56 context_uid = '{}'.format(context)
57 cache = wire.get('cache', True)
58 cache_on = context and cache
59
60 @region.conditional_cache_on_arguments(condition=cache_on)
61 def create_new_repo(_repo_type, _repo_path, _context_uid):
62 return self._create_repo(wire, create)
63
64 repo = create_new_repo(self.repo_type, repo_path, context_uid)
65 return repo
47 raise NotImplementedError()
66 48
67 49
68 50 def obfuscate_qs(query_string):
69 51 if query_string is None:
70 52 return None
71 53
72 54 parsed = []
73 55 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
74 56 if k in ['auth_token', 'api_key']:
75 57 v = "*****"
76 58 parsed.append((k, v))
77 59
78 60 return '&'.join('{}{}'.format(
79 61 k, '={}'.format(v) if v else '') for k, v in parsed)
80 62
81 63
82 64 def raise_from_original(new_type):
83 65 """
84 66 Raise a new exception type with original args and traceback.
85 67 """
86 68 exc_type, exc_value, exc_traceback = sys.exc_info()
87 69 new_exc = new_type(*exc_value.args)
88 70 # store the original traceback into the new exc
89 71 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
90 72
91 73 try:
92 74 raise new_exc, None, exc_traceback
93 75 finally:
94 76 del exc_traceback
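
The hunk above strips the dogpile-based repo() caching out of RepoFactory; in the git remote diff that follows, each method instead computes a cache condition from the wire context and decorates an inner function with region.conditional_cache_on_arguments(condition=...). That decorator is RhodeCode's own extension of dogpile.cache, so the sketch below only approximates the idea with the stock API; every name in it is illustrative:

    # Approximation of the conditional per-method caching pattern, assuming
    # stock dogpile.cache (the real decorator is a custom region extension).
    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory')

    def conditional_cache_on_arguments(region, condition):
        def decorator(func):
            cached = region.cache_on_arguments()(func)
            def wrapper(*args, **kwargs):
                # use the cache only when the caller allows it
                if condition:
                    return cached(*args, **kwargs)
                return func(*args, **kwargs)
            return wrapper
        return decorator

    def cache_condition(wire):
        # mirrors the diff: cache when a call context exists and the caller
        # did not explicitly disable caching
        return bool(wire.get('context')) and wire.get('cache', True)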
@@ -1,752 +1,1177 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 18 import collections
18 19 import logging
19 20 import os
20 21 import posixpath as vcspath
21 22 import re
22 23 import stat
23 24 import traceback
24 25 import urllib
25 26 import urllib2
26 27 from functools import wraps
27 28
28 29 import more_itertools
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
29 32 from dulwich import index, objects
30 33 from dulwich.client import HttpGitClient, LocalGitClient
31 34 from dulwich.errors import (
32 35 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 36 MissingCommitError, ObjectMissing, HangupException,
34 37 UnexpectedCommandError)
35 from dulwich.repo import Repo as DulwichRepo, Tag
38 from dulwich.repo import Repo as DulwichRepo
36 39 from dulwich.server import update_server_info
37 40
38 41 from vcsserver import exceptions, settings, subprocessio
39 from vcsserver.utils import safe_str
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
42 from vcsserver.utils import safe_str, safe_int
43 from vcsserver.base import RepoFactory, obfuscate_qs
41 44 from vcsserver.hgcompat import (
42 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
44 48
45 49 DIR_STAT = stat.S_IFDIR
46 50 FILE_MODE = stat.S_IFMT
47 51 GIT_LINK = objects.S_IFGITLINK
52 PEELED_REF_MARKER = '^{}'
53
48 54
49 55 log = logging.getLogger(__name__)
50 56
51 57
58 def str_to_dulwich(value):
59 """
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 """
62 return value.decode(settings.WIRE_ENCODING)
63
64
52 65 def reraise_safe_exceptions(func):
53 66 """Converts Dulwich exceptions to something neutral."""
67
54 68 @wraps(func)
55 69 def wrapper(*args, **kwargs):
56 70 try:
57 71 return func(*args, **kwargs)
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
59 ObjectMissing) as e:
60 exc = exceptions.LookupException(e)
61 raise exc(e)
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 exc = exceptions.LookupException(org_exc=e)
74 raise exc(safe_str(e))
62 75 except (HangupException, UnexpectedCommandError) as e:
63 exc = exceptions.VcsException(e)
64 raise exc(e)
76 exc = exceptions.VcsException(org_exc=e)
77 raise exc(safe_str(e))
65 78 except Exception as e:
66 79 # NOTE(marcink): because of how dulwich handles some exceptions
67 80 # (KeyError on empty repos), we cannot track this and catch all
68 81 # exceptions; these are exceptions from other handlers
69 82 #if not hasattr(e, '_vcs_kind'):
70 83 #log.exception("Unhandled exception in git remote call")
71 84 #raise_from_original(exceptions.UnhandledException)
72 85 raise
73 86 return wrapper
74 87
75 88
76 89 class Repo(DulwichRepo):
77 90 """
78 91 A wrapper for dulwich Repo class.
79 92
80 93 Since dulwich sometimes keeps .idx file descriptors open, it leads to a
81 94 "Too many open files" error. We need to close all opened file descriptors
82 95 once the repo object is destroyed.
83
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 to 0.12.0 +
86 96 """
87 97 def __del__(self):
88 98 if hasattr(self, 'object_store'):
89 99 self.close()
90 100
91 101
102 class Repository(LibGit2Repo):
103
104 def __enter__(self):
105 return self
106
107 def __exit__(self, exc_type, exc_val, exc_tb):
108 self.free()
109
110
92 111 class GitFactory(RepoFactory):
93 112 repo_type = 'git'
94 113
95 def _create_repo(self, wire, create):
96 repo_path = str_to_dulwich(wire['path'])
97 return Repo(repo_path)
114 def _create_repo(self, wire, create, use_libgit2=False):
115 if use_libgit2:
116 return Repository(wire['path'])
117 else:
118 repo_path = str_to_dulwich(wire['path'])
119 return Repo(repo_path)
120
121 def repo(self, wire, create=False, use_libgit2=False):
122 """
123 Get a repository instance for the given path.
124 """
125 return self._create_repo(wire, create, use_libgit2)
126
127 def repo_libgit2(self, wire):
128 return self.repo(wire, use_libgit2=True)
98 129
99 130
100 class GitRemote(object):
131 class GitRemote(RemoteBase):
101 132
102 133 def __init__(self, factory):
103 134 self._factory = factory
104 self.peeled_ref_marker = '^{}'
105 135 self._bulk_methods = {
106 "author": self.commit_attribute,
107 "date": self.get_object_attrs,
108 "message": self.commit_attribute,
109 "parents": self.commit_attribute,
136 "date": self.date,
137 "author": self.author,
138 "branch": self.branch,
139 "message": self.message,
140 "parents": self.parents,
110 141 "_commit": self.revision,
111 142 }
112 143
113 144 def _wire_to_config(self, wire):
114 145 if 'config' in wire:
115 146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 147 return {}
117 148
118 def _assign_ref(self, wire, ref, commit_id):
119 repo = self._factory.repo(wire)
120 repo[ref] = commit_id
121
122 149 def _remote_conf(self, config):
123 150 params = [
124 151 '-c', 'core.askpass=""',
125 152 ]
126 153 ssl_cert_dir = config.get('vcs_ssl_dir')
127 154 if ssl_cert_dir:
128 155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 156 return params
130 157
131 158 @reraise_safe_exceptions
132 def is_empty(self, wire):
133 repo = self._factory.repo(wire)
134 try:
135 return not repo.head()
136 except Exception:
137 log.exception("failed to read object_store")
138 return True
159 def discover_git_version(self):
160 stdout, _ = self.run_git_command(
161 {}, ['--version'], _bare=True, _safe=True)
162 prefix = 'git version'
163 if stdout.startswith(prefix):
164 stdout = stdout[len(prefix):]
165 return stdout.strip()
139 166
140 167 @reraise_safe_exceptions
141 def add_object(self, wire, content):
142 repo = self._factory.repo(wire)
143 blob = objects.Blob()
144 blob.set_raw_string(content)
145 repo.object_store.add_object(blob)
146 return blob.id
168 def is_empty(self, wire):
169 repo_init = self._factory.repo_libgit2(wire)
170 with repo_init as repo:
171
172 try:
173 has_head = repo.head.name
174 if has_head:
175 return False
176
177 # NOTE(marcink): check again using more expensive method
178 return repo.is_empty
179 except Exception:
180 pass
181
182 return True
147 183
148 184 @reraise_safe_exceptions
149 185 def assert_correct_path(self, wire):
150 path = wire.get('path')
151 try:
152 self._factory.repo(wire)
153 except NotGitRepository as e:
154 tb = traceback.format_exc()
155 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
156 return False
186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 def _assert_correct_path(_context_uid, _repo_id):
189 try:
190 repo_init = self._factory.repo_libgit2(wire)
191 with repo_init as repo:
192 pass
193 except pygit2.GitError:
194 path = wire.get('path')
195 tb = traceback.format_exc()
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 return False
157 198
158 return True
199 return True
200 return _assert_correct_path(context_uid, repo_id)
159 201
160 202 @reraise_safe_exceptions
161 203 def bare(self, wire):
162 repo = self._factory.repo(wire)
163 return repo.bare
204 repo_init = self._factory.repo_libgit2(wire)
205 with repo_init as repo:
206 return repo.is_bare
164 207
165 208 @reraise_safe_exceptions
166 209 def blob_as_pretty_string(self, wire, sha):
167 repo = self._factory.repo(wire)
168 return repo[sha].as_pretty_string()
210 repo_init = self._factory.repo_libgit2(wire)
211 with repo_init as repo:
212 blob_obj = repo[sha]
213 blob = blob_obj.data
214 return blob
169 215
170 216 @reraise_safe_exceptions
171 217 def blob_raw_length(self, wire, sha):
172 repo = self._factory.repo(wire)
173 blob = repo[sha]
174 return blob.raw_length()
218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 def _blob_raw_length(_repo_id, _sha):
221
222 repo_init = self._factory.repo_libgit2(wire)
223 with repo_init as repo:
224 blob = repo[sha]
225 return blob.size
226
227 return _blob_raw_length(repo_id, sha)
175 228
176 229 def _parse_lfs_pointer(self, raw_content):
177 230
178 231 spec_string = 'version https://git-lfs.github.com/spec'
179 232 if raw_content and raw_content.startswith(spec_string):
180 233 pattern = re.compile(r"""
181 234 (?:\n)?
182 235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
183 236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
184 237 ^size[ ](?P<oid_size>[0-9]+)\n
185 238 (?:\n)?
186 239 """, re.VERBOSE | re.MULTILINE)
187 240 match = pattern.match(raw_content)
188 241 if match:
189 242 return match.groupdict()
190 243
191 244 return {}
192 245
193 246 @reraise_safe_exceptions
194 def is_large_file(self, wire, sha):
195 repo = self._factory.repo(wire)
196 blob = repo[sha]
197 return self._parse_lfs_pointer(blob.as_raw_string())
247 def is_large_file(self, wire, commit_id):
248 cache_on, context_uid, repo_id = self._cache_on(wire)
249
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 def _is_large_file(_repo_id, _sha):
252 repo_init = self._factory.repo_libgit2(wire)
253 with repo_init as repo:
254 blob = repo[commit_id]
255 if blob.is_binary:
256 return {}
257
258 return self._parse_lfs_pointer(blob.data)
259
260 return _is_large_file(repo_id, commit_id)
261
262 @reraise_safe_exceptions
263 def is_binary(self, wire, tree_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
272
273 return _is_binary(repo_id, tree_id)
198 274
199 275 @reraise_safe_exceptions
200 276 def in_largefiles_store(self, wire, oid):
201 repo = self._factory.repo(wire)
202 277 conf = self._wire_to_config(wire)
278 repo_init = self._factory.repo_libgit2(wire)
279 with repo_init as repo:
280 repo_name = repo.path
203 281
204 282 store_location = conf.get('vcs_git_lfs_store_location')
205 283 if store_location:
206 repo_name = repo.path
284
207 285 store = LFSOidStore(
208 286 oid=oid, repo=repo_name, store_location=store_location)
209 287 return store.has_oid()
210 288
211 289 return False
212 290
213 291 @reraise_safe_exceptions
214 292 def store_path(self, wire, oid):
215 repo = self._factory.repo(wire)
216 293 conf = self._wire_to_config(wire)
294 repo_init = self._factory.repo_libgit2(wire)
295 with repo_init as repo:
296 repo_name = repo.path
217 297
218 298 store_location = conf.get('vcs_git_lfs_store_location')
219 299 if store_location:
220 repo_name = repo.path
221 300 store = LFSOidStore(
222 301 oid=oid, repo=repo_name, store_location=store_location)
223 302 return store.oid_path
224 303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
225 304
226 305 @reraise_safe_exceptions
227 306 def bulk_request(self, wire, rev, pre_load):
228 result = {}
229 for attr in pre_load:
230 try:
231 method = self._bulk_methods[attr]
232 args = [wire, rev]
233 if attr == "date":
234 args.extend(["commit_time", "commit_timezone"])
235 elif attr in ["author", "message", "parents"]:
236 args.append(attr)
237 result[attr] = method(*args)
238 except KeyError as e:
239 raise exceptions.VcsException(e)(
240 "Unknown bulk attribute: %s" % attr)
241 return result
307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
309 def _bulk_request(_repo_id, _rev, _pre_load):
310 result = {}
311 for attr in pre_load:
312 try:
313 method = self._bulk_methods[attr]
314 args = [wire, rev]
315 result[attr] = method(*args)
316 except KeyError as e:
317 raise exceptions.VcsException(e)(
318 "Unknown bulk attribute: %s" % attr)
319 return result
320
321 return _bulk_request(repo_id, rev, sorted(pre_load))
242 322
243 323 def _build_opener(self, url):
244 324 handlers = []
245 325 url_obj = url_parser(url)
246 326 _, authinfo = url_obj.authinfo()
247 327
248 328 if authinfo:
249 329 # create a password manager
250 330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
251 331 passmgr.add_password(*authinfo)
252 332
253 333 handlers.extend((httpbasicauthhandler(passmgr),
254 334 httpdigestauthhandler(passmgr)))
255 335
256 336 return urllib2.build_opener(*handlers)
257 337
338 def _type_id_to_name(self, type_id):
339 return {
340 1: b'commit',
341 2: b'tree',
342 3: b'blob',
343 4: b'tag'
344 }[type_id]
345
258 346 @reraise_safe_exceptions
259 347 def check_url(self, url, config):
260 348 url_obj = url_parser(url)
261 349 test_uri, _ = url_obj.authinfo()
262 350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
263 351 url_obj.query = obfuscate_qs(url_obj.query)
264 352 cleaned_uri = str(url_obj)
265 353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
266 354
267 355 if not test_uri.endswith('info/refs'):
268 356 test_uri = test_uri.rstrip('/') + '/info/refs'
269 357
270 358 o = self._build_opener(url)
271 359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
272 360
273 361 q = {"service": 'git-upload-pack'}
274 362 qs = '?%s' % urllib.urlencode(q)
275 363 cu = "%s%s" % (test_uri, qs)
276 364 req = urllib2.Request(cu, None, {})
277 365
278 366 try:
279 367 log.debug("Trying to open URL %s", cleaned_uri)
280 368 resp = o.open(req)
281 369 if resp.code != 200:
282 370 raise exceptions.URLError()('Return Code is not 200')
283 371 except Exception as e:
284 372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
285 373 # means it cannot be cloned
286 374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
287 375
288 376 # now detect if it's a proper git repo
289 377 gitdata = resp.read()
290 378 if 'service=git-upload-pack' in gitdata:
291 379 pass
292 380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
293 381 # old-style git can return some other format!
294 382 pass
295 383 else:
296 384 raise exceptions.URLError()(
297 385 "url [%s] does not look like an git" % (cleaned_uri,))
298 386
299 387 return True
300 388
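check_url above implements the git smart-HTTP probe by hand with urllib2. For illustration only, the same check sketched with `requests` (an assumption -- vcsserver itself does not use requests):

import re
import requests

def looks_like_git_repo(base_url):
    resp = requests.get(base_url.rstrip('/') + '/info/refs',
                        params={'service': 'git-upload-pack'},
                        headers={'User-Agent': 'git/1.7.8.0'}, timeout=10)
    if resp.status_code != 200:
        return False
    # smart HTTP echoes the service line; dumb HTTP lists "<sha>  refs/..."
    return ('service=git-upload-pack' in resp.text
            or bool(re.findall(r'[0-9a-fA-F]{40}\s+refs', resp.text)))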
301 389 @reraise_safe_exceptions
302 390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
303 391 # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
304 392 remote_refs = self.pull(wire, url, apply_refs=False)
305 393 repo = self._factory.repo(wire)
306 394 if isinstance(valid_refs, list):
307 395 valid_refs = tuple(valid_refs)
308 396
309 397 for k in remote_refs:
310 398 # only parse heads/tags and skip so-called deferred tags
311 399 if k.startswith(valid_refs) and not k.endswith(deferred):
312 400 repo[k] = remote_refs[k]
313 401
314 402 if update_after_clone:
315 403 # we want to checkout HEAD
316 404 repo["HEAD"] = remote_refs["HEAD"]
317 405 index.build_index_from_tree(repo.path, repo.index_path(),
318 406 repo.object_store, repo["HEAD"].tree)
319 407
408 @reraise_safe_exceptions
409 def branch(self, wire, commit_id):
410 cache_on, context_uid, repo_id = self._cache_on(wire)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 def _branch(_context_uid, _repo_id, _commit_id):
413 regex = re.compile('^refs/heads')
414
415 def filter_with(ref):
416 return regex.match(ref[0]) and ref[1] == _commit_id
417
418 branches = filter(filter_with, self.get_refs(wire).items())
419 return [x[0].split('refs/heads/')[-1] for x in branches]
420
421 return _branch(context_uid, repo_id, commit_id)
422
423 @reraise_safe_exceptions
424 def commit_branches(self, wire, commit_id):
425 cache_on, context_uid, repo_id = self._cache_on(wire)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 repo_init = self._factory.repo_libgit2(wire)
429 with repo_init as repo:
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 return branches
432
433 return _commit_branches(context_uid, repo_id, commit_id)
434
435 @reraise_safe_exceptions
436 def add_object(self, wire, content):
437 repo_init = self._factory.repo_libgit2(wire)
438 with repo_init as repo:
439 blob = objects.Blob()
440 blob.set_raw_string(content)
441 repo.object_store.add_object(blob)
442 return blob.id
443
320 444 # TODO: this is quite complex, check if that can be simplified
321 445 @reraise_safe_exceptions
322 446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
323 447 repo = self._factory.repo(wire)
324 448 object_store = repo.object_store
325 449
326 450 # Create tree and populate it with blobs
327 451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
328 452
329 453 for node in updated:
330 454 # Compute subdirs if needed
331 455 dirpath, nodename = vcspath.split(node['path'])
332 456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
333 457 parent = commit_tree
334 458 ancestors = [('', parent)]
335 459
336 460 # Try to dig for the deepest existing tree
337 461 while dirnames:
338 462 curdir = dirnames.pop(0)
339 463 try:
340 464 dir_id = parent[curdir][1]
341 465 except KeyError:
342 466 # put curdir back into dirnames and stop
343 467 dirnames.insert(0, curdir)
344 468 break
345 469 else:
346 470 # If found, update parent
347 471 parent = repo[dir_id]
348 472 ancestors.append((curdir, parent))
349 473 # Now parent is the deepest existing tree and we need to create
350 474 # subtrees for dirnames (in reverse order)
351 475 # [this only applies for nodes from added]
352 476 new_trees = []
353 477
354 478 blob = objects.Blob.from_string(node['content'])
355 479
356 480 if dirnames:
357 481 # If there are trees which should be created we need to build
358 482 # them now (in reverse order)
359 483 reversed_dirnames = list(reversed(dirnames))
360 484 curtree = objects.Tree()
361 485 curtree[node['node_path']] = node['mode'], blob.id
362 486 new_trees.append(curtree)
363 487 for dirname in reversed_dirnames[:-1]:
364 488 newtree = objects.Tree()
365 489 newtree[dirname] = (DIR_STAT, curtree.id)
366 490 new_trees.append(newtree)
367 491 curtree = newtree
368 492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
369 493 else:
370 parent.add(
371 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
372 495
373 496 new_trees.append(parent)
374 497 # Update ancestors
375 498 reversed_ancestors = reversed(
376 499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
377 500 for parent, tree, path in reversed_ancestors:
378 501 parent[path] = (DIR_STAT, tree.id)
379 502 object_store.add_object(tree)
380 503
381 504 object_store.add_object(blob)
382 505 for tree in new_trees:
383 506 object_store.add_object(tree)
384 507
385 508 for node_path in removed:
386 509 paths = node_path.split('/')
387 510 tree = commit_tree
388 511 trees = [tree]
389 512 # Traverse deep into the forest...
390 513 for path in paths:
391 514 try:
392 515 obj = repo[tree[path][1]]
393 516 if isinstance(obj, objects.Tree):
394 517 trees.append(obj)
395 518 tree = obj
396 519 except KeyError:
397 520 break
398 521 # Cut down the blob and all rotten trees on the way back...
399 522 for path, tree in reversed(zip(paths, trees)):
400 523 del tree[path]
401 524 if tree:
402 525 # This tree still has elements - don't remove it or any
403 526 # of its parents
404 527 break
405 528
406 529 object_store.add_object(commit_tree)
407 530
408 531 # Create commit
409 532 commit = objects.Commit()
410 533 commit.tree = commit_tree.id
411 534 for k, v in commit_data.iteritems():
412 535 setattr(commit, k, v)
413 536 object_store.add_object(commit)
414 537
538 self.create_branch(wire, branch, commit.id)
539
540 # dulwich set-ref
415 541 ref = 'refs/heads/%s' % branch
416 542 repo.refs[ref] = commit.id
417 543
418 544 return commit.id
419 545
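For contrast with the hand-rolled tree building in commit() above (needed because it writes objects straight into the object store, bare repos included), dulwich's porcelain layer covers the simple non-bare case -- a sketch with hypothetical paths:

from dulwich import porcelain

repo = porcelain.init('/tmp/example-repo')
with open('/tmp/example-repo/file.txt', 'w') as f:
    f.write('content\n')
porcelain.add(repo, ['/tmp/example-repo/file.txt'])
commit_id = porcelain.commit(repo, message='add file',
                             author='Dev <dev@example.com>',
                             committer='Dev <dev@example.com>')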
420 546 @reraise_safe_exceptions
421 547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
422 548 if url != 'default' and '://' not in url:
423 549 client = LocalGitClient(url)
424 550 else:
425 551 url_obj = url_parser(url)
426 552 o = self._build_opener(url)
427 553 url, _ = url_obj.authinfo()
428 554 client = HttpGitClient(base_url=url, opener=o)
429 555 repo = self._factory.repo(wire)
430 556
431 557 determine_wants = repo.object_store.determine_wants_all
432 558 if refs:
433 559 def determine_wants_requested(references):
434 560 return [references[r] for r in references if r in refs]
435 561 determine_wants = determine_wants_requested
436 562
437 563 try:
438 564 remote_refs = client.fetch(
439 565 path=url, target=repo, determine_wants=determine_wants)
440 566 except NotGitRepository as e:
441 567 log.warning(
442 568 'Trying to fetch from "%s" failed, not a Git repository.', url)
443 569 # Exception can contain unicode which we convert
444 570 raise exceptions.AbortException(e)(repr(e))
445 571
446 572 # mikhail: client.fetch() returns all the remote refs, but fetches only
447 573 # refs filtered by `determine_wants` function. We need to filter result
448 574 # as well
449 575 if refs:
450 576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
451 577
452 578 if apply_refs:
453 579 # TODO: johbo: Needs proper test coverage with a git repository
454 580 # that contains a tag object, so that we would end up with
455 581 # a peeled ref at this point.
456 582 for k in remote_refs:
457 if k.endswith(self.peeled_ref_marker):
583 if k.endswith(PEELED_REF_MARKER):
458 584 log.debug("Skipping peeled reference %s", k)
459 585 continue
460 586 repo[k] = remote_refs[k]
461 587
462 588 if refs and not update_after:
463 589 # mikhail: explicitly set the head to the last ref.
464 590 repo['HEAD'] = remote_refs[refs[-1]]
465 591
466 592 if update_after:
467 593 # we want to checkout HEAD
468 594 repo["HEAD"] = remote_refs["HEAD"]
469 595 index.build_index_from_tree(repo.path, repo.index_path(),
470 596 repo.object_store, repo["HEAD"].tree)
471 597 return remote_refs
472 598
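Background on PEELED_REF_MARKER skipped above: `git ls-remote` reports an annotated tag twice -- once as the tag object and once, suffixed with `^{}`, as the commit it peels to (shas below are placeholders):

    $ git ls-remote origin
    <tag-object-sha>    refs/tags/v1.0
    <peeled-commit-sha> refs/tags/v1.0^{}

Importing both would duplicate the tag, hence pull() and sync_fetch() drop the suffixed entries.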
473 599 @reraise_safe_exceptions
474 def sync_fetch(self, wire, url, refs=None):
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
475 601 repo = self._factory.repo(wire)
476 602 if refs and not isinstance(refs, (list, tuple)):
477 603 refs = [refs]
604
478 605 config = self._wire_to_config(wire)
479 606 # get all remote refs we'll use to fetch later
607 cmd = ['ls-remote']
608 if not all_refs:
609 cmd += ['--heads', '--tags']
610 cmd += [url]
480 611 output, __ = self.run_git_command(
481 wire, ['ls-remote', url], fail_on_stderr=False,
612 wire, cmd, fail_on_stderr=False,
482 613 _copts=self._remote_conf(config),
483 614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
484 615
485 616 remote_refs = collections.OrderedDict()
486 617 fetch_refs = []
487 618
488 619 for ref_line in output.splitlines():
489 620 sha, ref = ref_line.split('\t')
490 621 sha = sha.strip()
491 622 if ref in remote_refs:
492 623 # duplicate, skip
493 624 continue
494 if ref.endswith(self.peeled_ref_marker):
625 if ref.endswith(PEELED_REF_MARKER):
495 626 log.debug("Skipping peeled reference %s", ref)
496 627 continue
497 628 # don't sync HEAD
498 629 if ref in ['HEAD']:
499 630 continue
500 631
501 632 remote_refs[ref] = sha
502 633
503 634 if refs and sha in refs:
504 635 # we filter fetch using our specified refs
505 636 fetch_refs.append('{}:{}'.format(ref, ref))
506 637 elif not refs:
507 638 fetch_refs.append('{}:{}'.format(ref, ref))
508 639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640
509 641 if fetch_refs:
510 642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
511 643 fetch_refs_chunks = list(chunk)
512 644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
513 645 _out, _err = self.run_git_command(
514 646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
515 647 fail_on_stderr=False,
516 648 _copts=self._remote_conf(config),
517 649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
518 650
519 651 return remote_refs
520 652
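The 4096-ref chunking above presumably keeps each `git fetch` invocation's argument list under OS limits; `more_itertools.chunked` simply splits any iterable into fixed-size lists:

import more_itertools

list(more_itertools.chunked(range(10), 4))
# -> [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]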
521 653 @reraise_safe_exceptions
522 654 def sync_push(self, wire, url, refs=None):
523 655 if not self.check_url(url, wire):
524 656 return
525 657 config = self._wire_to_config(wire)
526 repo = self._factory.repo(wire)
658 self._factory.repo(wire)
527 659 self.run_git_command(
528 660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
529 661 _copts=self._remote_conf(config),
530 662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
531 663
532 664 @reraise_safe_exceptions
533 665 def get_remote_refs(self, wire, url):
534 666 repo = Repo(url)
535 667 return repo.get_refs()
536 668
537 669 @reraise_safe_exceptions
538 670 def get_description(self, wire):
539 671 repo = self._factory.repo(wire)
540 672 return repo.get_description()
541 673
542 674 @reraise_safe_exceptions
543 675 def get_missing_revs(self, wire, rev1, rev2, path2):
544 676 repo = self._factory.repo(wire)
545 677 LocalGitClient(thin_packs=False).fetch(path2, repo)
546 678
547 679 wire_remote = wire.copy()
548 680 wire_remote['path'] = path2
549 681 repo_remote = self._factory.repo(wire_remote)
550 682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
551 683
552 684 revs = [
553 685 x.commit.id
554 686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
555 687 return revs
556 688
557 689 @reraise_safe_exceptions
558 690 def get_object(self, wire, sha):
559 repo = self._factory.repo(wire)
560 obj = repo.get_object(sha)
561 commit_id = obj.id
691 cache_on, context_uid, repo_id = self._cache_on(wire)
692 @self.region.conditional_cache_on_arguments(condition=cache_on)
693 def _get_object(_context_uid, _repo_id, _sha):
694 repo_init = self._factory.repo_libgit2(wire)
695 with repo_init as repo:
562 696
563 if isinstance(obj, Tag):
564 commit_id = obj.object[1]
697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
698 try:
699 commit = repo.revparse_single(sha)
700 except (KeyError, ValueError) as e:
701 raise exceptions.LookupException(e)(missing_commit_err)
702
703 is_tag = False
704 if isinstance(commit, pygit2.Tag):
705 commit = repo.get(commit.target)
706 is_tag = True
707
708 check_dangling = True
709 if is_tag:
710 check_dangling = False
565 711
566 return {
567 'id': obj.id,
568 'type': obj.type_name,
569 'commit_id': commit_id,
570 'idx': 0
571 }
712 # the input was a reference that resolved to a different sha, so the commit can't be dangling
713 if sha != commit.hex:
714 check_dangling = False
715
716 if check_dangling:
717 # check for dangling commit
718 for branch in repo.branches.with_commit(commit.hex):
719 if branch:
720 break
721 else:
722 raise exceptions.LookupException(None)(missing_commit_err)
572 723
573 @reraise_safe_exceptions
574 def get_object_attrs(self, wire, sha, *attrs):
575 repo = self._factory.repo(wire)
576 obj = repo.get_object(sha)
577 return list(getattr(obj, a) for a in attrs)
724 commit_id = commit.hex
725 type_id = commit.type
726
727 return {
728 'id': commit_id,
729 'type': self._type_id_to_name(type_id),
730 'commit_id': commit_id,
731 'idx': 0
732 }
733
734 return _get_object(context_uid, repo_id, sha)
578 735
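The revparse/peel dance inside _get_object, reduced to a standalone pygit2 sketch (repo path and ref are placeholders; pygit2 of this vintage):

import pygit2

repo = pygit2.Repository('/tmp/example-repo')
obj = repo.revparse_single('v1.0')  # resolves refs, short shas, 'HEAD~2', ...
if isinstance(obj, pygit2.Tag):
    obj = repo.get(obj.target)      # peel annotated tag -> commit
print(obj.hex, obj.type)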
579 736 @reraise_safe_exceptions
580 737 def get_refs(self, wire):
581 repo = self._factory.repo(wire)
582 result = {}
583 for ref, sha in repo.refs.as_dict().items():
584 peeled_sha = repo.get_peeled(ref)
585 result[ref] = peeled_sha
586 return result
738 cache_on, context_uid, repo_id = self._cache_on(wire)
739 @self.region.conditional_cache_on_arguments(condition=cache_on)
740 def _get_refs(_context_uid, _repo_id):
741
742 repo_init = self._factory.repo_libgit2(wire)
743 with repo_init as repo:
744 regex = re.compile('^refs/(heads|tags)/')
745 return {x.name: x.target.hex for x in
746 filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())}
747
748 return _get_refs(context_uid, repo_id)
587 749
588 750 @reraise_safe_exceptions
589 def get_refs_path(self, wire):
590 repo = self._factory.repo(wire)
591 return repo.refs.path
751 def get_branch_pointers(self, wire):
752 cache_on, context_uid, repo_id = self._cache_on(wire)
753 @self.region.conditional_cache_on_arguments(condition=cache_on)
754 def _get_branch_pointers(_context_uid, _repo_id):
755
756 repo_init = self._factory.repo_libgit2(wire)
757 regex = re.compile('^refs/heads')
758 with repo_init as repo:
759 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
760 return {x.target.hex: x.shorthand for x in branches}
761
762 return _get_branch_pointers(context_uid, repo_id)
592 763
593 764 @reraise_safe_exceptions
594 765 def head(self, wire, show_exc=True):
595 repo = self._factory.repo(wire)
596 try:
597 return repo.head()
598 except Exception:
599 if show_exc:
600 raise
766 cache_on, context_uid, repo_id = self._cache_on(wire)
767 @self.region.conditional_cache_on_arguments(condition=cache_on)
768 def _head(_context_uid, _repo_id, _show_exc):
769 repo_init = self._factory.repo_libgit2(wire)
770 with repo_init as repo:
771 try:
772 return repo.head.peel().hex
773 except Exception:
774 if show_exc:
775 raise
776 return _head(context_uid, repo_id, show_exc)
601 777
602 778 @reraise_safe_exceptions
603 779 def init(self, wire):
604 780 repo_path = str_to_dulwich(wire['path'])
605 781 self.repo = Repo.init(repo_path)
606 782
607 783 @reraise_safe_exceptions
608 784 def init_bare(self, wire):
609 785 repo_path = str_to_dulwich(wire['path'])
610 786 self.repo = Repo.init_bare(repo_path)
611 787
612 788 @reraise_safe_exceptions
613 789 def revision(self, wire, rev):
614 repo = self._factory.repo(wire)
615 obj = repo[rev]
616 obj_data = {
617 'id': obj.id,
618 }
619 try:
620 obj_data['tree'] = obj.tree
621 except AttributeError:
622 pass
623 return obj_data
790
791 cache_on, context_uid, repo_id = self._cache_on(wire)
792 @self.region.conditional_cache_on_arguments(condition=cache_on)
793 def _revision(_context_uid, _repo_id, _rev):
794 repo_init = self._factory.repo_libgit2(wire)
795 with repo_init as repo:
796 commit = repo[rev]
797 obj_data = {
798 'id': commit.id.hex,
799 }
800 # tree objects themselves don't have a tree_id attribute
801 if hasattr(commit, 'tree_id'):
802 obj_data['tree'] = commit.tree_id.hex
803
804 return obj_data
805 return _revision(context_uid, repo_id, rev)
806
807 @reraise_safe_exceptions
808 def date(self, wire, commit_id):
809 cache_on, context_uid, repo_id = self._cache_on(wire)
810 @self.region.conditional_cache_on_arguments(condition=cache_on)
811 def _date(_repo_id, _commit_id):
812 repo_init = self._factory.repo_libgit2(wire)
813 with repo_init as repo:
814 commit = repo[commit_id]
815
816 if hasattr(commit, 'commit_time'):
817 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
818 else:
819 commit = commit.get_object()
820 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
821
822 # TODO(marcink): check dulwich difference of offset vs timezone
823 return [commit_time, commit_time_offset]
824 return _date(repo_id, commit_id)
624 825
625 826 @reraise_safe_exceptions
626 def commit_attribute(self, wire, rev, attr):
627 repo = self._factory.repo(wire)
628 obj = repo[rev]
629 return getattr(obj, attr)
827 def author(self, wire, commit_id):
828 cache_on, context_uid, repo_id = self._cache_on(wire)
829 @self.region.conditional_cache_on_arguments(condition=cache_on)
830 def _author(_repo_id, _commit_id):
831 repo_init = self._factory.repo_libgit2(wire)
832 with repo_init as repo:
833 commit = repo[commit_id]
834
835 if hasattr(commit, 'author'):
836 author = commit.author
837 else:
838 author = commit.get_object().author
839
840 if author.email:
841 return u"{} <{}>".format(author.name, author.email)
842
843 return u"{}".format(author.raw_name)
844 return _author(repo_id, commit_id)
845
846 @reraise_safe_exceptions
847 def message(self, wire, commit_id):
848 cache_on, context_uid, repo_id = self._cache_on(wire)
849 @self.region.conditional_cache_on_arguments(condition=cache_on)
850 def _message(_repo_id, _commit_id):
851 repo_init = self._factory.repo_libgit2(wire)
852 with repo_init as repo:
853 commit = repo[commit_id]
854 return commit.message
855 return _message(repo_id, commit_id)
856
857 @reraise_safe_exceptions
858 def parents(self, wire, commit_id):
859 cache_on, context_uid, repo_id = self._cache_on(wire)
860 @self.region.conditional_cache_on_arguments(condition=cache_on)
861 def _parents(_repo_id, _commit_id):
862 repo_init = self._factory.repo_libgit2(wire)
863 with repo_init as repo:
864 commit = repo[commit_id]
865 if hasattr(commit, 'parent_ids'):
866 parent_ids = commit.parent_ids
867 else:
868 parent_ids = commit.get_object().parent_ids
869
870 return [x.hex for x in parent_ids]
871 return _parents(repo_id, commit_id)
872
873 @reraise_safe_exceptions
874 def children(self, wire, commit_id):
875 cache_on, context_uid, repo_id = self._cache_on(wire)
876 @self.region.conditional_cache_on_arguments(condition=cache_on)
877 def _children(_repo_id, _commit_id):
878 output, __ = self.run_git_command(
879 wire, ['rev-list', '--all', '--children'])
880
881 child_ids = []
882 pat = re.compile(r'^%s' % commit_id)
883 for l in output.splitlines():
884 if pat.match(l):
885 found_ids = l.split(' ')[1:]
886 child_ids.extend(found_ids)
887
888 return child_ids
889 return _children(repo_id, commit_id)
630 890
631 891 @reraise_safe_exceptions
632 892 def set_refs(self, wire, key, value):
633 repo = self._factory.repo(wire)
634 repo.refs[key] = value
893 repo_init = self._factory.repo_libgit2(wire)
894 with repo_init as repo:
895 repo.references.create(key, value, force=True)
896
897 @reraise_safe_exceptions
898 def create_branch(self, wire, branch_name, commit_id, force=False):
899 repo_init = self._factory.repo_libgit2(wire)
900 with repo_init as repo:
901 commit = repo[commit_id]
902
903 if force:
904 repo.branches.local.create(branch_name, commit, force=force)
905 elif not repo.branches.get(branch_name):
906 # create only if that branch doesn't exist yet
907 repo.branches.local.create(branch_name, commit, force=force)
635 908
636 909 @reraise_safe_exceptions
637 910 def remove_ref(self, wire, key):
638 repo = self._factory.repo(wire)
639 del repo.refs[key]
911 repo_init = self._factory.repo_libgit2(wire)
912 with repo_init as repo:
913 repo.references.delete(key)
914
915 @reraise_safe_exceptions
916 def tag_remove(self, wire, tag_name):
917 repo_init = self._factory.repo_libgit2(wire)
918 with repo_init as repo:
919 key = 'refs/tags/{}'.format(tag_name)
920 repo.references.delete(key)
640 921
641 922 @reraise_safe_exceptions
642 923 def tree_changes(self, wire, source_id, target_id):
924 # TODO(marcink): remove this seems it's only used by tests
643 925 repo = self._factory.repo(wire)
644 926 source = repo[source_id].tree if source_id else None
645 927 target = repo[target_id].tree
646 928 result = repo.object_store.tree_changes(source, target)
647 929 return list(result)
648 930
649 931 @reraise_safe_exceptions
932 def tree_and_type_for_path(self, wire, commit_id, path):
933
934 cache_on, context_uid, repo_id = self._cache_on(wire)
935 @self.region.conditional_cache_on_arguments(condition=cache_on)
936 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
937 repo_init = self._factory.repo_libgit2(wire)
938
939 with repo_init as repo:
940 commit = repo[commit_id]
941 try:
942 tree = commit.tree[path]
943 except KeyError:
944 return None, None, None
945
946 return tree.id.hex, tree.type, tree.filemode
947 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
948
949 @reraise_safe_exceptions
650 950 def tree_items(self, wire, tree_id):
651 repo = self._factory.repo(wire)
652 tree = repo[tree_id]
951 cache_on, context_uid, repo_id = self._cache_on(wire)
952 @self.region.conditional_cache_on_arguments(condition=cache_on)
953 def _tree_items(_repo_id, _tree_id):
954
955 repo_init = self._factory.repo_libgit2(wire)
956 with repo_init as repo:
957 try:
958 tree = repo[tree_id]
959 except KeyError:
960 raise ObjectMissing('No tree with id: {}'.format(tree_id))
961
962 result = []
963 for item in tree:
964 item_sha = item.hex
965 item_mode = item.filemode
966 item_type = item.type
967
968 if item_type == 'commit':
969 # NOTE(marcink): we translate submodules to 'link' for backward compat
970 item_type = 'link'
971
972 result.append((item.name, item_mode, item_sha, item_type))
973 return result
974 return _tree_items(repo_id, tree_id)
975
976 @reraise_safe_exceptions
977 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
978 """
979 Old version that uses subprocess to call diff
980 """
981
982 flags = [
983 '-U%s' % context, '--patch',
984 '--binary',
985 '--find-renames',
986 '--no-indent-heuristic',
987 # '--indent-heuristic',
988 #'--full-index',
989 #'--abbrev=40'
990 ]
991
992 if opt_ignorews:
993 flags.append('--ignore-all-space')
994
995 if commit_id_1 == self.EMPTY_COMMIT:
996 cmd = ['show'] + flags + [commit_id_2]
997 else:
998 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
999
1000 if file_filter:
1001 cmd.extend(['--', file_filter])
1002
1003 diff, __ = self.run_git_command(wire, cmd)
1004 # If we used the 'show' command, strip the first few lines (until the actual diff
1005 # starts)
1006 if commit_id_1 == self.EMPTY_COMMIT:
1007 lines = diff.splitlines()
1008 x = 0
1009 for line in lines:
1010 if line.startswith('diff'):
1011 break
1012 x += 1
1013 # Append a newline just like the 'diff' command does
1014 diff = '\n'.join(lines[x:]) + '\n'
1015 return diff
1016
1017 @reraise_safe_exceptions
1018 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1019 repo_init = self._factory.repo_libgit2(wire)
1020 with repo_init as repo:
1021 swap = True
1022 flags = 0
1023 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1024
1025 if opt_ignorews:
1026 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1027
1028 if commit_id_1 == self.EMPTY_COMMIT:
1029 comm1 = repo[commit_id_2]
1030 diff_obj = comm1.tree.diff_to_tree(
1031 flags=flags, context_lines=context, swap=swap)
1032
1033 else:
1034 comm1 = repo[commit_id_2]
1035 comm2 = repo[commit_id_1]
1036 diff_obj = comm1.tree.diff_to_tree(
1037 comm2.tree, flags=flags, context_lines=context, swap=swap)
1038 similar_flags = 0
1039 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1040 diff_obj.find_similar(flags=similar_flags)
1041
1042 if file_filter:
1043 for p in diff_obj:
1044 if p.delta.old_file.path == file_filter:
1045 return p.patch or ''
1046 # no matching path == no diff
1047 return ''
1048 return diff_obj.patch or ''
1049
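The flag composition used by diff() above, in isolation (commit ids are placeholders):

import pygit2

repo = pygit2.Repository('/tmp/example-repo')
old, new = repo['<sha-1>'], repo['<sha-2>']  # hypothetical full shas
flags = pygit2.GIT_DIFF_SHOW_BINARY | pygit2.GIT_DIFF_IGNORE_WHITESPACE
diff = new.tree.diff_to_tree(old.tree, flags=flags, context_lines=3, swap=True)
diff.find_similar(flags=pygit2.GIT_DIFF_FIND_RENAMES)  # rename detection
print(diff.patch or '')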
1050 @reraise_safe_exceptions
1051 def node_history(self, wire, commit_id, path, limit):
1052 cache_on, context_uid, repo_id = self._cache_on(wire)
1053 @self.region.conditional_cache_on_arguments(condition=cache_on)
1054 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1055 # optimize for n==1, rev-list is much faster for that use-case
1056 if limit == 1:
1057 cmd = ['rev-list', '-1', commit_id, '--', path]
1058 else:
1059 cmd = ['log']
1060 if limit:
1061 cmd.extend(['-n', str(safe_int(limit, 0))])
1062 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1063
1064 output, __ = self.run_git_command(wire, cmd)
1065 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1066
1067 return [x for x in commit_ids]
1068 return _node_history(context_uid, repo_id, commit_id, path, limit)
1069
1070 @reraise_safe_exceptions
1071 def node_annotate(self, wire, commit_id, path):
1072
1073 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1074 # -l ==> outputs long shas (and we need all 40 characters)
1075 # --root ==> doesn't put '^' character for boundaries
1076 # -r commit_id ==> blames for the given commit
1077 output, __ = self.run_git_command(wire, cmd)
653 1078
654 1079 result = []
655 for item in tree.iteritems():
656 item_sha = item.sha
657 item_mode = item.mode
658
659 if FILE_MODE(item_mode) == GIT_LINK:
660 item_type = "link"
661 else:
662 item_type = repo[item_sha].type_name
663
664 result.append((item.path, item_mode, item_sha, item_type))
1080 for i, blame_line in enumerate(output.split('\n')[:-1]):
1081 line_no = i + 1
1082 commit_id, line = re.split(r' ', blame_line, 1)
1083 result.append((line_no, commit_id, line))
665 1084 return result
666 1085
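Shape of the `git blame -l --root -r <sha>` output the loop above consumes: each line starts with the full 40-character sha, then a single space, then the annotated source line, so `re.split(r' ', blame_line, 1)` yields (commit_id, rest):

    <40-hex-sha> (Author Name 2019-05-01 10:00:00 +0200   1) first source line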
667 1086 @reraise_safe_exceptions
668 1087 def update_server_info(self, wire):
669 1088 repo = self._factory.repo(wire)
670 1089 update_server_info(repo)
671 1090
672 1091 @reraise_safe_exceptions
673 def discover_git_version(self):
674 stdout, _ = self.run_git_command(
675 {}, ['--version'], _bare=True, _safe=True)
676 prefix = 'git version'
677 if stdout.startswith(prefix):
678 stdout = stdout[len(prefix):]
679 return stdout.strip()
1092 def get_all_commit_ids(self, wire):
1093
1094 cache_on, context_uid, repo_id = self._cache_on(wire)
1095 @self.region.conditional_cache_on_arguments(condition=cache_on)
1096 def _get_all_commit_ids(_context_uid, _repo_id):
1097
1098 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1099 try:
1100 output, __ = self.run_git_command(wire, cmd)
1101 return output.splitlines()
1102 except Exception:
1103 # Can be raised for empty repositories
1104 return []
1105 return _get_all_commit_ids(context_uid, repo_id)
680 1106
681 1107 @reraise_safe_exceptions
682 1108 def run_git_command(self, wire, cmd, **opts):
683 1109 path = wire.get('path', None)
684 1110
685 1111 if path and os.path.isdir(path):
686 1112 opts['cwd'] = path
687 1113
688 1114 if '_bare' in opts:
689 1115 _copts = []
690 1116 del opts['_bare']
691 1117 else:
692 1118 _copts = ['-c', 'core.quotepath=false', ]
693 1119 safe_call = False
694 1120 if '_safe' in opts:
695 1121 # no exc on failure
696 1122 del opts['_safe']
697 1123 safe_call = True
698 1124
699 1125 if '_copts' in opts:
700 1126 _copts.extend(opts['_copts'] or [])
701 1127 del opts['_copts']
702 1128
703 1129 gitenv = os.environ.copy()
704 1130 gitenv.update(opts.pop('extra_env', {}))
705 1131 # need to clean GIT_DIR from the env!
706 1132 if 'GIT_DIR' in gitenv:
707 1133 del gitenv['GIT_DIR']
708 1134 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
709 1135 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
710 1136
711 1137 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
712 1138 _opts = {'env': gitenv, 'shell': False}
713 1139
1140 proc = None
714 1141 try:
715 1142 _opts.update(opts)
716 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1143 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
717 1144
718 return ''.join(p), ''.join(p.error)
1145 return ''.join(proc), ''.join(proc.error)
719 1146 except (EnvironmentError, OSError) as err:
720 1147 cmd = ' '.join(cmd) # human-friendly CMD
721 1148 tb_err = ("Couldn't run git command (%s).\n"
722 1149 "Original error was:%s\n"
723 1150 "Call options:%s\n"
724 1151 % (cmd, err, _opts))
725 1152 log.exception(tb_err)
726 1153 if safe_call:
727 1154 return '', err
728 1155 else:
729 1156 raise exceptions.VcsException()(tb_err)
1157 finally:
1158 if proc:
1159 proc.close()
730 1160
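Roughly what run_git_command amounts to, sketched with the standard library instead of subprocessio (a simplification -- the real chunker streams output instead of buffering it):

import os
import subprocess

def run_git(path, args, extra_env=None):
    env = dict(os.environ, GIT_TERMINAL_PROMPT='0', **(extra_env or {}))
    env.pop('GIT_DIR', None)  # same GIT_DIR cleanup as above
    proc = subprocess.Popen(['git', '-c', 'core.quotepath=false'] + list(args),
                            cwd=path, env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return out, err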
731 1161 @reraise_safe_exceptions
732 1162 def install_hooks(self, wire, force=False):
733 1163 from vcsserver.hook_utils import install_git_hooks
734 repo = self._factory.repo(wire)
735 return install_git_hooks(repo.path, repo.bare, force_create=force)
1164 bare = self.bare(wire)
1165 path = wire['path']
1166 return install_git_hooks(path, bare, force_create=force)
736 1167
737 1168 @reraise_safe_exceptions
738 1169 def get_hooks_info(self, wire):
739 1170 from vcsserver.hook_utils import (
740 1171 get_git_pre_hook_version, get_git_post_hook_version)
741 repo = self._factory.repo(wire)
1172 bare = self.bare(wire)
1173 path = wire['path']
742 1174 return {
743 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
744 'post_version': get_git_post_hook_version(repo.path, repo.bare),
1175 'pre_version': get_git_pre_hook_version(path, bare),
1176 'post_version': get_git_post_hook_version(path, bare),
745 1177 }
746
747
748 def str_to_dulwich(value):
749 """
750 Dulwich 0.10.1a requires `unicode` objects to be passed in.
751 """
752 return value.decode(settings.WIRE_ENCODING)
This diff has been collapsed as it changes many lines (648 lines changed).
@@ -1,856 +1,990 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23 import traceback
24 24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase, purge
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30
31 31 import vcsserver
32 32 from vcsserver import exceptions
33 33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 34 from vcsserver.hgcompat import (
35 35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 39 RepoLookupError, InterventionRequired, RequirementError)
40 from vcsserver.vcs_base import RemoteBase
40 41
41 42 log = logging.getLogger(__name__)
42 43
43 44
44 45 def make_ui_from_config(repo_config):
45 46
46 47 class LoggingUI(ui.ui):
47 48 def status(self, *msg, **opts):
48 49 log.info(' '.join(msg).rstrip('\n'))
49 50 super(LoggingUI, self).status(*msg, **opts)
50 51
51 52 def warn(self, *msg, **opts):
52 53 log.warn(' '.join(msg).rstrip('\n'))
53 54 super(LoggingUI, self).warn(*msg, **opts)
54 55
55 56 def error(self, *msg, **opts):
56 57 log.error(' '.join(msg).rstrip('\n'))
57 58 super(LoggingUI, self).error(*msg, **opts)
58 59
59 60 def note(self, *msg, **opts):
60 61 log.info(' '.join(msg).rstrip('\n'))
61 62 super(LoggingUI, self).note(*msg, **opts)
62 63
63 64 def debug(self, *msg, **opts):
64 65 log.debug(' '.join(msg).rstrip('\n'))
65 66 super(LoggingUI, self).debug(*msg, **opts)
66 67
67 68 baseui = LoggingUI()
68 69
69 70 # clean the baseui object
70 71 baseui._ocfg = hgconfig.config()
71 72 baseui._ucfg = hgconfig.config()
72 73 baseui._tcfg = hgconfig.config()
73 74
74 75 for section, option, value in repo_config:
75 76 baseui.setconfig(section, option, value)
76 77
77 78 # make our hgweb quiet so it doesn't print output
78 79 baseui.setconfig('ui', 'quiet', 'true')
79 80
80 81 baseui.setconfig('ui', 'paginate', 'never')
81 82 # for better Error reporting of Mercurial
82 83 baseui.setconfig('ui', 'message-output', 'stderr')
83 84
84 85 # force mercurial to only use 1 thread, otherwise it may try to set a
85 86 # signal in a non-main thread, thus generating a ValueError.
86 87 baseui.setconfig('worker', 'numcpus', 1)
87 88
88 89 # If there is no config for the largefiles extension, we explicitly disable
89 90 # it here. This overrides settings from the repository's hgrc file. Recent
90 91 # mercurial versions enable largefiles in hgrc on clone from largefile
91 92 # repo.
92 93 if not baseui.hasconfig('extensions', 'largefiles'):
93 94 log.debug('Explicitly disable largefiles extension for repo.')
94 95 baseui.setconfig('extensions', 'largefiles', '!')
95 96
96 97 return baseui
97 98
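Hypothetical shape of the `repo_config` iterable consumed above -- (section, option, value) triples mirroring hgrc layout:

repo_config = [
    ('extensions', 'largefiles', '!'),
    ('phases', 'publish', 'false'),
    ('ui', 'username', 'RhodeCode <system@example.com>'),
]
baseui = make_ui_from_config(repo_config)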
98 99
99 100 def reraise_safe_exceptions(func):
100 101 """Decorator for converting mercurial exceptions to something neutral."""
102
101 103 def wrapper(*args, **kwargs):
102 104 try:
103 105 return func(*args, **kwargs)
104 106 except (Abort, InterventionRequired) as e:
105 107 raise_from_original(exceptions.AbortException(e))
106 108 except RepoLookupError as e:
107 109 raise_from_original(exceptions.LookupException(e))
108 110 except RequirementError as e:
109 111 raise_from_original(exceptions.RequirementException(e))
110 112 except RepoError as e:
111 113 raise_from_original(exceptions.VcsException(e))
112 114 except LookupError as e:
113 115 raise_from_original(exceptions.LookupException(e))
114 116 except Exception as e:
115 117 if not hasattr(e, '_vcs_kind'):
116 118 log.exception("Unhandled exception in hg remote call")
117 119 raise_from_original(exceptions.UnhandledException(e))
118 120
119 121 raise
120 122 return wrapper
121 123
122 124
123 125 class MercurialFactory(RepoFactory):
124 126 repo_type = 'hg'
125 127
126 128 def _create_config(self, config, hooks=True):
127 129 if not hooks:
128 130 hooks_to_clean = frozenset((
129 131 'changegroup.repo_size', 'preoutgoing.pre_pull',
130 132 'outgoing.pull_logger', 'prechangegroup.pre_push'))
131 133 new_config = []
132 134 for section, option, value in config:
133 135 if section == 'hooks' and option in hooks_to_clean:
134 136 continue
135 137 new_config.append((section, option, value))
136 138 config = new_config
137 139
138 140 baseui = make_ui_from_config(config)
139 141 return baseui
140 142
141 143 def _create_repo(self, wire, create):
142 144 baseui = self._create_config(wire["config"])
143 145 return instance(baseui, wire["path"], create)
144 146
147 def repo(self, wire, create=False):
148 """
149 Get a repository instance for the given path.
150 """
151 return self._create_repo(wire, create)
145 152
146 class HgRemote(object):
153
154 class HgRemote(RemoteBase):
147 155
148 156 def __init__(self, factory):
149 157 self._factory = factory
150
151 158 self._bulk_methods = {
152 159 "affected_files": self.ctx_files,
153 160 "author": self.ctx_user,
154 161 "branch": self.ctx_branch,
155 162 "children": self.ctx_children,
156 163 "date": self.ctx_date,
157 164 "message": self.ctx_description,
158 165 "parents": self.ctx_parents,
159 166 "status": self.ctx_status,
160 167 "obsolete": self.ctx_obsolete,
161 168 "phase": self.ctx_phase,
162 169 "hidden": self.ctx_hidden,
163 170 "_file_paths": self.ctx_list,
164 171 }
165 172
166 173 def _get_ctx(self, repo, ref):
167 174 return get_ctx(repo, ref)
168 175
169 176 @reraise_safe_exceptions
170 177 def discover_hg_version(self):
171 178 from mercurial import util
172 179 return util.version()
173 180
174 181 @reraise_safe_exceptions
175 182 def is_empty(self, wire):
176 183 repo = self._factory.repo(wire)
177 184
178 185 try:
179 186 return len(repo) == 0
180 187 except Exception:
181 188 log.exception("failed to read object_store")
182 189 return False
183 190
184 191 @reraise_safe_exceptions
185 192 def archive_repo(self, archive_path, mtime, file_info, kind):
186 193 if kind == "tgz":
187 194 archiver = archival.tarit(archive_path, mtime, "gz")
188 195 elif kind == "tbz2":
189 196 archiver = archival.tarit(archive_path, mtime, "bz2")
190 197 elif kind == 'zip':
191 198 archiver = archival.zipit(archive_path, mtime)
192 199 else:
193 200 raise exceptions.ArchiveException()(
194 201 'Remote does not support: "%s".' % kind)
195 202
196 203 for f_path, f_mode, f_is_link, f_content in file_info:
197 204 archiver.addfile(f_path, f_mode, f_is_link, f_content)
198 205 archiver.done()
199 206
200 207 @reraise_safe_exceptions
201 208 def bookmarks(self, wire):
202 repo = self._factory.repo(wire)
203 return dict(repo._bookmarks)
209 cache_on, context_uid, repo_id = self._cache_on(wire)
210 @self.region.conditional_cache_on_arguments(condition=cache_on)
211 def _bookmarks(_context_uid, _repo_id):
212 repo = self._factory.repo(wire)
213 return dict(repo._bookmarks)
214
215 return _bookmarks(context_uid, repo_id)
204 216
205 217 @reraise_safe_exceptions
206 218 def branches(self, wire, normal, closed):
207 repo = self._factory.repo(wire)
208 iter_branches = repo.branchmap().iterbranches()
209 bt = {}
210 for branch_name, _heads, tip, is_closed in iter_branches:
211 if normal and not is_closed:
212 bt[branch_name] = tip
213 if closed and is_closed:
214 bt[branch_name] = tip
215
216 return bt
219 cache_on, context_uid, repo_id = self._cache_on(wire)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 def _branches(_context_uid, _repo_id, _normal, _closed):
222 repo = self._factory.repo(wire)
223 iter_branches = repo.branchmap().iterbranches()
224 bt = {}
225 for branch_name, _heads, tip, is_closed in iter_branches:
226 if normal and not is_closed:
227 bt[branch_name] = tip
228 if closed and is_closed:
229 bt[branch_name] = tip
217 230
218 @reraise_safe_exceptions
219 def bulk_request(self, wire, rev, pre_load):
220 result = {}
221 for attr in pre_load:
222 try:
223 method = self._bulk_methods[attr]
224 result[attr] = method(wire, rev)
225 except KeyError as e:
226 raise exceptions.VcsException(e)(
227 'Unknown bulk attribute: "%s"' % attr)
228 return result
231 return bt
229 232
230 @reraise_safe_exceptions
231 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
232 baseui = self._factory._create_config(wire["config"], hooks=hooks)
233 clone(baseui, source, dest, noupdate=not update_after_clone)
233 return _branches(context_uid, repo_id, normal, closed)
234 234
235 235 @reraise_safe_exceptions
236 def commitctx(
237 self, wire, message, parents, commit_time, commit_timezone,
238 user, files, extra, removed, updated):
239
240 repo = self._factory.repo(wire)
241 baseui = self._factory._create_config(wire['config'])
242 publishing = baseui.configbool('phases', 'publish')
243 if publishing:
244 new_commit = 'public'
245 else:
246 new_commit = 'draft'
247
248 def _filectxfn(_repo, ctx, path):
249 """
250 Marks given path as added/changed/removed in a given _repo. This is
251 for internal mercurial commit function.
252 """
253
254 # check if this path is removed
255 if path in removed:
256 # returning None is a way to mark node for removal
257 return None
236 def bulk_request(self, wire, commit_id, pre_load):
237 cache_on, context_uid, repo_id = self._cache_on(wire)
238 @self.region.conditional_cache_on_arguments(condition=cache_on)
239 def _bulk_request(_repo_id, _commit_id, _pre_load):
240 result = {}
241 for attr in pre_load:
242 try:
243 method = self._bulk_methods[attr]
244 result[attr] = method(wire, commit_id)
245 except KeyError as e:
246 raise exceptions.VcsException(e)(
247 'Unknown bulk attribute: "%s"' % attr)
248 return result
258 249
259 # check if this path is added
260 for node in updated:
261 if node['path'] == path:
262 return memfilectx(
263 _repo,
264 changectx=ctx,
265 path=node['path'],
266 data=node['content'],
267 islink=False,
268 isexec=bool(node['mode'] & stat.S_IXUSR),
269 copied=False)
270
271 raise exceptions.AbortException()(
272 "Given path haven't been marked as added, "
273 "changed or removed (%s)" % path)
274
275 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
276
277 commit_ctx = memctx(
278 repo=repo,
279 parents=parents,
280 text=message,
281 files=files,
282 filectxfn=_filectxfn,
283 user=user,
284 date=(commit_time, commit_timezone),
285 extra=extra)
286
287 n = repo.commitctx(commit_ctx)
288 new_id = hex(n)
289
290 return new_id
250 return _bulk_request(repo_id, commit_id, sorted(pre_load))
291 251
292 252 @reraise_safe_exceptions
293 def ctx_branch(self, wire, revision):
294 repo = self._factory.repo(wire)
295 ctx = self._get_ctx(repo, revision)
296 return ctx.branch()
253 def ctx_branch(self, wire, commit_id):
254 cache_on, context_uid, repo_id = self._cache_on(wire)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 def _ctx_branch(_repo_id, _commit_id):
257 repo = self._factory.repo(wire)
258 ctx = self._get_ctx(repo, commit_id)
259 return ctx.branch()
260 return _ctx_branch(repo_id, commit_id)
297 261
298 262 @reraise_safe_exceptions
299 def ctx_children(self, wire, revision):
300 repo = self._factory.repo(wire)
301 ctx = self._get_ctx(repo, revision)
302 return [child.rev() for child in ctx.children()]
303
304 @reraise_safe_exceptions
305 def ctx_date(self, wire, revision):
306 repo = self._factory.repo(wire)
307 ctx = self._get_ctx(repo, revision)
308 return ctx.date()
263 def ctx_date(self, wire, commit_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265 @self.region.conditional_cache_on_arguments(condition=cache_on)
266 def _ctx_date(_repo_id, _commit_id):
267 repo = self._factory.repo(wire)
268 ctx = self._get_ctx(repo, commit_id)
269 return ctx.date()
270 return _ctx_date(repo_id, commit_id)
309 271
310 272 @reraise_safe_exceptions
311 273 def ctx_description(self, wire, revision):
312 274 repo = self._factory.repo(wire)
313 275 ctx = self._get_ctx(repo, revision)
314 276 return ctx.description()
315 277
316 278 @reraise_safe_exceptions
317 def ctx_files(self, wire, revision):
318 repo = self._factory.repo(wire)
319 ctx = self._get_ctx(repo, revision)
320 return ctx.files()
279 def ctx_files(self, wire, commit_id):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 def _ctx_files(_repo_id, _commit_id):
283 repo = self._factory.repo(wire)
284 ctx = self._get_ctx(repo, commit_id)
285 return ctx.files()
286
287 return _ctx_files(repo_id, commit_id)
321 288
322 289 @reraise_safe_exceptions
323 290 def ctx_list(self, path, revision):
324 291 repo = self._factory.repo(path)
325 292 ctx = self._get_ctx(repo, revision)
326 293 return list(ctx)
327 294
328 295 @reraise_safe_exceptions
329 def ctx_parents(self, wire, revision):
330 repo = self._factory.repo(wire)
331 ctx = self._get_ctx(repo, revision)
332 return [parent.rev() for parent in ctx.parents()]
296 def ctx_parents(self, wire, commit_id):
297 cache_on, context_uid, repo_id = self._cache_on(wire)
298 @self.region.conditional_cache_on_arguments(condition=cache_on)
299 def _ctx_parents(_repo_id, _commit_id):
300 repo = self._factory.repo(wire)
301 ctx = self._get_ctx(repo, commit_id)
302 return [parent.hex() for parent in ctx.parents()
303 if not (parent.hidden() or parent.obsolete())]
304
305 return _ctx_parents(repo_id, commit_id)
306
307 @reraise_safe_exceptions
308 def ctx_children(self, wire, commit_id):
309 cache_on, context_uid, repo_id = self._cache_on(wire)
310 @self.region.conditional_cache_on_arguments(condition=cache_on)
311 def _ctx_children(_repo_id, _commit_id):
312 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, commit_id)
314 return [child.hex() for child in ctx.children()
315 if not (child.hidden() or child.obsolete())]
316
317 return _ctx_children(repo_id, commit_id)
333 318
334 319 @reraise_safe_exceptions
335 def ctx_phase(self, wire, revision):
336 repo = self._factory.repo(wire)
337 ctx = self._get_ctx(repo, revision)
338 # public=0, draft=1, secret=3
339 return ctx.phase()
320 def ctx_phase(self, wire, commit_id):
321 cache_on, context_uid, repo_id = self._cache_on(wire)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 def _ctx_phase(_context_uid, _repo_id, _commit_id):
324 repo = self._factory.repo(wire)
325 ctx = self._get_ctx(repo, commit_id)
326 # public=0, draft=1, secret=3
327 return ctx.phase()
328 return _ctx_phase(context_uid, repo_id, commit_id)
340 329
341 330 @reraise_safe_exceptions
342 def ctx_obsolete(self, wire, revision):
343 repo = self._factory.repo(wire)
344 ctx = self._get_ctx(repo, revision)
345 return ctx.obsolete()
331 def ctx_obsolete(self, wire, commit_id):
332 cache_on, context_uid, repo_id = self._cache_on(wire)
333 @self.region.conditional_cache_on_arguments(condition=cache_on)
334 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
335 repo = self._factory.repo(wire)
336 ctx = self._get_ctx(repo, commit_id)
337 return ctx.obsolete()
338 return _ctx_obsolete(context_uid, repo_id, commit_id)
346 339
347 340 @reraise_safe_exceptions
348 def ctx_hidden(self, wire, revision):
349 repo = self._factory.repo(wire)
350 ctx = self._get_ctx(repo, revision)
351 return ctx.hidden()
341 def ctx_hidden(self, wire, commit_id):
342 cache_on, context_uid, repo_id = self._cache_on(wire)
343 @self.region.conditional_cache_on_arguments(condition=cache_on)
344 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
345 repo = self._factory.repo(wire)
346 ctx = self._get_ctx(repo, commit_id)
347 return ctx.hidden()
348 return _ctx_hidden(context_uid, repo_id, commit_id)
352 349
353 350 @reraise_safe_exceptions
354 351 def ctx_substate(self, wire, revision):
355 352 repo = self._factory.repo(wire)
356 353 ctx = self._get_ctx(repo, revision)
357 354 return ctx.substate
358 355
359 356 @reraise_safe_exceptions
360 357 def ctx_status(self, wire, revision):
361 358 repo = self._factory.repo(wire)
362 359 ctx = self._get_ctx(repo, revision)
363 360 status = repo[ctx.p1().node()].status(other=ctx.node())
364 361 # object of status (odd, custom named tuple in mercurial) is not
365 362 # correctly serializable, we make it a list, as the underlying
366 363 # API expects this to be a list
367 364 return list(status)
368 365
369 366 @reraise_safe_exceptions
370 367 def ctx_user(self, wire, revision):
371 368 repo = self._factory.repo(wire)
372 369 ctx = self._get_ctx(repo, revision)
373 370 return ctx.user()
374 371
375 372 @reraise_safe_exceptions
376 373 def check_url(self, url, config):
377 374 _proto = None
378 375 if '+' in url[:url.find('://')]:
379 376 _proto = url[0:url.find('+')]
380 377 url = url[url.find('+') + 1:]
381 378 handlers = []
382 379 url_obj = url_parser(url)
383 380 test_uri, authinfo = url_obj.authinfo()
384 381 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
385 382 url_obj.query = obfuscate_qs(url_obj.query)
386 383
387 384 cleaned_uri = str(url_obj)
388 385 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
389 386
390 387 if authinfo:
391 388 # create a password manager
392 389 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
393 390 passmgr.add_password(*authinfo)
394 391
395 392 handlers.extend((httpbasicauthhandler(passmgr),
396 393 httpdigestauthhandler(passmgr)))
397 394
398 395 o = urllib2.build_opener(*handlers)
399 396 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
400 397 ('Accept', 'application/mercurial-0.1')]
401 398
402 399 q = {"cmd": 'between'}
403 400 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
404 401 qs = '?%s' % urllib.urlencode(q)
405 402 cu = "%s%s" % (test_uri, qs)
406 403 req = urllib2.Request(cu, None, {})
407 404
408 405 try:
409 406 log.debug("Trying to open URL %s", cleaned_uri)
410 407 resp = o.open(req)
411 408 if resp.code != 200:
412 409 raise exceptions.URLError()('Return Code is not 200')
413 410 except Exception as e:
414 411 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
415 412 # means it cannot be cloned
416 413 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
417 414
418 415 # now check if it's a proper hg repo, but don't do it for svn
419 416 try:
420 417 if _proto == 'svn':
421 418 pass
422 419 else:
423 420 # check for pure hg repos
424 421 log.debug(
425 422 "Verifying if URL is a Mercurial repository: %s",
426 423 cleaned_uri)
427 424 ui = make_ui_from_config(config)
428 425 peer_checker = makepeer(ui, url)
429 426 peer_checker.lookup('tip')
430 427 except Exception as e:
431 428 log.warning("URL is not a valid Mercurial repository: %s",
432 429 cleaned_uri)
433 430 raise exceptions.URLError(e)(
434 431 "url [%s] does not look like an hg repo org_exc: %s"
435 432 % (cleaned_uri, e))
436 433
437 434 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
438 435 return True
439 436
440 437 @reraise_safe_exceptions
441 def diff(
442 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
443 context):
438 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
444 439 repo = self._factory.repo(wire)
445 440
446 441 if file_filter:
447 442 match_filter = match(file_filter[0], '', [file_filter[1]])
448 443 else:
449 444 match_filter = file_filter
450 445 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
451 446
452 447 try:
453 448 return "".join(patch.diff(
454 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
449 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
455 450 except RepoLookupError as e:
456 451 raise exceptions.LookupException(e)()
457 452
458 453 @reraise_safe_exceptions
459 454 def node_history(self, wire, revision, path, limit):
460 repo = self._factory.repo(wire)
455 cache_on, context_uid, repo_id = self._cache_on(wire)
456 @self.region.conditional_cache_on_arguments(condition=cache_on)
457 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
458 repo = self._factory.repo(wire)
461 459
462 ctx = self._get_ctx(repo, revision)
463 fctx = ctx.filectx(path)
460 ctx = self._get_ctx(repo, revision)
461 fctx = ctx.filectx(path)
464 462
465 def history_iter():
466 limit_rev = fctx.rev()
467 for obj in reversed(list(fctx.filelog())):
468 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
471 continue
463 def history_iter():
464 limit_rev = fctx.rev()
465 for obj in reversed(list(fctx.filelog())):
466 obj = fctx.filectx(obj)
467 ctx = obj.changectx()
468 if ctx.hidden() or ctx.obsolete():
469 continue
472 470
473 if limit_rev >= obj.rev():
474 yield obj
471 if limit_rev >= obj.rev():
472 yield obj
475 473
476 history = []
477 for cnt, obj in enumerate(history_iter()):
478 if limit and cnt >= limit:
479 break
480 history.append(hex(obj.node()))
474 history = []
475 for cnt, obj in enumerate(history_iter()):
476 if limit and cnt >= limit:
477 break
478 history.append(hex(obj.node()))
481 479
482 return [x for x in history]
480 return [x for x in history]
481 return _node_history(context_uid, repo_id, revision, path, limit)
483 482
484 483 @reraise_safe_exceptions
485 484 def node_history_untill(self, wire, revision, path, limit):
486 repo = self._factory.repo(wire)
487 ctx = self._get_ctx(repo, revision)
488 fctx = ctx.filectx(path)
485 cache_on, context_uid, repo_id = self._cache_on(wire)
486 @self.region.conditional_cache_on_arguments(condition=cache_on)
487 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
488 repo = self._factory.repo(wire)
489 ctx = self._get_ctx(repo, revision)
490 fctx = ctx.filectx(path)
489 491
490 file_log = list(fctx.filelog())
491 if limit:
492 # Limit to the last n items
493 file_log = file_log[-limit:]
492 file_log = list(fctx.filelog())
493 if limit:
494 # Limit to the last n items
495 file_log = file_log[-limit:]
494 496
495 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
497 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
498 return _node_history_until(context_uid, repo_id, revision, path, limit)
496 499
497 500 @reraise_safe_exceptions
498 501 def fctx_annotate(self, wire, revision, path):
499 502 repo = self._factory.repo(wire)
500 503 ctx = self._get_ctx(repo, revision)
501 504 fctx = ctx.filectx(path)
502 505
503 506 result = []
504 507 for i, annotate_obj in enumerate(fctx.annotate(), 1):
505 508 ln_no = i
506 509 sha = hex(annotate_obj.fctx.node())
507 510 content = annotate_obj.text
508 511 result.append((ln_no, sha, content))
509 512 return result
510 513
511 514 @reraise_safe_exceptions
512 def fctx_data(self, wire, revision, path):
515 def fctx_node_data(self, wire, revision, path):
513 516 repo = self._factory.repo(wire)
514 517 ctx = self._get_ctx(repo, revision)
515 518 fctx = ctx.filectx(path)
516 519 return fctx.data()
517 520
518 521 @reraise_safe_exceptions
519 def fctx_flags(self, wire, revision, path):
520 repo = self._factory.repo(wire)
521 ctx = self._get_ctx(repo, revision)
522 fctx = ctx.filectx(path)
523 return fctx.flags()
522 def fctx_flags(self, wire, commit_id, path):
523 cache_on, context_uid, repo_id = self._cache_on(wire)
524 @self.region.conditional_cache_on_arguments(condition=cache_on)
525 def _fctx_flags(_repo_id, _commit_id, _path):
526 repo = self._factory.repo(wire)
527 ctx = self._get_ctx(repo, commit_id)
528 fctx = ctx.filectx(path)
529 return fctx.flags()
530
531 return _fctx_flags(repo_id, commit_id, path)
524 532
525 533 @reraise_safe_exceptions
526 def fctx_size(self, wire, revision, path):
527 repo = self._factory.repo(wire)
528 ctx = self._get_ctx(repo, revision)
529 fctx = ctx.filectx(path)
530 return fctx.size()
534 def fctx_size(self, wire, commit_id, path):
535 cache_on, context_uid, repo_id = self._cache_on(wire)
536 @self.region.conditional_cache_on_arguments(condition=cache_on)
537 def _fctx_size(_repo_id, _revision, _path):
538 repo = self._factory.repo(wire)
539 ctx = self._get_ctx(repo, commit_id)
540 fctx = ctx.filectx(path)
541 return fctx.size()
542 return _fctx_size(repo_id, commit_id, path)
531 543
532 544 @reraise_safe_exceptions
533 545 def get_all_commit_ids(self, wire, name):
534 repo = self._factory.repo(wire)
535 repo = repo.filtered(name)
536 revs = map(lambda x: hex(x[7]), repo.changelog.index)
537 return revs
546 cache_on, context_uid, repo_id = self._cache_on(wire)
547 @self.region.conditional_cache_on_arguments(condition=cache_on)
548 def _get_all_commit_ids(_context_uid, _repo_id, _name):
549 repo = self._factory.repo(wire)
550 repo = repo.filtered(name)
551 revs = map(lambda x: hex(x[7]), repo.changelog.index)
552 return revs
553 return _get_all_commit_ids(context_uid, repo_id, name)
538 554
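Why `hex(x[7])` above works: each entry of Mercurial's changelog.index is a tuple whose eighth field (index 7) is the binary node id,

    (offset_flags, data_len, uncomp_len, base_rev, link_rev, p1, p2, node)

so mapping hex over field 7 yields every commit hash in the filtered view.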
539 555 @reraise_safe_exceptions
540 556 def get_config_value(self, wire, section, name, untrusted=False):
541 557 repo = self._factory.repo(wire)
542 558 return repo.ui.config(section, name, untrusted=untrusted)
543 559
544 560 @reraise_safe_exceptions
545 def get_config_bool(self, wire, section, name, untrusted=False):
546 repo = self._factory.repo(wire)
547 return repo.ui.configbool(section, name, untrusted=untrusted)
561 def is_large_file(self, wire, commit_id, path):
562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
565 return largefiles.lfutil.isstandin(path)
566
567 return _is_large_file(context_uid, repo_id, commit_id, path)
548 568
549 569 @reraise_safe_exceptions
550 def get_config_list(self, wire, section, name, untrusted=False):
551 repo = self._factory.repo(wire)
552 return repo.ui.configlist(section, name, untrusted=untrusted)
570 def is_binary(self, wire, revision, path):
571 cache_on, context_uid, repo_id = self._cache_on(wire)
553 572
554 @reraise_safe_exceptions
555 def is_large_file(self, wire, path):
556 return largefiles.lfutil.isstandin(path)
573 @self.region.conditional_cache_on_arguments(condition=cache_on)
574 def _is_binary(_repo_id, _sha, _path):
575 repo = self._factory.repo(wire)
576 ctx = self._get_ctx(repo, revision)
577 fctx = ctx.filectx(path)
578 return fctx.isbinary()
579
580 return _is_binary(repo_id, revision, path)
557 581
558 582 @reraise_safe_exceptions
559 583 def in_largefiles_store(self, wire, sha):
560 584 repo = self._factory.repo(wire)
561 585 return largefiles.lfutil.instore(repo, sha)
562 586
563 587 @reraise_safe_exceptions
564 588 def in_user_cache(self, wire, sha):
565 589 repo = self._factory.repo(wire)
566 590 return largefiles.lfutil.inusercache(repo.ui, sha)
567 591
568 592 @reraise_safe_exceptions
569 593 def store_path(self, wire, sha):
570 594 repo = self._factory.repo(wire)
571 595 return largefiles.lfutil.storepath(repo, sha)
572 596
573 597 @reraise_safe_exceptions
574 598 def link(self, wire, sha, path):
575 599 repo = self._factory.repo(wire)
576 600 largefiles.lfutil.link(
577 601 largefiles.lfutil.usercachepath(repo.ui, sha), path)
578 602
579 603 @reraise_safe_exceptions
580 604 def localrepository(self, wire, create=False):
581 605 self._factory.repo(wire, create=create)
582 606
583 607 @reraise_safe_exceptions
584 608 def lookup(self, wire, revision, both):
585
586 repo = self._factory.repo(wire)
587
588 if isinstance(revision, int):
589 # NOTE(marcink):
590 # since Mercurial doesn't support negative indexes properly
591 # we need to shift by one to get the proper index, e.g.
592 # repo[-1] => repo[-2]
593 # repo[0] => repo[-1]
594 if revision <= 0:
595 revision = revision - 1
596 try:
597 ctx = self._get_ctx(repo, revision)
598 except (TypeError, RepoLookupError) as e:
599 e._org_exc_tb = traceback.format_exc()
600 raise exceptions.LookupException(e)(revision)
601 except LookupError as e:
602 e._org_exc_tb = traceback.format_exc()
603 raise exceptions.LookupException(e)(e.name)
609 cache_on, context_uid, repo_id = self._cache_on(wire)
610 @self.region.conditional_cache_on_arguments(condition=cache_on)
611 def _lookup(_context_uid, _repo_id, _revision, _both):
604 612
605 if not both:
606 return ctx.hex()
607
608 ctx = repo[ctx.hex()]
609 return ctx.hex(), ctx.rev()
613 repo = self._factory.repo(wire)
614 rev = _revision
615 if isinstance(rev, int):
616 # NOTE(marcink):
617 # since Mercurial doesn't support negative indexes properly
618 # we need to shift by one to get the proper index, e.g.
619 # repo[-1] => repo[-2]
620 # repo[0] => repo[-1]
621 if rev <= 0:
622 rev = rev - 1
623 try:
624 ctx = self._get_ctx(repo, rev)
625 except (TypeError, RepoLookupError) as e:
626 e._org_exc_tb = traceback.format_exc()
627 raise exceptions.LookupException(e)(rev)
628 except LookupError as e:
629 e._org_exc_tb = traceback.format_exc()
630 raise exceptions.LookupException(e)(e.name)
610 631
611 @reraise_safe_exceptions
612 def pull(self, wire, url, commit_ids=None):
613 repo = self._factory.repo(wire)
614 # Disable any prompts for this repo
615 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
632 if not both:
633 return ctx.hex()
616 634
617 remote = peer(repo, {}, url)
618 # Disable any prompts for this remote
619 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
635 ctx = repo[ctx.hex()]
636 return ctx.hex(), ctx.rev()
620 637
621 if commit_ids:
622 commit_ids = [bin(commit_id) for commit_id in commit_ids]
623
624 return exchange.pull(
625 repo, remote, heads=commit_ids, force=None).cgresult
638 return _lookup(context_uid, repo_id, revision, both)
626 639
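The NOTE above about shifted indexes is easy to verify with a tiny worked example mirroring the `rev <= 0` branch:

    def shift(rev):
        # mirrors the shift in lookup(): non-positive indexes move down by one
        return rev - 1 if rev <= 0 else rev

    assert shift(0) == -1    # repo[0]  => repo[-1]
    assert shift(-1) == -2   # repo[-1] => repo[-2]
    assert shift(5) == 5     # positive revs pass through unchanged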
627 640 @reraise_safe_exceptions
628 641 def sync_push(self, wire, url):
629 642 if not self.check_url(url, wire['config']):
630 643 return
631 644
632 645 repo = self._factory.repo(wire)
633 646
634 647 # Disable any prompts for this repo
635 648 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
636 649
637 650 bookmarks = dict(repo._bookmarks).keys()
638 651 remote = peer(repo, {}, url)
639 652 # Disable any prompts for this remote
640 653 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
641 654
642 655 return exchange.push(
643 656 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
644 657
645 658 @reraise_safe_exceptions
646 659 def revision(self, wire, rev):
647 660 repo = self._factory.repo(wire)
648 661 ctx = self._get_ctx(repo, rev)
649 662 return ctx.rev()
650 663
651 664 @reraise_safe_exceptions
652 def rev_range(self, wire, filter):
653 repo = self._factory.repo(wire)
654 revisions = [rev for rev in revrange(repo, filter)]
655 return revisions
665 def rev_range(self, wire, commit_filter):
666 cache_on, context_uid, repo_id = self._cache_on(wire)
667
668 @self.region.conditional_cache_on_arguments(condition=cache_on)
669 def _rev_range(_context_uid, _repo_id, _filter):
670 repo = self._factory.repo(wire)
671 revisions = [rev for rev in revrange(repo, commit_filter)]
672 return revisions
673
674 return _rev_range(context_uid, repo_id, sorted(commit_filter))
656 675
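Note the cache-key detail above: the wrapper is invoked with `sorted(commit_filter)` so that two orderings of the same filter share a single cache entry, while the closure still evaluates the caller's original filter. A quick illustration:

    # two logically identical filters collapse to one cache key once sorted
    f1 = ['branch(default)', 'tag()']
    f2 = ['tag()', 'branch(default)']
    assert sorted(f1) == sorted(f2)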
657 676 @reraise_safe_exceptions
658 677 def rev_range_hash(self, wire, node):
659 678 repo = self._factory.repo(wire)
660 679
661 680 def get_revs(repo, rev_opt):
662 681 if rev_opt:
663 682 revs = revrange(repo, rev_opt)
664 683 if len(revs) == 0:
665 684 return (nullrev, nullrev)
666 685 return max(revs), min(revs)
667 686 else:
668 687 return len(repo) - 1, 0
669 688
670 689 stop, start = get_revs(repo, [node + ':'])
671 690 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
672 691 return revs
673 692
674 693 @reraise_safe_exceptions
675 694 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
676 695 other_path = kwargs.pop('other_path', None)
677 696
678 697 # case when we want to compare two independent repositories
679 698 if other_path and other_path != wire["path"]:
680 699 baseui = self._factory._create_config(wire["config"])
681 700 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
682 701 else:
683 702 repo = self._factory.repo(wire)
684 703 return list(repo.revs(rev_spec, *args))
685 704
686 705 @reraise_safe_exceptions
687 def strip(self, wire, revision, update, backup):
688 repo = self._factory.repo(wire)
689 ctx = self._get_ctx(repo, revision)
690 hgext_strip(
691 repo.baseui, repo, ctx.node(), update=update, backup=backup)
692
693 @reraise_safe_exceptions
694 706 def verify(self, wire):
695 707 repo = self._factory.repo(wire)
696 708 baseui = self._factory._create_config(wire['config'])
697 709 baseui.setconfig('ui', 'quiet', 'false')
698 710 output = io.BytesIO()
699 711
700 712 def write(data, **unused_kwargs):
701 713 output.write(data)
702 714 baseui.write = write
703 715
704 716 repo.ui = baseui
705 717 verify.verify(repo)
706 718 return output.getvalue()
707 719
708 720 @reraise_safe_exceptions
709 def tag(self, wire, name, revision, message, local, user,
710 tag_time, tag_timezone):
721 def hg_update_cache(self, wire):
711 722 repo = self._factory.repo(wire)
712 ctx = self._get_ctx(repo, revision)
713 node = ctx.node()
723 baseui = self._factory._create_config(wire['config'])
724 baseui.setconfig('ui', 'quiet', 'false')
725 output = io.BytesIO()
714 726
715 date = (tag_time, tag_timezone)
716 try:
717 hg_tag.tag(repo, name, node, message, local, user, date)
718 except Abort as e:
719 log.exception("Tag operation aborted")
720 # Exception can contain unicode which we convert
721 raise exceptions.AbortException(e)(repr(e))
727 def write(data, **unused_kwargs):
728 output.write(data)
729 baseui.write = write
730
731 repo.ui = baseui
732 with repo.wlock(), repo.lock():
733 repo.updatecaches(full=True)
734
735 return output.getvalue()
722 736
723 737 @reraise_safe_exceptions
724 738 def tags(self, wire):
725 repo = self._factory.repo(wire)
726 return repo.tags()
739 cache_on, context_uid, repo_id = self._cache_on(wire)
740 @self.region.conditional_cache_on_arguments(condition=cache_on)
741 def _tags(_context_uid, _repo_id):
742 repo = self._factory.repo(wire)
743 return repo.tags()
744
745 return _tags(context_uid, repo_id)
727 746
728 747 @reraise_safe_exceptions
729 748 def update(self, wire, node=None, clean=False):
730 749 repo = self._factory.repo(wire)
731 750 baseui = self._factory._create_config(wire['config'])
732 751 commands.update(baseui, repo, node=node, clean=clean)
733 752
734 753 @reraise_safe_exceptions
735 754 def identify(self, wire):
736 755 repo = self._factory.repo(wire)
737 756 baseui = self._factory._create_config(wire['config'])
738 757 output = io.BytesIO()
739 758 baseui.write = output.write
740 759 # This is required to get a full node id
741 760 baseui.debugflag = True
742 761 commands.identify(baseui, repo, id=True)
743 762
744 763 return output.getvalue()
745 764
746 765 @reraise_safe_exceptions
747 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
748 hooks=True):
749 repo = self._factory.repo(wire)
750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
751
752 # Mercurial internally has a lot of logic that checks ONLY whether
753 # an option is defined, so we only pass the options that are set
754 opts = {}
755 if bookmark:
756 opts['bookmark'] = bookmark
757 if branch:
758 opts['branch'] = branch
759 if revision:
760 opts['rev'] = revision
761
762 commands.pull(baseui, repo, source, **opts)
763
764 @reraise_safe_exceptions
765 766 def heads(self, wire, branch=None):
766 767 repo = self._factory.repo(wire)
767 768 baseui = self._factory._create_config(wire['config'])
768 769 output = io.BytesIO()
769 770
770 771 def write(data, **unused_kwargs):
771 772 output.write(data)
772 773
773 774 baseui.write = write
774 775 if branch:
775 776 args = [branch]
776 777 else:
777 778 args = []
778 779 commands.heads(baseui, repo, template='{node} ', *args)
779 780
780 781 return output.getvalue()
781 782
782 783 @reraise_safe_exceptions
783 784 def ancestor(self, wire, revision1, revision2):
784 785 repo = self._factory.repo(wire)
785 786 changelog = repo.changelog
786 787 lookup = repo.lookup
787 788 a = changelog.ancestor(lookup(revision1), lookup(revision2))
788 789 return hex(a)
789 790
790 791 @reraise_safe_exceptions
791 def push(self, wire, revisions, dest_path, hooks=True,
792 push_branches=False):
792 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
793 baseui = self._factory._create_config(wire["config"], hooks=hooks)
794 clone(baseui, source, dest, noupdate=not update_after_clone)
795
796 @reraise_safe_exceptions
797 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
798
799 repo = self._factory.repo(wire)
800 baseui = self._factory._create_config(wire['config'])
801 publishing = baseui.configbool('phases', 'publish')
802 if publishing:
803 new_commit = 'public'
804 else:
805 new_commit = 'draft'
806
807 def _filectxfn(_repo, ctx, path):
808 """
809 Marks given path as added/changed/removed in a given _repo. This is
810 for internal mercurial commit function.
811 """
812
813 # check if this path is removed
814 if path in removed:
815 # returning None is a way to mark node for removal
816 return None
817
818 # check if this path is added
819 for node in updated:
820 if node['path'] == path:
821 return memfilectx(
822 _repo,
823 changectx=ctx,
824 path=node['path'],
825 data=node['content'],
826 islink=False,
827 isexec=bool(node['mode'] & stat.S_IXUSR),
828 copysource=False)
829
830 raise exceptions.AbortException()(
831 "Given path haven't been marked as added, "
832 "changed or removed (%s)" % path)
833
834 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
835
836 commit_ctx = memctx(
837 repo=repo,
838 parents=parents,
839 text=message,
840 files=files,
841 filectxfn=_filectxfn,
842 user=user,
843 date=(commit_time, commit_timezone),
844 extra=extra)
845
846 n = repo.commitctx(commit_ctx)
847 new_id = hex(n)
848
849 return new_id
850
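For reference, `_filectxfn` above consumes plain dicts in `updated` plus path lists in `files`/`removed`. A hypothetical node dict (illustrative values only, not taken from the changeset) showing the executable-bit check:

    import stat

    # hypothetical node as expected by _filectxfn above
    node = {
        'path': 'README.rst',
        'content': 'hello\n',
        'mode': 0o100644,  # tested against stat.S_IXUSR for isexec
    }
    assert not bool(node['mode'] & stat.S_IXUSR)  # 0o644 is not executable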
851 @reraise_safe_exceptions
852 def pull(self, wire, url, commit_ids=None):
853 repo = self._factory.repo(wire)
854 # Disable any prompts for this repo
855 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
856
857 remote = peer(repo, {}, url)
858 # Disable any prompts for this remote
859 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
860
861 if commit_ids:
862 commit_ids = [bin(commit_id) for commit_id in commit_ids]
863
864 return exchange.pull(
865 repo, remote, heads=commit_ids, force=None).cgresult
866
867 @reraise_safe_exceptions
868 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
869 repo = self._factory.repo(wire)
870 baseui = self._factory._create_config(wire['config'], hooks=hooks)
871
872 # Mercurial internally has a lot of logic that checks ONLY whether
873 # an option is defined, so we only pass the options that are set
874 opts = {}
875 if bookmark:
876 opts['bookmark'] = bookmark
877 if branch:
878 opts['branch'] = branch
879 if revision:
880 opts['rev'] = revision
881
882 commands.pull(baseui, repo, source, **opts)
883
884 @reraise_safe_exceptions
885 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
793 886 repo = self._factory.repo(wire)
794 887 baseui = self._factory._create_config(wire['config'], hooks=hooks)
795 888 commands.push(baseui, repo, dest=dest_path, rev=revisions,
796 889 new_branch=push_branches)
797 890
798 891 @reraise_safe_exceptions
892 def strip(self, wire, revision, update, backup):
893 repo = self._factory.repo(wire)
894 ctx = self._get_ctx(repo, revision)
895 hgext_strip(
896 repo.baseui, repo, ctx.node(), update=update, backup=backup)
897
898 @reraise_safe_exceptions
899 def get_unresolved_files(self, wire):
900 repo = self._factory.repo(wire)
901
902 log.debug('Calculating unresolved files for repo: %s', repo)
903 output = io.BytesIO()
904
905 def write(data, **unused_kwargs):
906 output.write(data)
907
908 baseui = self._factory._create_config(wire['config'])
909 baseui.write = write
910
911 commands.resolve(baseui, repo, list=True)
912 unresolved = output.getvalue().splitlines(0)
913 return unresolved
914
915 @reraise_safe_exceptions
799 916 def merge(self, wire, revision):
800 917 repo = self._factory.repo(wire)
801 918 baseui = self._factory._create_config(wire['config'])
802 919 repo.ui.setconfig('ui', 'merge', 'internal:dump')
803 920
804 921 # If subrepositories are used, Mercurial prompts the user on merge
805 922 # conflicts or differing subrepository sources. By setting the
806 923 # interactive flag to `False`, Mercurial doesn't prompt the user
807 924 # but instead uses a default value.
808 925 repo.ui.setconfig('ui', 'interactive', False)
809 926 commands.merge(baseui, repo, rev=revision)
810 927
811 928 @reraise_safe_exceptions
812 929 def merge_state(self, wire):
813 930 repo = self._factory.repo(wire)
814 931 repo.ui.setconfig('ui', 'merge', 'internal:dump')
815 932
816 933 # If subrepositories are used, Mercurial prompts the user on merge
817 934 # conflicts or differing subrepository sources. By setting the
818 935 # interactive flag to `False`, Mercurial doesn't prompt the user
819 936 # but instead uses a default value.
820 937 repo.ui.setconfig('ui', 'interactive', False)
821 938 ms = hg_merge.mergestate(repo)
822 939 return [x for x in ms.unresolved()]
823 940
824 941 @reraise_safe_exceptions
825 942 def commit(self, wire, message, username, close_branch=False):
826 943 repo = self._factory.repo(wire)
827 944 baseui = self._factory._create_config(wire['config'])
828 945 repo.ui.setconfig('ui', 'username', username)
829 946 commands.commit(baseui, repo, message=message, close_branch=close_branch)
830 947
831
832 948 @reraise_safe_exceptions
833 949 def rebase(self, wire, source=None, dest=None, abort=False):
834 950 repo = self._factory.repo(wire)
835 951 baseui = self._factory._create_config(wire['config'])
836 952 repo.ui.setconfig('ui', 'merge', 'internal:dump')
837 rebase.rebase(
838 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
953 # If subrepositories are used, Mercurial prompts the user on merge
954 # conflicts or differing subrepository sources. By setting the
955 # interactive flag to `False`, Mercurial doesn't prompt the user
956 # but instead uses a default value.
957 repo.ui.setconfig('ui', 'interactive', False)
958 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
959
960 @reraise_safe_exceptions
961 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
962 repo = self._factory.repo(wire)
963 ctx = self._get_ctx(repo, revision)
964 node = ctx.node()
965
966 date = (tag_time, tag_timezone)
967 try:
968 hg_tag.tag(repo, name, node, message, local, user, date)
969 except Abort as e:
970 log.exception("Tag operation aborted")
971 # Exception can contain unicode which we convert
972 raise exceptions.AbortException(e)(repr(e))
839 973
840 974 @reraise_safe_exceptions
841 975 def bookmark(self, wire, bookmark, revision=None):
842 976 repo = self._factory.repo(wire)
843 977 baseui = self._factory._create_config(wire['config'])
844 978 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
845 979
846 980 @reraise_safe_exceptions
847 981 def install_hooks(self, wire, force=False):
848 982 # we don't need any special hooks for Mercurial
849 983 pass
850 984
851 985 @reraise_safe_exceptions
852 986 def get_hooks_info(self, wire):
853 987 return {
854 988 'pre_version': vcsserver.__version__,
855 989 'post_version': vcsserver.__version__,
856 990 }
@@ -1,711 +1,722 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver.hgcompat import get_ctx
37 36
38 37 log = logging.getLogger(__name__)
39 38
40 39
41 40 class HooksHttpClient(object):
42 41 connection = None
43 42
44 43 def __init__(self, hooks_uri):
45 44 self.hooks_uri = hooks_uri
46 45
47 46 def __call__(self, method, extras):
48 47 connection = HTTPConnection(self.hooks_uri)
49 48 body = self._serialize(method, extras)
50 49 try:
51 50 connection.request('POST', '/', body)
52 51 except Exception:
53 52 log.error('Connection failed on %s', connection)
54 53 raise
55 54 response = connection.getresponse()
56 55
57 56 response_data = response.read()
58 57
59 58 try:
60 59 return json.loads(response_data)
61 60 except Exception:
62 61 log.exception('Failed to decode hook response json data. '
63 62 'response_code:%s, raw_data:%s',
64 63 response.status, response_data)
65 64 raise
66 65
67 66 def _serialize(self, hook_name, extras):
68 67 data = {
69 68 'method': hook_name,
70 69 'extras': extras
71 70 }
72 71 return json.dumps(data)
73 72
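The hook transport above stays deliberately simple: one JSON object POSTed to `hooks_uri`, one JSON object back. A sketch of both sides of the exchange (illustrative values):

    import json

    # request body as built by _serialize() above
    body = json.dumps({'method': 'pre_push', 'extras': {'SSH': False}})

    # a well-formed reply carries at least 'status' and 'output'
    reply = json.loads('{"status": 0, "output": ""}')
    assert reply['status'] == 0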
74 73
75 74 class HooksDummyClient(object):
76 75 def __init__(self, hooks_module):
77 76 self._hooks_module = importlib.import_module(hooks_module)
78 77
79 78 def __call__(self, hook_name, extras):
80 79 with self._hooks_module.Hooks() as hooks:
81 80 return getattr(hooks, hook_name)(extras)
82 81
83 82
83 class HooksShadowRepoClient(object):
84
85 def __call__(self, hook_name, extras):
86 return {'output': '', 'status': 0}
87
88
84 89 class RemoteMessageWriter(object):
85 90 """Writer base class."""
86 91 def write(self, message):
87 92 raise NotImplementedError()
88 93
89 94
90 95 class HgMessageWriter(RemoteMessageWriter):
91 96 """Writer that knows how to send messages to mercurial clients."""
92 97
93 98 def __init__(self, ui):
94 99 self.ui = ui
95 100
96 101 def write(self, message):
97 102 # TODO: Check why the quiet flag is set by default.
98 103 old = self.ui.quiet
99 104 self.ui.quiet = False
100 105 self.ui.status(message.encode('utf-8'))
101 106 self.ui.quiet = old
102 107
103 108
104 109 class GitMessageWriter(RemoteMessageWriter):
105 110 """Writer that knows how to send messages to git clients."""
106 111
107 112 def __init__(self, stdout=None):
108 113 self.stdout = stdout or sys.stdout
109 114
110 115 def write(self, message):
111 116 self.stdout.write(message.encode('utf-8'))
112 117
113 118
114 119 class SvnMessageWriter(RemoteMessageWriter):
115 120 """Writer that knows how to send messages to svn clients."""
116 121
117 122 def __init__(self, stderr=None):
118 123 # SVN needs data sent to stderr for back-to-client messaging
119 124 self.stderr = stderr or sys.stderr
120 125
121 126 def write(self, message):
122 127 self.stderr.write(message.encode('utf-8'))
123 128
124 129
125 130 def _handle_exception(result):
126 131 exception_class = result.get('exception')
127 132 exception_traceback = result.get('exception_traceback')
128 133
129 134 if exception_traceback:
130 135 log.error('Got traceback from remote call:%s', exception_traceback)
131 136
132 137 if exception_class == 'HTTPLockedRC':
133 138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
134 139 elif exception_class == 'HTTPBranchProtected':
135 140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
136 141 elif exception_class == 'RepositoryError':
137 142 raise exceptions.VcsException()(*result['exception_args'])
138 143 elif exception_class:
139 144 raise Exception('Got remote exception "%s" with args "%s"' %
140 145 (exception_class, result['exception_args']))
141 146
142 147
143 148 def _get_hooks_client(extras):
144 if 'hooks_uri' in extras:
145 protocol = extras.get('hooks_protocol')
149 hooks_uri = extras.get('hooks_uri')
150 is_shadow_repo = extras.get('is_shadow_repo')
151 if hooks_uri:
146 152 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
154 return HooksShadowRepoClient()
147 155 else:
148 156 return HooksDummyClient(extras['hooks_module'])
149 157
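The dispatch above now has three outcomes; the new shadow-repo client short-circuits hook execution for shadow (merge-simulation) repositories. A quick check of the priority order, using the classes defined in this file:

    # hooks_uri wins, then shadow repos, then the direct-import dummy client
    client = _get_hooks_client({'hooks_uri': '127.0.0.1:9999'})
    assert isinstance(client, HooksHttpClient)

    client = _get_hooks_client({'is_shadow_repo': True})
    assert isinstance(client, HooksShadowRepoClient)
    assert client('post_push', {}) == {'output': '', 'status': 0}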
150 158
151 159 def _call_hook(hook_name, extras, writer):
152 160 hooks_client = _get_hooks_client(extras)
153 161 log.debug('Hooks, using client:%s', hooks_client)
154 162 result = hooks_client(hook_name, extras)
155 163 log.debug('Hooks got result: %s', result)
156 164
157 165 _handle_exception(result)
158 166 writer.write(result['output'])
159 167
160 168 return result['status']
161 169
162 170
163 171 def _extras_from_ui(ui):
164 172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
165 173 if not hook_data:
166 174 # maybe it's inside environ ?
167 175 env_hook_data = os.environ.get('RC_SCM_DATA')
168 176 if env_hook_data:
169 177 hook_data = env_hook_data
170 178
171 179 extras = {}
172 180 if hook_data:
173 181 extras = json.loads(hook_data)
174 182 return extras
175 183
176 184
177 185 def _rev_range_hash(repo, node, check_heads=False):
186 from vcsserver.hgcompat import get_ctx
178 187
179 188 commits = []
180 189 revs = []
181 190 start = get_ctx(repo, node).rev()
182 191 end = len(repo)
183 192 for rev in range(start, end):
184 193 revs.append(rev)
185 194 ctx = get_ctx(repo, rev)
186 195 commit_id = mercurial.node.hex(ctx.node())
187 196 branch = ctx.branch()
188 197 commits.append((commit_id, branch))
189 198
190 199 parent_heads = []
191 200 if check_heads:
192 201 parent_heads = _check_heads(repo, start, end, revs)
193 202 return commits, parent_heads
194 203
195 204
196 205 def _check_heads(repo, start, end, commits):
206 from vcsserver.hgcompat import get_ctx
197 207 changelog = repo.changelog
198 208 parents = set()
199 209
200 210 for new_rev in commits:
201 211 for p in changelog.parentrevs(new_rev):
202 212 if p == mercurial.node.nullrev:
203 213 continue
204 214 if p < start:
205 215 parents.add(p)
206 216
207 217 for p in parents:
208 218 branch = get_ctx(repo, p).branch()
209 219 # The heads descending from that parent, on the same branch
210 220 parent_heads = set([p])
211 221 reachable = set([p])
212 222 for x in xrange(p + 1, end):
213 223 if get_ctx(repo, x).branch() != branch:
214 224 continue
215 225 for pp in changelog.parentrevs(x):
216 226 if pp in reachable:
217 227 reachable.add(x)
218 228 parent_heads.discard(pp)
219 229 parent_heads.add(x)
220 230 # More than one head? Suggest merging
221 231 if len(parent_heads) > 1:
222 232 return list(parent_heads)
223 233
224 234 return []
225 235
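A toy model of the head-tracking walk above (sketch): when a pushed rev gives an existing rev a second child on the same branch, two heads survive and the hook can suggest a merge.

    def surviving_heads(parents_of, revs):
        # revs ascending; a rev stops being a head once a child shows up
        heads = set()
        for r in revs:
            heads.add(r)
            for p in parents_of.get(r, ()):
                heads.discard(p)
        return heads

    # rev 1 gains two children (2 and 3) -> two heads, merge suggested
    assert surviving_heads({1: (0,), 2: (1,), 3: (1,)}, [0, 1, 2, 3]) == {2, 3}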
226 236
227 237 def _get_git_env():
228 238 env = {}
229 239 for k, v in os.environ.items():
230 240 if k.startswith('GIT'):
231 241 env[k] = v
232 242
233 243 # serialized version
234 244 return [(k, v) for k, v in env.items()]
235 245
236 246
237 247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
238 248 env = {}
239 249 for k, v in os.environ.items():
240 250 if k.startswith('HG'):
241 251 env[k] = v
242 252
243 253 env['HG_NODE'] = old_rev
244 254 env['HG_NODE_LAST'] = new_rev
245 255 env['HG_TXNID'] = txnid
246 256 env['HG_PENDING'] = repo_path
247 257
248 258 return [(k, v) for k, v in env.items()]
249 259
250 260
251 261 def repo_size(ui, repo, **kwargs):
252 262 extras = _extras_from_ui(ui)
253 263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
254 264
255 265
256 266 def pre_pull(ui, repo, **kwargs):
257 267 extras = _extras_from_ui(ui)
258 268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
259 269
260 270
261 271 def pre_pull_ssh(ui, repo, **kwargs):
262 272 extras = _extras_from_ui(ui)
263 273 if extras and extras.get('SSH'):
264 274 return pre_pull(ui, repo, **kwargs)
265 275 return 0
266 276
267 277
268 278 def post_pull(ui, repo, **kwargs):
269 279 extras = _extras_from_ui(ui)
270 280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
271 281
272 282
273 283 def post_pull_ssh(ui, repo, **kwargs):
274 284 extras = _extras_from_ui(ui)
275 285 if extras and extras.get('SSH'):
276 286 return post_pull(ui, repo, **kwargs)
277 287 return 0
278 288
279 289
280 290 def pre_push(ui, repo, node=None, **kwargs):
281 291 """
282 292 Mercurial pre_push hook
283 293 """
284 294 extras = _extras_from_ui(ui)
285 295 detect_force_push = extras.get('detect_force_push')
286 296
287 297 rev_data = []
288 298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
289 299 branches = collections.defaultdict(list)
290 300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
291 301 for commit_id, branch in commits:
292 302 branches[branch].append(commit_id)
293 303
294 304 for branch, commits in branches.items():
295 305 old_rev = kwargs.get('node_last') or commits[0]
296 306 rev_data.append({
297 307 'total_commits': len(commits),
298 308 'old_rev': old_rev,
299 309 'new_rev': commits[-1],
300 310 'ref': '',
301 311 'type': 'branch',
302 312 'name': branch,
303 313 })
304 314
305 315 for push_ref in rev_data:
306 316 push_ref['multiple_heads'] = _heads
307 317
308 318 repo_path = os.path.join(
309 319 extras.get('repo_store', ''), extras.get('repository', ''))
310 320 push_ref['hg_env'] = _get_hg_env(
311 321 old_rev=push_ref['old_rev'],
312 322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
313 323 repo_path=repo_path)
314 324
315 325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
316 326 extras['commit_ids'] = rev_data
317 327
318 328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
319 329
320 330
321 331 def pre_push_ssh(ui, repo, node=None, **kwargs):
322 332 extras = _extras_from_ui(ui)
323 333 if extras.get('SSH'):
324 334 return pre_push(ui, repo, node, **kwargs)
325 335
326 336 return 0
327 337
328 338
329 339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
330 340 """
331 341 Mercurial pre_push hook for SSH
332 342 """
333 343 extras = _extras_from_ui(ui)
334 344 if extras.get('SSH'):
335 345 permission = extras['SSH_PERMISSIONS']
336 346
337 347 if 'repository.write' == permission or 'repository.admin' == permission:
338 348 return 0
339 349
340 350 # non-zero ret code
341 351 return 1
342 352
343 353 return 0
344 354
345 355
346 356 def post_push(ui, repo, node, **kwargs):
347 357 """
348 358 Mercurial post_push hook
349 359 """
350 360 extras = _extras_from_ui(ui)
351 361
352 362 commit_ids = []
353 363 branches = []
354 364 bookmarks = []
355 365 tags = []
356 366
357 367 commits, _heads = _rev_range_hash(repo, node)
358 368 for commit_id, branch in commits:
359 369 commit_ids.append(commit_id)
360 370 if branch not in branches:
361 371 branches.append(branch)
362 372
363 373 if hasattr(ui, '_rc_pushkey_branches'):
364 374 bookmarks = ui._rc_pushkey_branches
365 375
366 376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
367 377 extras['commit_ids'] = commit_ids
368 378 extras['new_refs'] = {
369 379 'branches': branches,
370 380 'bookmarks': bookmarks,
371 381 'tags': tags
372 382 }
373 383
374 384 return _call_hook('post_push', extras, HgMessageWriter(ui))
375 385
376 386
377 387 def post_push_ssh(ui, repo, node, **kwargs):
378 388 """
379 389 Mercurial post_push hook for SSH
380 390 """
381 391 if _extras_from_ui(ui).get('SSH'):
382 392 return post_push(ui, repo, node, **kwargs)
383 393 return 0
384 394
385 395
386 396 def key_push(ui, repo, **kwargs):
397 from vcsserver.hgcompat import get_ctx
387 398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
388 399 # store new bookmarks in our UI object propagated later to post_push
389 400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
390 401 return
391 402
392 403
393 404 # backward compat
394 405 log_pull_action = post_pull
395 406
396 407 # backward compat
397 408 log_push_action = post_push
398 409
399 410
400 411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
401 412 """
402 413 Old hook name: keep here for backward compatibility.
403 414
404 415 This is only required when the installed git hooks are not upgraded.
405 416 """
406 417 pass
407 418
408 419
409 420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
410 421 """
411 422 Old hook name: keep here for backward compatibility.
412 423
413 424 This is only required when the installed git hooks are not upgraded.
414 425 """
415 426 pass
416 427
417 428
418 429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
419 430
420 431
421 432 def git_pre_pull(extras):
422 433 """
423 434 Pre pull hook.
424 435
425 436 :param extras: dictionary containing the keys defined in simplevcs
426 437 :type extras: dict
427 438
428 439 :return: status code of the hook. 0 for success.
429 440 :rtype: int
430 441 """
431 442 if 'pull' not in extras['hooks']:
432 443 return HookResponse(0, '')
433 444
434 445 stdout = io.BytesIO()
435 446 try:
436 447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
437 448 except Exception as error:
438 449 status = 128
439 450 stdout.write('ERROR: %s\n' % str(error))
440 451
441 452 return HookResponse(status, stdout.getvalue())
442 453
443 454
444 455 def git_post_pull(extras):
445 456 """
446 457 Post pull hook.
447 458
448 459 :param extras: dictionary containing the keys defined in simplevcs
449 460 :type extras: dict
450 461
451 462 :return: status code of the hook. 0 for success.
452 463 :rtype: int
453 464 """
454 465 if 'pull' not in extras['hooks']:
455 466 return HookResponse(0, '')
456 467
457 468 stdout = io.BytesIO()
458 469 try:
459 470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
460 471 except Exception as error:
461 472 status = 128
462 473 stdout.write('ERROR: %s\n' % error)
463 474
464 475 return HookResponse(status, stdout.getvalue())
465 476
466 477
467 478 def _parse_git_ref_lines(revision_lines):
468 479 rev_data = []
469 480 for revision_line in revision_lines or []:
470 481 old_rev, new_rev, ref = revision_line.strip().split(' ')
471 482 ref_data = ref.split('/', 2)
472 483 if ref_data[1] in ('tags', 'heads'):
473 484 rev_data.append({
474 485 # NOTE(marcink):
475 486 # we're unable to tell total_commits for git at this point
476 487 # but we set the variable for consistency with GIT
477 488 'total_commits': -1,
478 489 'old_rev': old_rev,
479 490 'new_rev': new_rev,
480 491 'ref': ref,
481 492 'type': ref_data[1],
482 493 'name': ref_data[2],
483 494 })
484 495 return rev_data
485 496
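A quick input/output example for the parser above (the all-zero rev is git's marker for a ref being created or deleted):

    line = '%s %s refs/heads/master' % ('0' * 40, 'a' * 40)
    refs = _parse_git_ref_lines([line])
    assert refs[0]['type'] == 'heads' and refs[0]['name'] == 'master'
    assert refs[0]['old_rev'] == '0' * 40  # creation: no previous rev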
486 497
487 498 def git_pre_receive(unused_repo_path, revision_lines, env):
488 499 """
489 500 Pre push hook.
490 501
491 502 :param extras: dictionary containing the keys defined in simplevcs
492 503 :type extras: dict
493 504
494 505 :return: status code of the hook. 0 for success.
495 506 :rtype: int
496 507 """
497 508 extras = json.loads(env['RC_SCM_DATA'])
498 509 rev_data = _parse_git_ref_lines(revision_lines)
499 510 if 'push' not in extras['hooks']:
500 511 return 0
501 512 empty_commit_id = '0' * 40
502 513
503 514 detect_force_push = extras.get('detect_force_push')
504 515
505 516 for push_ref in rev_data:
506 517 # store our git-env which holds the temp store
507 518 push_ref['git_env'] = _get_git_env()
508 519 push_ref['pruned_sha'] = ''
509 520 if not detect_force_push:
510 521 # don't check for forced-push when we don't need to
511 522 continue
512 523
513 524 type_ = push_ref['type']
514 525 new_branch = push_ref['old_rev'] == empty_commit_id
515 526 delete_branch = push_ref['new_rev'] == empty_commit_id
516 527 if type_ == 'heads' and not (new_branch or delete_branch):
517 528 old_rev = push_ref['old_rev']
518 529 new_rev = push_ref['new_rev']
519 530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
520 531 stdout, stderr = subprocessio.run_command(
521 532 cmd, env=os.environ.copy())
522 533 # non-empty output means some objects became unreachable, i.e. a forced push was used
523 534 if stdout:
524 535 push_ref['pruned_sha'] = stdout.splitlines()
525 536
526 537 extras['hook_type'] = 'pre_receive'
527 538 extras['commit_ids'] = rev_data
528 539 return _call_hook('pre_push', extras, GitMessageWriter())
529 540
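The rev-list probe above works because commits reachable from the old tip but not from the new tip can only appear when history was rewritten. A standalone version of the same probe (sketch; assumes a real repository path and revs):

    import subprocess

    def was_force_push(repo_path, old_rev, new_rev):
        # any output = commits were pruned from this ref's history
        out = subprocess.check_output(
            ['git', 'rev-list', old_rev, '^' + new_rev], cwd=repo_path)
        return bool(out.strip())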
530 541
531 542 def git_post_receive(unused_repo_path, revision_lines, env):
532 543 """
533 544 Post push hook.
534 545
535 546 :param extras: dictionary containing the keys defined in simplevcs
536 547 :type extras: dict
537 548
538 549 :return: status code of the hook. 0 for success.
539 550 :rtype: int
540 551 """
541 552 extras = json.loads(env['RC_SCM_DATA'])
542 553 if 'push' not in extras['hooks']:
543 554 return 0
544 555
545 556 rev_data = _parse_git_ref_lines(revision_lines)
546 557
547 558 git_revs = []
548 559
549 560 # N.B.(skreft): it is ok to just call git, as git before calling a
550 561 # subcommand sets the PATH environment variable so that it points to the
551 562 # correct version of the git executable.
552 563 empty_commit_id = '0' * 40
553 564 branches = []
554 565 tags = []
555 566 for push_ref in rev_data:
556 567 type_ = push_ref['type']
557 568
558 569 if type_ == 'heads':
559 570 if push_ref['old_rev'] == empty_commit_id:
560 571 # starting new branch case
561 572 if push_ref['name'] not in branches:
562 573 branches.append(push_ref['name'])
563 574
564 575 # Fix up head revision if needed
565 576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
566 577 try:
567 578 subprocessio.run_command(cmd, env=os.environ.copy())
568 579 except Exception:
569 580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
570 581 'refs/heads/%s' % push_ref['name']]
571 582 print("Setting default branch to %s" % push_ref['name'])
572 583 subprocessio.run_command(cmd, env=os.environ.copy())
573 584
574 585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
575 586 '--format=%(refname)', 'refs/heads/*']
576 587 stdout, stderr = subprocessio.run_command(
577 588 cmd, env=os.environ.copy())
578 589 heads = stdout
579 590 heads = heads.replace(push_ref['ref'], '')
580 591 heads = ' '.join(head for head
581 592 in heads.splitlines() if head) or '.'
582 593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
583 594 '--pretty=format:%H', '--', push_ref['new_rev'],
584 595 '--not', heads]
585 596 stdout, stderr = subprocessio.run_command(
586 597 cmd, env=os.environ.copy())
587 598 git_revs.extend(stdout.splitlines())
588 599 elif push_ref['new_rev'] == empty_commit_id:
589 600 # delete branch case
590 601 git_revs.append('delete_branch=>%s' % push_ref['name'])
591 602 else:
592 603 if push_ref['name'] not in branches:
593 604 branches.append(push_ref['name'])
594 605
595 606 cmd = [settings.GIT_EXECUTABLE, 'log',
596 607 '{old_rev}..{new_rev}'.format(**push_ref),
597 608 '--reverse', '--pretty=format:%H']
598 609 stdout, stderr = subprocessio.run_command(
599 610 cmd, env=os.environ.copy())
600 611 git_revs.extend(stdout.splitlines())
601 612 elif type_ == 'tags':
602 613 if push_ref['name'] not in tags:
603 614 tags.append(push_ref['name'])
604 615 git_revs.append('tag=>%s' % push_ref['name'])
605 616
606 617 extras['hook_type'] = 'post_receive'
607 618 extras['commit_ids'] = git_revs
608 619 extras['new_refs'] = {
609 620 'branches': branches,
610 621 'bookmarks': [],
611 622 'tags': tags,
612 623 }
613 624
614 625 if 'repo_size' in extras['hooks']:
615 626 try:
616 627 _call_hook('repo_size', extras, GitMessageWriter())
617 628 except:
618 629 pass
619 630
620 631 return _call_hook('post_push', extras, GitMessageWriter())
621 632
622 633
623 634 def _get_extras_from_txn_id(path, txn_id):
624 635 extras = {}
625 636 try:
626 637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
627 638 '-t', txn_id,
628 639 '--revprop', path, 'rc-scm-extras']
629 640 stdout, stderr = subprocessio.run_command(
630 641 cmd, env=os.environ.copy())
631 642 extras = json.loads(base64.urlsafe_b64decode(stdout))
632 643 except Exception:
633 644 log.exception('Failed to extract extras info from txn_id')
634 645
635 646 return extras
636 647
637 648
638 649 def _get_extras_from_commit_id(commit_id, path):
639 650 extras = {}
640 651 try:
641 652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
642 653 '-r', commit_id,
643 654 '--revprop', path, 'rc-scm-extras']
644 655 stdout, stderr = subprocessio.run_command(
645 656 cmd, env=os.environ.copy())
646 657 extras = json.loads(base64.urlsafe_b64decode(stdout))
647 658 except Exception:
648 659 log.exception('Failed to extract extras info from commit_id')
649 660
650 661 return extras
651 662
652 663
653 664 def svn_pre_commit(repo_path, commit_data, env):
654 665 path, txn_id = commit_data
655 666 branches = []
656 667 tags = []
657 668
658 669 if env.get('RC_SCM_DATA'):
659 670 extras = json.loads(env['RC_SCM_DATA'])
660 671 else:
661 672 # fallback method to read from TXN-ID stored data
662 673 extras = _get_extras_from_txn_id(path, txn_id)
663 674 if not extras:
664 675 return 0
665 676
666 677 extras['hook_type'] = 'pre_commit'
667 678 extras['commit_ids'] = [txn_id]
668 679 extras['txn_id'] = txn_id
669 680 extras['new_refs'] = {
670 681 'total_commits': 1,
671 682 'branches': branches,
672 683 'bookmarks': [],
673 684 'tags': tags,
674 685 }
675 686
676 687 return _call_hook('pre_push', extras, SvnMessageWriter())
677 688
678 689
679 690 def svn_post_commit(repo_path, commit_data, env):
680 691 """
681 692 commit_data is path, rev, txn_id
682 693 """
683 694 path, commit_id, txn_id = commit_data
684 695 branches = []
685 696 tags = []
686 697
687 698 if env.get('RC_SCM_DATA'):
688 699 extras = json.loads(env['RC_SCM_DATA'])
689 700 else:
690 701 # fallback method to read from TXN-ID stored data
691 702 extras = _get_extras_from_commit_id(commit_id, path)
692 703 if not extras:
693 704 return 0
694 705
695 706 extras['hook_type'] = 'post_commit'
696 707 extras['commit_ids'] = [commit_id]
697 708 extras['txn_id'] = txn_id
698 709 extras['new_refs'] = {
699 710 'branches': branches,
700 711 'bookmarks': [],
701 712 'tags': tags,
702 713 'total_commits': 1,
703 714 }
704 715
705 716 if 'repo_size' in extras['hooks']:
706 717 try:
707 718 _call_hook('repo_size', extras, SvnMessageWriter())
708 719 except Exception:
709 720 pass
710 721
711 722 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,610 +1,675 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 from itertools import chain
28 from cStringIO import StringIO
28 29
29 30 import simplejson as json
30 31 import msgpack
31 32 from pyramid.config import Configurator
32 33 from pyramid.settings import asbool, aslist
33 34 from pyramid.wsgi import wsgiapp
34 35 from pyramid.compat import configparser
36 from pyramid.response import Response
35 37
38 from vcsserver.utils import safe_int
36 39
37 40 log = logging.getLogger(__name__)
38 41
39 42 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
40 43 # causing problems and "fix" them in case they do by falling back to LC_ALL = C
41 44
42 45 try:
43 46 locale.setlocale(locale.LC_ALL, '')
44 47 except locale.Error as e:
45 48 log.error(
46 49 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
47 50 os.environ['LC_ALL'] = 'C'
48 51
49 52 import vcsserver
50 53 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
51 54 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
52 55 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
53 56 from vcsserver.echo_stub.echo_app import EchoApp
54 57 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
55 58 from vcsserver.lib.exc_tracking import store_exception
56 59 from vcsserver.server import VcsServer
57 60
58 61 try:
59 62 from vcsserver.git import GitFactory, GitRemote
60 63 except ImportError:
61 64 GitFactory = None
62 65 GitRemote = None
63 66
64 67 try:
65 68 from vcsserver.hg import MercurialFactory, HgRemote
66 69 except ImportError:
67 70 MercurialFactory = None
68 71 HgRemote = None
69 72
70 73 try:
71 74 from vcsserver.svn import SubversionFactory, SvnRemote
72 75 except ImportError:
73 76 SubversionFactory = None
74 77 SvnRemote = None
75 78
76 79
77 80 def _is_request_chunked(environ):
78 81 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 82 return stream
80 83
81 84
82 85 def _int_setting(settings, name, default):
83 86 settings[name] = int(settings.get(name, default))
84 87 return settings[name]
85 88
86 89
87 90 def _bool_setting(settings, name, default):
88 91 input_val = settings.get(name, default)
89 92 if isinstance(input_val, unicode):
90 93 input_val = input_val.encode('utf8')
91 94 settings[name] = asbool(input_val)
92 95 return settings[name]
93 96
94 97
95 98 def _list_setting(settings, name, default):
96 99 raw_value = settings.get(name, default)
97 100
98 101 # We assume the value uses pyramid's space/newline separation.
99 102 settings[name] = aslist(raw_value)
100 103 return settings[name]
101 104
102 105
103 106 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
104 107 value = settings.get(name, default)
105 108
106 109 if default_when_empty and not value:
107 110 # use default value when value is empty
108 111 value = default
109 112
110 113 if lower:
111 114 value = value.lower()
112 115 settings[name] = value
113 116 return settings[name]
114 117
115 118
116 119 class VCS(object):
117 def __init__(self, locale=None, cache_config=None):
118 self.locale = locale
120 def __init__(self, locale_conf=None, cache_config=None):
121 self.locale = locale_conf
119 122 self.cache_config = cache_config
120 123 self._configure_locale()
121 124
122 125 if GitFactory and GitRemote:
123 126 git_factory = GitFactory()
124 127 self._git_remote = GitRemote(git_factory)
125 128 else:
126 129 log.info("Git client import failed")
127 130
128 131 if MercurialFactory and HgRemote:
129 132 hg_factory = MercurialFactory()
130 133 self._hg_remote = HgRemote(hg_factory)
131 134 else:
132 135 log.info("Mercurial client import failed")
133 136
134 137 if SubversionFactory and SvnRemote:
135 138 svn_factory = SubversionFactory()
136 139
137 140 # hg factory is used for svn url validation
138 141 hg_factory = MercurialFactory()
139 142 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
140 143 else:
141 144 log.info("Subversion client import failed")
142 145
143 146 self._vcsserver = VcsServer()
144 147
145 148 def _configure_locale(self):
146 149 if self.locale:
147 150 log.info('Setting locale: `LC_ALL` to %s', self.locale)
148 151 else:
149 152 log.info(
150 153 'Configuring locale subsystem based on environment variables')
151 154 try:
152 155 # If self.locale is the empty string, then the locale
153 156 # module will use the environment variables. See the
154 157 # documentation of the package `locale`.
155 158 locale.setlocale(locale.LC_ALL, self.locale)
156 159
157 160 language_code, encoding = locale.getlocale()
158 161 log.info(
159 162 'Locale set to language code "%s" with encoding "%s".',
160 163 language_code, encoding)
161 164 except locale.Error:
162 165 log.exception(
163 166 'Cannot set locale, not configuring the locale system')
164 167
165 168
166 169 class WsgiProxy(object):
167 170 def __init__(self, wsgi):
168 171 self.wsgi = wsgi
169 172
170 173 def __call__(self, environ, start_response):
171 174 input_data = environ['wsgi.input'].read()
172 175 input_data = msgpack.unpackb(input_data)
173 176
174 177 error = None
175 178 try:
176 179 data, status, headers = self.wsgi.handle(
177 180 input_data['environment'], input_data['input_data'],
178 181 *input_data['args'], **input_data['kwargs'])
179 182 except Exception as e:
180 183 data, status, headers = [], None, None
181 184 error = {
182 185 'message': str(e),
183 186 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 187 }
185 188
186 189 start_response(200, {})
187 190 return self._iterator(error, status, headers, data)
188 191
189 192 def _iterator(self, error, status, headers, data):
190 193 initial_data = [
191 194 error,
192 195 status,
193 196 headers,
194 197 ]
195 198
196 199 for d in chain(initial_data, data):
197 200 yield msgpack.packb(d)
198 201
199 202
200 203 def not_found(request):
201 204 return {'status': '404 NOT FOUND'}
202 205
203 206
204 207 class VCSViewPredicate(object):
205 208 def __init__(self, val, config):
206 209 self.remotes = val
207 210
208 211 def text(self):
209 212 return 'vcs view method = %s' % (self.remotes.keys(),)
210 213
211 214 phash = text
212 215
213 216 def __call__(self, context, request):
214 217 """
215 218 View predicate that returns true if given backend is supported by
216 219 defined remotes.
217 220 """
218 221 backend = request.matchdict.get('backend')
219 222 return backend in self.remotes
220 223
221 224
222 225 class HTTPApplication(object):
223 226 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
224 227
225 228 remote_wsgi = remote_wsgi
226 229 _use_echo_app = False
227 230
228 231 def __init__(self, settings=None, global_config=None):
229 232 self._sanitize_settings_and_apply_defaults(settings)
230 233
231 234 self.config = Configurator(settings=settings)
232 235 self.global_config = global_config
233 236 self.config.include('vcsserver.lib.rc_cache')
234 237
235 locale = settings.get('locale', '') or 'en_US.UTF-8'
236 vcs = VCS(locale=locale, cache_config=settings)
238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
237 240 self._remotes = {
238 241 'hg': vcs._hg_remote,
239 242 'git': vcs._git_remote,
240 243 'svn': vcs._svn_remote,
241 244 'server': vcs._vcsserver,
242 245 }
243 246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
244 247 self._use_echo_app = True
245 248 log.warning("Using EchoApp for VCS operations.")
246 249 self.remote_wsgi = remote_wsgi_stub
247 250
248 251 self._configure_settings(global_config, settings)
249 252 self._configure()
250 253
251 254 def _configure_settings(self, global_config, app_settings):
252 255 """
253 256 Configure the settings module.
254 257 """
255 258 settings_merged = global_config.copy()
256 259 settings_merged.update(app_settings)
257 260
258 261 git_path = app_settings.get('git_path', None)
259 262 if git_path:
260 263 settings.GIT_EXECUTABLE = git_path
261 264 binary_dir = app_settings.get('core.binary_dir', None)
262 265 if binary_dir:
263 266 settings.BINARY_DIR = binary_dir
264 267
265 268 # Store the settings to make them available to other modules.
266 269 vcsserver.PYRAMID_SETTINGS = settings_merged
267 270 vcsserver.CONFIG = settings_merged
268 271
269 272 def _sanitize_settings_and_apply_defaults(self, settings):
270 273 temp_store = tempfile.gettempdir()
271 274 default_cache_dir = os.path.join(temp_store, 'rc_cache')
272 275
273 276 # save default, cache dir, and use it for all backends later.
274 277 default_cache_dir = _string_setting(
275 278 settings,
276 279 'cache_dir',
277 280 default_cache_dir, lower=False, default_when_empty=True)
278 281
279 282 # ensure we have our dir created
280 283 if not os.path.isdir(default_cache_dir):
281 284 os.makedirs(default_cache_dir, mode=0o755)
282 285
283 286 # exception store cache
284 287 _string_setting(
285 288 settings,
286 289 'exception_tracker.store_path',
287 290 temp_store, lower=False, default_when_empty=True)
288 291
289 292 # repo_object cache
290 293 _string_setting(
291 294 settings,
292 295 'rc_cache.repo_object.backend',
293 'dogpile.cache.rc.memory_lru')
296 'dogpile.cache.rc.file_namespace', lower=False)
294 297 _int_setting(
295 298 settings,
296 299 'rc_cache.repo_object.expiration_time',
297 300)
298 _int_setting(
300 30 * 24 * 60 * 60)
301 _string_setting(
299 302 settings,
300 'rc_cache.repo_object.max_size',
301 1024)
303 'rc_cache.repo_object.arguments.filename',
304 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
302 305
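The sanitizer changes above swap the repo_object cache from an in-memory LRU to dogpile's file-namespace backend with a 30-day expiration. The helpers defined earlier in this file can be exercised directly to see the installed defaults:

    conf = {}
    _string_setting(conf, 'rc_cache.repo_object.backend',
                    'dogpile.cache.rc.file_namespace', lower=False)
    _int_setting(conf, 'rc_cache.repo_object.expiration_time',
                 30 * 24 * 60 * 60)
    assert conf['rc_cache.repo_object.expiration_time'] == 2592000  # 30 days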
303 306 def _configure(self):
304 307 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
305 308
306 309 self.config.add_route('service', '/_service')
307 310 self.config.add_route('status', '/status')
308 311 self.config.add_route('hg_proxy', '/proxy/hg')
309 312 self.config.add_route('git_proxy', '/proxy/git')
313
314 # rpc methods
310 315 self.config.add_route('vcs', '/{backend}')
316
317 # streaming rpc remote methods
318 self.config.add_route('vcs_stream', '/{backend}/stream')
319
320 # vcs operations clone/push as streaming
311 321 self.config.add_route('stream_git', '/stream/git/*repo_name')
312 322 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
313 323
314 324 self.config.add_view(self.status_view, route_name='status', renderer='json')
315 325 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
316 326
317 327 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
318 328 self.config.add_view(self.git_proxy(), route_name='git_proxy')
319 329 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
320 330 vcs_view=self._remotes)
331 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
332 vcs_view=self._remotes)
321 333
322 334 self.config.add_view(self.hg_stream(), route_name='stream_hg')
323 335 self.config.add_view(self.git_stream(), route_name='stream_git')
324 336
325 337 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
326 338
327 339 self.config.add_notfound_view(not_found, renderer='json')
328 340
329 341 self.config.add_view(self.handle_vcs_exception, context=Exception)
330 342
331 343 self.config.add_tween(
332 'vcsserver.tweens.RequestWrapperTween',
344 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
333 345 )
346 self.config.add_request_method(
347 'vcsserver.lib.request_counter.get_request_counter',
348 'request_count')
334 349
335 350 def wsgi_app(self):
336 351 return self.config.make_wsgi_app()
337 352
338 def vcs_view(self, request):
353 def _vcs_view_params(self, request):
339 354 remote = self._remotes[request.matchdict['backend']]
340 355 payload = msgpack.unpackb(request.body, use_list=True)
341 356 method = payload.get('method')
342 params = payload.get('params')
357 params = payload['params']
343 358 wire = params.get('wire')
344 359 args = params.get('args')
345 360 kwargs = params.get('kwargs')
346 361 context_uid = None
347 362
348 363 if wire:
349 364 try:
350 365 wire['context'] = context_uid = uuid.UUID(wire['context'])
351 366 except KeyError:
352 367 pass
353 368 args.insert(0, wire)
369 repo_state_uid = wire.get('repo_state_uid') if wire else None
354 370
355 log.debug('method called:%s with kwargs:%s context_uid: %s',
356 method, kwargs, context_uid)
371 # NOTE(marcink): trading complexity for a slight performance gain
372 if log.isEnabledFor(logging.DEBUG):
373 no_args_methods = [
374 'archive_repo'
375 ]
376 if method in no_args_methods:
377 call_args = ''
378 else:
379 call_args = args[1:]
380
381 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
382 method, call_args, kwargs, context_uid, repo_state_uid)
383
384 return payload, remote, method, args, kwargs
385
386 def vcs_view(self, request):
387
388 payload, remote, method, args, kwargs = self._vcs_view_params(request)
389 payload_id = payload.get('id')
390
357 391 try:
358 392 resp = getattr(remote, method)(*args, **kwargs)
359 393 except Exception as e:
360 394 exc_info = list(sys.exc_info())
361 395 exc_type, exc_value, exc_traceback = exc_info
362 396
363 397 org_exc = getattr(e, '_org_exc', None)
364 398 org_exc_name = None
365 399 org_exc_tb = ''
366 400 if org_exc:
367 401 org_exc_name = org_exc.__class__.__name__
368 402 org_exc_tb = getattr(e, '_org_exc_tb', '')
369 403 # replace our "faked" exception with our org
370 404 exc_info[0] = org_exc.__class__
371 405 exc_info[1] = org_exc
372 406
373 407 store_exception(id(exc_info), exc_info)
374 408
375 409 tb_info = ''.join(
376 410 traceback.format_exception(exc_type, exc_value, exc_traceback))
377 411
378 412 type_ = e.__class__.__name__
379 413 if type_ not in self.ALLOWED_EXCEPTIONS:
380 414 type_ = None
381 415
382 416 resp = {
383 'id': payload.get('id'),
417 'id': payload_id,
384 418 'error': {
385 419 'message': e.message,
386 420 'traceback': tb_info,
387 421 'org_exc': org_exc_name,
388 422 'org_exc_tb': org_exc_tb,
389 423 'type': type_
390 424 }
391 425 }
392 426 try:
393 427 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
394 428 except AttributeError:
395 429 pass
396 430 else:
397 431 resp = {
398 'id': payload.get('id'),
432 'id': payload_id,
399 433 'result': resp
400 434 }
401 435
402 436 return resp
403 437
438 def vcs_stream_view(self, request):
439 payload, remote, method, args, kwargs = self._vcs_view_params(request)
440 # this method name carries a 'stream:' marker; we remove it here
441 method = method.split('stream:')[-1]
442 chunk_size = safe_int(payload.get('chunk_size')) or 4096
443
444 try:
445 resp = getattr(remote, method)(*args, **kwargs)
446 except Exception as e:
447 raise
448
449 def get_chunked_data(method_resp):
450 stream = StringIO(method_resp)
451 while 1:
452 chunk = stream.read(chunk_size)
453 if not chunk:
454 break
455 yield chunk
456
457 response = Response(app_iter=get_chunked_data(resp))
458 response.content_type = 'application/octet-stream'
459
460 return response
461
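The chunking generator in vcs_stream_view is a standard read-in-chunks pattern; a self-contained sketch of the same idea (BytesIO stands in for the remote method's response; the chunk size mirrors the 4096 default above):

    # standalone sketch of the chunked-read pattern used by vcs_stream_view
    from io import BytesIO

    def get_chunked_data(data, chunk_size=4096):
        stream = BytesIO(data)
        while True:
            chunk = stream.read(chunk_size)
            if not chunk:
                break
            yield chunk

    chunks = list(get_chunked_data(b'x' * 10000))
    assert [len(c) for c in chunks] == [4096, 4096, 1808]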
404 462 def status_view(self, request):
405 463 import vcsserver
406 464 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
407 465 'pid': os.getpid()}
408 466
409 467 def service_view(self, request):
410 468 import vcsserver
411 469
412 470 payload = msgpack.unpackb(request.body, use_list=True)
471 server_config, app_config = {}, {}
413 472
414 473 try:
415 474 path = self.global_config['__file__']
416 config = configparser.ConfigParser()
475 config = configparser.RawConfigParser()
476
417 477 config.read(path)
418 parsed_ini = config
419 if parsed_ini.has_section('server:main'):
420 parsed_ini = dict(parsed_ini.items('server:main'))
478
479 if config.has_section('server:main'):
480 server_config = dict(config.items('server:main'))
481 if config.has_section('app:main'):
482 app_config = dict(config.items('app:main'))
483
421 484 except Exception:
422 485 log.exception('Failed to read .ini file for display')
423 parsed_ini = {}
486
487 environ = os.environ.items()
424 488
425 489 resp = {
426 490 'id': payload.get('id'),
427 491 'result': dict(
428 492 version=vcsserver.__version__,
429 config=parsed_ini,
493 config=server_config,
494 app_config=app_config,
495 environ=environ,
430 496 payload=payload,
431 497 )
432 498 }
433 499 return resp
434 500
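The switch to RawConfigParser is presumably to keep '%(...)s' patterns in displayed ini values from triggering interpolation errors; a short standalone sketch of the difference (ini content illustrative):

    # standalone sketch: RawConfigParser returns '%(...)s' values verbatim
    import configparser

    ini = u"[server:main]\nhost = 127.0.0.1\n\n[app:main]\nlog_format = %(asctime)s\n"
    config = configparser.RawConfigParser()
    config.read_string(ini)

    server_config = dict(config.items('server:main'))
    app_config = dict(config.items('app:main'))
    assert app_config['log_format'] == '%(asctime)s'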
435 501 def _msgpack_renderer_factory(self, info):
436 502 def _render(value, system):
437 value = msgpack.packb(value)
438 503 request = system.get('request')
439 504 if request is not None:
440 505 response = request.response
441 506 ct = response.content_type
442 507 if ct == response.default_content_type:
443 508 response.content_type = 'application/x-msgpack'
444 return value
509 return msgpack.packb(value)
445 510 return _render
446 511
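For reference, the request/response wire format handled by these views is plain msgpack; a minimal roundtrip sketch (payload fields mirror what _vcs_view_params reads; key access assumes this Python 2 codebase):

    # standalone sketch of the msgpack request body consumed by vcs_view
    import msgpack

    payload = {
        'id': 1,
        'method': 'lookup',
        'params': {'wire': {}, 'args': [], 'kwargs': {}},
    }
    body = msgpack.packb(payload)                   # what a client would POST
    decoded = msgpack.unpackb(body, use_list=True)  # str keys on Python 2
    assert decoded['method'] == 'lookup'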
447 512 def set_env_from_config(self, environ, config):
448 513 dict_conf = {}
449 514 try:
450 515 for elem in config:
451 516 if elem[0] == 'rhodecode':
452 517 dict_conf = json.loads(elem[2])
453 518 break
454 519 except Exception:
455 520 log.exception('Failed to fetch SCM CONFIG')
456 521 return
457 522
458 523 username = dict_conf.get('username')
459 524 if username:
460 525 environ['REMOTE_USER'] = username
461 526 # mercurial specific, some extension api rely on this
462 527 environ['HGUSER'] = username
463 528
464 529 ip = dict_conf.get('ip')
465 530 if ip:
466 531 environ['REMOTE_HOST'] = ip
467 532
468 533 if _is_request_chunked(environ):
469 534 # set the compatibility flag for webob
470 535 environ['wsgi.input_terminated'] = True
471 536
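set_env_from_config scans the repo config for a 'rhodecode' entry whose value is JSON; a hedged sketch of the (section, key, value) shape it consumes (the key name RC_SCM_DATA is purely illustrative; only elem[0] and elem[2] matter):

    # standalone sketch of the config triples scanned by set_env_from_config
    import json

    config = [
        ('web', 'push_ssl', 'false'),
        ('rhodecode', 'RC_SCM_DATA',
         json.dumps({'username': 'admin', 'ip': '127.0.0.1'})),
    ]

    environ = {}
    for section, _key, value in config:
        if section == 'rhodecode':
            data = json.loads(value)
            environ['REMOTE_USER'] = data.get('username')
            environ['REMOTE_HOST'] = data.get('ip')
            break

    assert environ == {'REMOTE_USER': 'admin', 'REMOTE_HOST': '127.0.0.1'}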
472 537 def hg_proxy(self):
473 538 @wsgiapp
474 539 def _hg_proxy(environ, start_response):
475 540 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
476 541 return app(environ, start_response)
477 542 return _hg_proxy
478 543
479 544 def git_proxy(self):
480 545 @wsgiapp
481 546 def _git_proxy(environ, start_response):
482 547 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
483 548 return app(environ, start_response)
484 549 return _git_proxy
485 550
486 551 def hg_stream(self):
487 552 if self._use_echo_app:
488 553 @wsgiapp
489 554 def _hg_stream(environ, start_response):
490 555 app = EchoApp('fake_path', 'fake_name', None)
491 556 return app(environ, start_response)
492 557 return _hg_stream
493 558 else:
494 559 @wsgiapp
495 560 def _hg_stream(environ, start_response):
496 561 log.debug('http-app: handling hg stream')
497 562 repo_path = environ['HTTP_X_RC_REPO_PATH']
498 563 repo_name = environ['HTTP_X_RC_REPO_NAME']
499 564 packed_config = base64.b64decode(
500 565 environ['HTTP_X_RC_REPO_CONFIG'])
501 566 config = msgpack.unpackb(packed_config)
502 567 app = scm_app.create_hg_wsgi_app(
503 568 repo_path, repo_name, config)
504 569
505 570 # Consistent path information for hgweb
506 571 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
507 572 environ['REPO_NAME'] = repo_name
508 573 self.set_env_from_config(environ, config)
509 574
510 575 log.debug('http-app: starting app handler '
511 576 'with %s to process the request', app)
512 577 return app(environ, ResponseFilter(start_response))
513 578 return _hg_stream
514 579
515 580 def git_stream(self):
516 581 if self._use_echo_app:
517 582 @wsgiapp
518 583 def _git_stream(environ, start_response):
519 584 app = EchoApp('fake_path', 'fake_name', None)
520 585 return app(environ, start_response)
521 586 return _git_stream
522 587 else:
523 588 @wsgiapp
524 589 def _git_stream(environ, start_response):
525 590 log.debug('http-app: handling git stream')
526 591 repo_path = environ['HTTP_X_RC_REPO_PATH']
527 592 repo_name = environ['HTTP_X_RC_REPO_NAME']
528 593 packed_config = base64.b64decode(
529 594 environ['HTTP_X_RC_REPO_CONFIG'])
530 595 config = msgpack.unpackb(packed_config)
531 596
532 597 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
533 598 self.set_env_from_config(environ, config)
534 599
535 600 content_type = environ.get('CONTENT_TYPE', '')
536 601
537 602 path = environ['PATH_INFO']
538 603 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
539 604 log.debug(
540 605 'LFS: Detecting if request `%s` is LFS server path based '
541 606 'on content type:`%s`, is_lfs:%s',
542 607 path, content_type, is_lfs_request)
543 608
544 609 if not is_lfs_request:
545 610 # fallback detection by path
546 611 if GIT_LFS_PROTO_PAT.match(path):
547 612 is_lfs_request = True
548 613 log.debug(
549 614 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
550 615 path, is_lfs_request)
551 616
552 617 if is_lfs_request:
553 618 app = scm_app.create_git_lfs_wsgi_app(
554 619 repo_path, repo_name, config)
555 620 else:
556 621 app = scm_app.create_git_wsgi_app(
557 622 repo_path, repo_name, config)
558 623
559 624 log.debug('http-app: starting app handler '
560 625 'with %s to process the request', app)
561 626
562 627 return app(environ, start_response)
563 628
564 629 return _git_stream
565 630
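The LFS routing in _git_stream tries the content type first and falls back to the URL path; a self-contained sketch of that two-step detection (the constant and pattern below are assumed stand-ins for vcsserver's GIT_LFS_CONTENT_TYPE and GIT_LFS_PROTO_PAT, not their actual values):

    # standalone sketch of the two-step LFS request detection
    import re

    GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs'        # assumed value
    GIT_LFS_PROTO_PAT = re.compile(r'.*\.git/info/lfs/.+')  # assumed pattern

    def is_lfs(path, content_type):
        # primary detection by content type, fallback by path
        if GIT_LFS_CONTENT_TYPE in content_type:
            return True
        return bool(GIT_LFS_PROTO_PAT.match(path))

    assert is_lfs('/repo.git/info/lfs/objects/batch', '')
    assert is_lfs('/any/path', 'application/vnd.git-lfs+json')
    assert not is_lfs('/repo.git/info/refs', 'text/plain')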
566 631 def handle_vcs_exception(self, exception, request):
567 632 _vcs_kind = getattr(exception, '_vcs_kind', '')
568 633 if _vcs_kind == 'repo_locked':
569 634 # Get custom repo-locked status code if present.
570 635 status_code = request.headers.get('X-RC-Locked-Status-Code')
571 636 return HTTPRepoLocked(
572 637 title=exception.message, status_code=status_code)
573 638
574 639 elif _vcs_kind == 'repo_branch_protected':
575 640 # Get custom repo-branch-protected status code if present.
576 641 return HTTPRepoBranchProtected(title=exception.message)
577 642
578 643 exc_info = request.exc_info
579 644 store_exception(id(exc_info), exc_info)
580 645
581 646 traceback_info = 'unavailable'
582 647 if request.exc_info:
583 648 exc_type, exc_value, exc_tb = request.exc_info
584 649 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
585 650
586 651 log.error(
587 652 'error occurred handling this request for path: %s, \n tb: %s',
588 653 request.path, traceback_info)
589 654 raise exception
590 655
591 656
592 657 class ResponseFilter(object):
593 658
594 659 def __init__(self, start_response):
595 660 self._start_response = start_response
596 661
597 662 def __call__(self, status, response_headers, exc_info=None):
598 663 headers = tuple(
599 664 (h, v) for h, v in response_headers
600 665 if not wsgiref.util.is_hop_by_hop(h))
601 666 return self._start_response(status, headers, exc_info)
602 667
603 668
604 669 def main(global_config, **settings):
605 670 if MercurialFactory:
606 671 hgpatches.patch_largefiles_capabilities()
607 672 hgpatches.patch_subrepo_type_mapping()
608 673
609 674 app = HTTPApplication(settings=settings, global_config=global_config)
610 675 return app.wsgi_app()
@@ -1,60 +1,72 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 from dogpile.cache import register_backend
20 20
21 21 register_backend(
22 22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 23 "LRUMemoryBackend")
24 24
25 register_backend(
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
27 "FileNamespaceBackend")
28
29 register_backend(
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
31 "RedisPickleBackend")
32
33 register_backend(
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
35 "RedisMsgPackBackend")
36
37
25 38 log = logging.getLogger(__name__)
26 39
27 40 from . import region_meta
28 from .util import key_generator, get_default_cache_settings, make_region
41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
29 42
30 43
31 44 def configure_dogpile_cache(settings):
32 45 cache_dir = settings.get('cache_dir')
33 46 if cache_dir:
34 47 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
35 48
36 49 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
37 50
38 51 # inspect available namespaces
39 52 avail_regions = set()
40 53 for key in rc_cache_data.keys():
41 54 namespace_name = key.split('.', 1)[0]
42 55 avail_regions.add(namespace_name)
43 56 log.debug('dogpile: found following cache regions: %s', avail_regions)
44 57
45 58 # register them into namespace
46 59 for region_name in avail_regions:
47 60 new_region = make_region(
48 61 name=region_name,
49 function_key_generator=key_generator
62 function_key_generator=None
50 63 )
51 64
52 65 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
53
54 log.debug('dogpile: registering a new region %s[%s]',
55 region_name, new_region.__dict__)
66 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
67 log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
56 68 region_meta.dogpile_cache_regions[region_name] = new_region
57 69
58 70
59 71 def includeme(config):
60 72 configure_dogpile_cache(config.registry.settings)
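For reference, a sketch of the ini-derived settings configure_dogpile_cache consumes, using one of the backends registered above (region name, expiry, and paths are illustrative):

    # standalone sketch: settings that would yield a 'repo_object' region
    settings = {
        'cache_dir': '/tmp/rc_cache',
        'rc_cache.repo_object.backend': 'dogpile.cache.rc.memory_lru',
        'rc_cache.repo_object.expiration_time': '300',
        'rc_cache.repo_object.arguments.max_size': '1024',
    }

    # the region name is the first dotted component after the 'rc_cache.' prefix
    prefix = 'rc_cache.'
    regions = set(key[len(prefix):].split('.', 1)[0]
                  for key in settings if key.startswith(prefix))
    assert regions == set(['repo_object'])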
@@ -1,51 +1,253 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 import time
19 import errno
18 20 import logging
19 21
22 import msgpack
23 import redis
24
25 from dogpile.cache.api import CachedValue
20 26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
31
21 32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
22 33
23 34
24 35 _default_max_size = 1024
25 36
26 37 log = logging.getLogger(__name__)
27 38
28 39
29 40 class LRUMemoryBackend(memory_backend.MemoryBackend):
41 key_prefix = 'lru_mem_backend'
30 42 pickle_values = False
31 43
32 44 def __init__(self, arguments):
33 45 max_size = arguments.pop('max_size', _default_max_size)
34 46
35 47 LRUDictClass = LRUDict
36 48 if arguments.pop('log_key_count', None):
37 49 LRUDictClass = LRUDictDebug
38 50
39 51 arguments['cache_dict'] = LRUDictClass(max_size)
40 52 super(LRUMemoryBackend, self).__init__(arguments)
41 53
42 54 def delete(self, key):
43 55 try:
44 56 del self._cache[key]
45 57 except KeyError:
46 58 # we don't care if key isn't there at deletion
47 59 pass
48 60
49 61 def delete_multi(self, keys):
50 62 for key in keys:
51 63 self.delete(key)
64
65
66 class PickleSerializer(object):
67
68 def _dumps(self, value, safe=False):
69 try:
70 return compat.pickle.dumps(value)
71 except Exception:
72 if safe:
73 return NO_VALUE
74 else:
75 raise
76
77 def _loads(self, value, safe=True):
78 try:
79 return compat.pickle.loads(value)
80 except Exception:
81 if safe:
82 return NO_VALUE
83 else:
84 raise
85
86
87 class MsgPackSerializer(object):
88
89 def _dumps(self, value, safe=False):
90 try:
91 return msgpack.packb(value)
92 except Exception:
93 if safe:
94 return NO_VALUE
95 else:
96 raise
97
98 def _loads(self, value, safe=True):
99 """
100 pickle maintained the `CachedValue` wrapper of the tuple
101 msgpack does not, so it must be added back in.
102 """
103 try:
104 value = msgpack.unpackb(value, use_list=False)
105 return CachedValue(*value)
106 except Exception:
107 if safe:
108 return NO_VALUE
109 else:
110 raise
111
112
113 import fcntl
114 flock_org = fcntl.flock
115
116
117 class CustomLockFactory(FileLock):
118
119 pass
120
121
122 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
123 key_prefix = 'file_backend'
124
125 def __init__(self, arguments):
126 arguments['lock_factory'] = CustomLockFactory
127 super(FileNamespaceBackend, self).__init__(arguments)
128
129 def __repr__(self):
130 return '{} `{}`'.format(self.__class__, self.filename)
131
132 def list_keys(self, prefix=''):
133 prefix = '{}:{}'.format(self.key_prefix, prefix)
134
135 def cond(v):
136 if not prefix:
137 return True
138
139 if v.startswith(prefix):
140 return True
141 return False
142
143 with self._dbm_file(True) as dbm:
144
145 return filter(cond, dbm.keys())
146
147 def get_store(self):
148 return self.filename
149
150 def get(self, key):
151 with self._dbm_file(False) as dbm:
152 if hasattr(dbm, 'get'):
153 value = dbm.get(key, NO_VALUE)
154 else:
155 # gdbm objects lack a .get method
156 try:
157 value = dbm[key]
158 except KeyError:
159 value = NO_VALUE
160 if value is not NO_VALUE:
161 value = self._loads(value)
162 return value
163
164 def set(self, key, value):
165 with self._dbm_file(True) as dbm:
166 dbm[key] = self._dumps(value)
167
168 def set_multi(self, mapping):
169 with self._dbm_file(True) as dbm:
170 for key, value in mapping.items():
171 dbm[key] = self._dumps(value)
172
173
174 class BaseRedisBackend(redis_backend.RedisBackend):
175
176 def _create_client(self):
177 args = {}
178
179 if self.url is not None:
180 args.update(url=self.url)
181
182 else:
183 args.update(
184 host=self.host, password=self.password,
185 port=self.port, db=self.db
186 )
187
188 connection_pool = redis.ConnectionPool(**args)
189
190 return redis.StrictRedis(connection_pool=connection_pool)
191
192 def list_keys(self, prefix=''):
193 prefix = '{}:{}*'.format(self.key_prefix, prefix)
194 return self.client.keys(prefix)
195
196 def get_store(self):
197 return self.client.connection_pool
198
199 def get(self, key):
200 value = self.client.get(key)
201 if value is None:
202 return NO_VALUE
203 return self._loads(value)
204
205 def get_multi(self, keys):
206 if not keys:
207 return []
208 values = self.client.mget(keys)
209 loads = self._loads
210 return [
211 loads(v) if v is not None else NO_VALUE
212 for v in values]
213
214 def set(self, key, value):
215 if self.redis_expiration_time:
216 self.client.setex(key, self.redis_expiration_time,
217 self._dumps(value))
218 else:
219 self.client.set(key, self._dumps(value))
220
221 def set_multi(self, mapping):
222 dumps = self._dumps
223 mapping = dict(
224 (k, dumps(v))
225 for k, v in mapping.items()
226 )
227
228 if not self.redis_expiration_time:
229 self.client.mset(mapping)
230 else:
231 pipe = self.client.pipeline()
232 for key, value in mapping.items():
233 pipe.setex(key, self.redis_expiration_time, value)
234 pipe.execute()
235
236 def get_mutex(self, key):
237 u = redis_backend.u
238 if self.distributed_lock:
239 lock_key = u('_lock_{0}').format(key)
240 log.debug('Trying to acquire Redis lock for key %s', lock_key)
241 return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
242 else:
243 return None
244
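When distributed_lock is enabled, dogpile drives the lock returned by get_mutex through acquire/release around value regeneration; a hedged sketch of that contract (requires a reachable Redis; key name and timeouts illustrative):

    # standalone sketch of the mutex contract behind get_mutex
    import redis

    client = redis.StrictRedis(host='localhost', port=6379, db=0)
    mutex = client.lock('_lock_example_key', timeout=60, sleep=0.1)

    if mutex.acquire():      # dogpile acquires before regenerating a value
        try:
            pass             # ... regenerate and store the cached value ...
        finally:
            mutex.release()  # and always releases afterwards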
245
246 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
247 key_prefix = 'redis_pickle_backend'
248 pass
249
250
251 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
252 key_prefix = 'redis_msgpack_backend'
253 pass
@@ -1,136 +1,153 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import functools
21 from decorator import decorate
22
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
21 25
22 26 from vcsserver.utils import safe_str, sha1
23 from dogpile.cache import CacheRegion
24 from dogpile.cache.util import compat
27
25 28
26 29 log = logging.getLogger(__name__)
27 30
28 31
29 32 class RhodeCodeCacheRegion(CacheRegion):
30 33
31 34 def conditional_cache_on_arguments(
32 35 self, namespace=None,
33 36 expiration_time=None,
34 37 should_cache_fn=None,
35 38 to_str=compat.string_type,
36 39 function_key_generator=None,
37 40 condition=True):
38 41 """
39 42 Custom conditional decorator that will not touch any dogpile internals if
40 43 the condition isn't met. This works a bit differently than should_cache_fn,
41 44 and it's faster in cases where we never want to compute cached values.
42 45 """
43 46 expiration_time_is_callable = compat.callable(expiration_time)
44 47
45 48 if function_key_generator is None:
46 49 function_key_generator = self.function_key_generator
47 50
48 def decorator(fn):
51 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
52
53 if not condition:
54 log.debug('Calling un-cached func:%s', user_func.func_name)
55 return user_func(*arg, **kw)
56
57 key = key_generator(*arg, **kw)
58
59 timeout = expiration_time() if expiration_time_is_callable \
60 else expiration_time
61
62 log.debug('Calling cached fn:%s', user_func.func_name)
63 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
64
65 def cache_decorator(user_func):
49 66 if to_str is compat.string_type:
50 67 # backwards compatible
51 key_generator = function_key_generator(namespace, fn)
68 key_generator = function_key_generator(namespace, user_func)
52 69 else:
53 key_generator = function_key_generator(namespace, fn, to_str=to_str)
54
55 @functools.wraps(fn)
56 def decorate(*arg, **kw):
57 key = key_generator(*arg, **kw)
70 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
58 71
59 @functools.wraps(fn)
60 def creator():
61 return fn(*arg, **kw)
62
63 if not condition:
64 return creator()
65
66 timeout = expiration_time() if expiration_time_is_callable \
67 else expiration_time
68
69 return self.get_or_create(key, creator, timeout, should_cache_fn)
72 def refresh(*arg, **kw):
73 """
74 Like invalidate, but regenerates the value instead
75 """
76 key = key_generator(*arg, **kw)
77 value = user_func(*arg, **kw)
78 self.set(key, value)
79 return value
70 80
71 81 def invalidate(*arg, **kw):
72 82 key = key_generator(*arg, **kw)
73 83 self.delete(key)
74 84
75 85 def set_(value, *arg, **kw):
76 86 key = key_generator(*arg, **kw)
77 87 self.set(key, value)
78 88
79 89 def get(*arg, **kw):
80 90 key = key_generator(*arg, **kw)
81 91 return self.get(key)
82 92
83 def refresh(*arg, **kw):
84 key = key_generator(*arg, **kw)
85 value = fn(*arg, **kw)
86 self.set(key, value)
87 return value
93 user_func.set = set_
94 user_func.invalidate = invalidate
95 user_func.get = get
96 user_func.refresh = refresh
97 user_func.key_generator = key_generator
98 user_func.original = user_func
88 99
89 decorate.set = set_
90 decorate.invalidate = invalidate
91 decorate.refresh = refresh
92 decorate.get = get
93 decorate.original = fn
94 decorate.key_generator = key_generator
100 # Use `decorate` to preserve the signature of :param:`user_func`.
95 101
96 return decorate
102 return decorate(user_func, functools.partial(
103 get_or_create_for_user_func, key_generator))
97 104
98 return decorator
105 return cache_decorator
99 106
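A usage sketch of the rewritten decorator (assumed import path; requires a dogpile.cache version that, like this code, supports passing creator args to get_or_create):

    # standalone sketch: conditional caching with the rewritten decorator
    from vcsserver.lib.rc_cache.utils import make_region, backend_key_generator

    region = make_region(name='example', function_key_generator=None)
    region.configure('dogpile.cache.memory')
    region.function_key_generator = backend_key_generator(region.actual_backend)

    calls = []

    @region.conditional_cache_on_arguments(condition=True)
    def _lookup(repo_id, rev):
        calls.append((repo_id, rev))
        return '{}@{}'.format(repo_id, rev)

    assert _lookup('repo1', 42) == 'repo1@42'
    assert _lookup('repo1', 42) == 'repo1@42'  # second call hits the cache
    assert len(calls) == 1                     # body ran only once

With condition=False, each call would bypass dogpile entirely and run the function body directly.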
100 107
101 108 def make_region(*arg, **kw):
102 109 return RhodeCodeCacheRegion(*arg, **kw)
103 110
104 111
105 112 def get_default_cache_settings(settings, prefixes=None):
106 113 prefixes = prefixes or []
107 114 cache_settings = {}
108 115 for key in settings.keys():
109 116 for prefix in prefixes:
110 117 if key.startswith(prefix):
111 118 name = key.split(prefix)[1].strip()
112 119 val = settings[key]
113 if isinstance(val, basestring):
120 if isinstance(val, compat.string_types):
114 121 val = val.strip()
115 122 cache_settings[name] = val
116 123 return cache_settings
117 124
118 125
119 126 def compute_key_from_params(*args):
120 127 """
121 128 Helper to compute key from given params to be used in cache manager
122 129 """
123 130 return sha1("_".join(map(safe_str, args)))
124 131
125 132
126 def key_generator(namespace, fn):
133 def backend_key_generator(backend):
134 """
135 Special wrapper that also sends over the backend to the key generator
136 """
137 def wrapper(namespace, fn):
138 return key_generator(backend, namespace, fn)
139 return wrapper
140
141
142 def key_generator(backend, namespace, fn):
127 143 fname = fn.__name__
128 144
129 145 def generate_key(*args):
130 namespace_pref = namespace or 'default'
146 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
147 namespace_pref = namespace or 'default_namespace'
131 148 arg_key = compute_key_from_params(*args)
132 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
149 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
133 150
134 151 return final_key
135 152
136 153 return generate_key
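With backend_key_generator in place, every cache key now carries a backend-specific prefix ahead of the namespace; a sketch of the resulting layout (names illustrative):

    # standalone sketch of the key layout produced by generate_key
    import hashlib

    def compute_key(*args):
        return hashlib.sha1(
            "_".join(str(a) for a in args).encode('utf8')).hexdigest()

    backend_prefix = 'lru_mem_backend'  # e.g. LRUMemoryBackend.key_prefix
    namespace = 'repo_object'           # illustrative namespace
    fname = 'get_nodes'                 # illustrative cached function name
    final_key = "{}:{}:{}_{}".format(
        backend_prefix, namespace, fname, compute_key('repo_id_1', 42))
    # -> 'lru_mem_backend:repo_object:get_nodes_<40-char sha1 hex digest>'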
@@ -1,523 +1,519 b''
1 1 """
2 2 This module provides a class that wraps communication over subprocess.Popen
3 3 input, output, and error streams in a meaningful, non-blocking, concurrent
4 4 stream processor, exposing the output data as an iterator fit to be the
5 5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6 6
7 7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
8 8
9 9 This file is part of git_http_backend.py Project.
10 10
11 11 git_http_backend.py Project is free software: you can redistribute it and/or
12 12 modify it under the terms of the GNU Lesser General Public License as
13 13 published by the Free Software Foundation, either version 2.1 of the License,
14 14 or (at your option) any later version.
15 15
16 16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 19 GNU Lesser General Public License for more details.
20 20
21 21 You should have received a copy of the GNU Lesser General Public License
22 22 along with git_http_backend.py Project.
23 23 If not, see <http://www.gnu.org/licenses/>.
24 24 """
25 25 import os
26 26 import logging
27 27 import subprocess32 as subprocess
28 28 from collections import deque
29 29 from threading import Event, Thread
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
34 34 class StreamFeeder(Thread):
35 35 """
36 36 Normal writing into a pipe-like object blocks once the buffer is filled.
37 37 This thread feeds data from a file-like object into a pipe
38 38 without blocking the main thread.
39 39 We close the input pipe once the end of the source stream is reached.
40 40 """
41 41
42 42 def __init__(self, source):
43 43 super(StreamFeeder, self).__init__()
44 44 self.daemon = True
45 45 filelike = False
46 46 self.bytes = bytes()
47 47 if type(source) in (type(''), bytes, bytearray): # string-like
48 48 self.bytes = bytes(source)
49 49 else: # can be either file pointer or file-like
50 50 if type(source) in (int, long): # file pointer it is
51 51 # converting file descriptor (int) stdin into file-like
52 52 try:
53 53 source = os.fdopen(source, 'rb', 16384)
54 54 except Exception:
55 55 pass
56 56 # let's see if source is file-like by now
57 57 try:
58 58 filelike = source.read
59 59 except Exception:
60 60 pass
61 61 if not filelike and not self.bytes:
62 62 raise TypeError("StreamFeeder's source object must be a readable "
63 63 "file-like, a file descriptor, or a string-like.")
64 64 self.source = source
65 65 self.readiface, self.writeiface = os.pipe()
66 66
67 67 def run(self):
68 68 t = self.writeiface
69 69 try:
70 70 if self.bytes:
71 71 os.write(t, self.bytes)
72 72 else:
73 73 s = self.source
74 74 b = s.read(4096)
75 75 while b:
76 76 os.write(t, b)
77 77 b = s.read(4096)
78 78 finally:
79 79 os.close(t)
80 80
81 81 @property
82 82 def output(self):
83 83 return self.readiface
84 84
85 85
86 86 class InputStreamChunker(Thread):
87 87 def __init__(self, source, target, buffer_size, chunk_size):
88 88
89 89 super(InputStreamChunker, self).__init__()
90 90
91 91 self.daemon = True # die die die.
92 92
93 93 self.source = source
94 94 self.target = target
95 95 self.chunk_count_max = int(buffer_size / chunk_size) + 1
96 96 self.chunk_size = chunk_size
97 97
98 98 self.data_added = Event()
99 99 self.data_added.clear()
100 100
101 101 self.keep_reading = Event()
102 102 self.keep_reading.set()
103 103
104 104 self.EOF = Event()
105 105 self.EOF.clear()
106 106
107 107 self.go = Event()
108 108 self.go.set()
109 109
110 110 def stop(self):
111 111 self.go.clear()
112 112 self.EOF.set()
113 113 try:
114 114 # this is not proper, but is done to force the reader thread to let
115 115 # go of the input because, if successful, .close() will send EOF
116 116 # down the pipe.
117 117 self.source.close()
118 118 except:
119 119 pass
120 120
121 121 def run(self):
122 122 s = self.source
123 123 t = self.target
124 124 cs = self.chunk_size
125 125 chunk_count_max = self.chunk_count_max
126 126 keep_reading = self.keep_reading
127 127 da = self.data_added
128 128 go = self.go
129 129
130 130 try:
131 131 b = s.read(cs)
132 132 except ValueError:
133 133 b = ''
134 134
135 135 timeout_input = 20
136 136 while b and go.is_set():
137 137 if len(t) > chunk_count_max:
138 138 keep_reading.clear()
139 139 keep_reading.wait(timeout_input)
140 140 if len(t) > chunk_count_max + timeout_input:
141 141 log.error("Timed out while waiting for input from subprocess.")
142 142 os._exit(-1) # this will cause the worker to recycle itself
143 143
144 144 t.append(b)
145 145 da.set()
146 146
147 147 try:
148 148 b = s.read(cs)
149 149 except ValueError:
150 150 b = ''
151 151
152 152 self.EOF.set()
153 153 da.set() # for cases when done but there was no input.
154 154
155 155
156 156 class BufferedGenerator(object):
157 157 """
158 158 Behaves as a non-blocking, buffered pipe reader.
159 159 Reads chunks of data (through a thread)
160 160 from a blocking pipe and appends them to a deque of chunks.
161 161 Reading is halted in the thread when the maximum chunk count is buffered.
162 162 The .next() method may operate in blocking or non-blocking fashion, either
163 163 yielding '' if no data is ready
164 164 to be sent or by not returning until there is some data to send.
165 165 When we get EOF from the underlying source pipe, we raise a marker so that
166 166 StopIteration is raised after the last chunk of data is yielded.
167 167 """
168 168
169 169 def __init__(self, source, buffer_size=65536, chunk_size=4096,
170 170 starting_values=None, bottomless=False):
171 171 starting_values = starting_values or []
172 172
173 173 if bottomless:
174 174 maxlen = int(buffer_size / chunk_size)
175 175 else:
176 176 maxlen = None
177 177
178 178 self.data = deque(starting_values, maxlen)
179 179 self.worker = InputStreamChunker(source, self.data, buffer_size,
180 180 chunk_size)
181 181 if starting_values:
182 182 self.worker.data_added.set()
183 183 self.worker.start()
184 184
185 185 ####################
186 186 # Generator's methods
187 187 ####################
188 188
189 189 def __iter__(self):
190 190 return self
191 191
192 192 def next(self):
193 193 while not len(self.data) and not self.worker.EOF.is_set():
194 194 self.worker.data_added.clear()
195 195 self.worker.data_added.wait(0.2)
196 196 if len(self.data):
197 197 self.worker.keep_reading.set()
198 198 return bytes(self.data.popleft())
199 199 elif self.worker.EOF.is_set():
200 200 raise StopIteration
201 201
202 202 def throw(self, exc_type, value=None, traceback=None):
203 203 if not self.worker.EOF.is_set():
204 204 raise exc_type(value)
205 205
206 206 def start(self):
207 207 self.worker.start()
208 208
209 209 def stop(self):
210 210 self.worker.stop()
211 211
212 212 def close(self):
213 213 try:
214 214 self.worker.stop()
215 215 self.throw(GeneratorExit)
216 216 except (GeneratorExit, StopIteration):
217 217 pass
218 218
219 def __del__(self):
220 self.close()
221
222 219 ####################
223 220 # Threaded reader's infrastructure.
224 221 ####################
225 222 @property
226 223 def input(self):
227 224 return self.worker.w
228 225
229 226 @property
230 227 def data_added_event(self):
231 228 return self.worker.data_added
232 229
233 230 @property
234 231 def data_added(self):
235 232 return self.worker.data_added.is_set()
236 233
237 234 @property
238 235 def reading_paused(self):
239 236 return not self.worker.keep_reading.is_set()
240 237
241 238 @property
242 239 def done_reading_event(self):
243 240 """
244 241 Done reading does not mean that the iterator's buffer is empty.
245 242 The iterator might be done reading from the underlying source, but the read
246 243 chunks might still be available for serving through the .next() method.
247 244
248 245 :returns: An Event class instance.
249 246 """
250 247 return self.worker.EOF
251 248
252 249 @property
253 250 def done_reading(self):
254 251 """
255 252 Done reading does not mean that the iterator's buffer is empty.
256 253 The iterator might be done reading from the underlying source, but the read
257 254 chunks might still be available for serving through the .next() method.
258 255 
259 256 :returns: A bool value.
260 257 """
261 258 return self.worker.EOF.is_set()
262 259
263 260 @property
264 261 def length(self):
265 262 """
266 263 returns int.
267 264
268 265 This is the length of the queue of chunks, not the length of
269 266 the combined contents in those chunks.
270 267 
271 268 __len__() cannot be meaningfully implemented because this
272 269 reader is just flying through a bottomless pit of content and
273 270 can only know the length of what it has already seen.
274 271 
275 272 If __len__() per PEP 3333 returns a value, the WSGI server will set
276 273 the response's length to it. In order not to
277 274 confuse WSGI PEP 3333 servers, we will not implement __len__
278 275 at all.
279 276 """
280 277 return len(self.data)
281 278
282 279 def prepend(self, x):
283 280 self.data.appendleft(x)
284 281
285 282 def append(self, x):
286 283 self.data.append(x)
287 284
288 285 def extend(self, o):
289 286 self.data.extend(o)
290 287
291 288 def __getitem__(self, i):
292 289 return self.data[i]
293 290
294 291
295 292 class SubprocessIOChunker(object):
296 293 """
297 294 Processor class wrapping handling of subprocess IO.
298 295
299 296 .. important::
300 297
301 298 Watch out for the method `__del__` on this class. If this object
302 299 is deleted, it will kill the subprocess, so avoid returning
303 300 the `output` attribute or using it as in the following
304 301 example::
305 302
306 303 # `args` expected to run a program that produces a lot of output
307 304 output = ''.join(SubprocessIOChunker(
308 305 args, shell=False, inputstream=inputstream, env=environ).output)
309 306
310 307 # `output` will not contain all the data, because the __del__ method
311 308 # has already killed the subprocess in this case before all output
312 309 # has been consumed.
313 310
314 311
315 312
316 313 In a way, this is a "communicate()" replacement with a twist.
317 314
318 315 - We are multithreaded. Writing in and reading out/err are all separate threads.
319 316 - We support concurrent (in and out) stream processing.
320 317 - The output is not a stream. It's a queue of read string (bytes, not unicode)
321 318 chunks. The object behaves as an iterable: you can iterate it with "for chunk in obj:".
322 319 - We are non-blocking in more respects than communicate()
323 320 (reading from subprocess out pauses when internal buffer is full, but
324 321 does not block the parent calling code. On the flip side, reading from
325 322 slow-yielding subprocess may block the iteration until data shows up. This
326 323 does not block the parallel inpipe reading occurring in a parallel thread.)
327 324
328 325 The purpose of the object is to allow us to wrap subprocess interactions into
329 326 an iterable that can be passed to a WSGI server as the application's return
330 327 value. Because of this stream-processing ability, WSGI does not have to read ALL
331 328 of the subprocess's output and buffer it before handing it to the WSGI server for
332 329 the HTTP response. Instead, the class initializer reads just a bit of the stream
333 330 to figure out if an error occurred or is likely to occur and, if not, hands the
334 331 further iteration over subprocess output to the server for completion of HTTP
335 332 response.
336 333
337 334 The real or perceived subprocess error is trapped and raised as one of
338 335 EnvironmentError family of exceptions
339 336
340 337 Example usage:
341 338 # try:
342 339 # answer = SubprocessIOChunker(
343 340 # cmd,
344 341 # input,
345 342 # buffer_size = 65536,
346 343 # chunk_size = 4096
347 344 # )
348 345 # except (EnvironmentError) as e:
349 346 # print str(e)
350 347 # raise e
351 348 #
352 349 # return answer
353 350
354 351
355 352 """
356 353
357 354 # TODO: johbo: This is used to make sure that the open end of the PIPE
358 355 # is closed in the end. It would be way better to wrap this into an
359 356 # object, so that it is closed automatically once it is consumed or
360 357 # something similar.
361 358 _close_input_fd = None
362 359
363 360 _closed = False
364 361
365 362 def __init__(self, cmd, inputstream=None, buffer_size=65536,
366 363 chunk_size=4096, starting_values=None, fail_on_stderr=True,
367 364 fail_on_return_code=True, **kwargs):
368 365 """
369 366 Initializes SubprocessIOChunker
370 367
371 368 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
372 369 :param inputstream: (Default: None) A file-like, string, or file pointer.
373 370 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
374 371 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
375 372 :param starting_values: (Default: []) An array of strings to put in front of the output queue.
376 373 :param fail_on_stderr: (Default: True) Whether to raise an exception in
377 374 case something is written to stderr.
378 375 :param fail_on_return_code: (Default: True) Whether to raise an
379 376 exception if the return code is not 0.
380 377 """
381 378
382 379 starting_values = starting_values or []
383 380 if inputstream:
384 381 input_streamer = StreamFeeder(inputstream)
385 382 input_streamer.start()
386 383 inputstream = input_streamer.output
387 384 self._close_input_fd = inputstream
388 385
389 386 self._fail_on_stderr = fail_on_stderr
390 387 self._fail_on_return_code = fail_on_return_code
391 388
392 389 _shell = kwargs.get('shell', True)
393 390 kwargs['shell'] = _shell
394 391
395 392 _p = subprocess.Popen(cmd, bufsize=-1,
396 393 stdin=inputstream,
397 394 stdout=subprocess.PIPE,
398 395 stderr=subprocess.PIPE,
399 396 **kwargs)
400 397
401 398 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
402 399 starting_values)
403 400 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
404 401
405 402 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
406 403 # doing this until we reach either end of file, or end of buffer.
407 404 bg_out.data_added_event.wait(1)
408 405 bg_out.data_added_event.clear()
409 406
410 407 # at this point it's still ambiguous if we are done reading or just full buffer.
411 408 # Either way, if error (returned by ended process, or implied based on
412 409 # presence of stuff in stderr output) we error out.
413 410 # Else, we are happy.
414 411 _returncode = _p.poll()
415 412
416 413 if ((_returncode and fail_on_return_code) or
417 414 (fail_on_stderr and _returncode is None and bg_err.length)):
418 415 try:
419 416 _p.terminate()
420 417 except Exception:
421 418 pass
422 419 bg_out.stop()
423 420 bg_err.stop()
424 421 if fail_on_stderr:
425 422 err = ''.join(bg_err)
426 423 raise EnvironmentError(
427 424 "Subprocess exited due to an error:\n" + err)
428 425 if _returncode and fail_on_return_code:
429 426 err = ''.join(bg_err)
430 427 if not err:
431 428 # maybe get empty stderr, try stdout instead
432 429 # in many cases git reports the errors on stdout too
433 430 err = ''.join(bg_out)
434 431 raise EnvironmentError(
435 432 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
436 433 _returncode, err))
437 434
438 435 self.process = _p
439 436 self.output = bg_out
440 437 self.error = bg_err
441 438 self.inputstream = inputstream
442 439
443 440 def __iter__(self):
444 441 return self
445 442
446 443 def next(self):
447 444 # Note: mikhail: We need to be sure that we are checking the return
448 445 # code after the stdout stream is closed. Some processes, e.g. git,
449 446 # do some magic in between closing stdout and terminating the
450 447 # process and, as a result, we do not get a return code on "slow"
451 448 # systems.
452 449 result = None
453 450 stop_iteration = None
454 451 try:
455 452 result = self.output.next()
456 453 except StopIteration as e:
457 454 stop_iteration = e
458 455
459 456 if self.process.poll() and self._fail_on_return_code:
460 457 err = '%s' % ''.join(self.error)
461 458 raise EnvironmentError(
462 459 "Subprocess exited due to an error:\n" + err)
463 460
464 461 if stop_iteration:
465 462 raise stop_iteration
466 463 return result
467 464
468 465 def throw(self, type, value=None, traceback=None):
469 466 if self.output.length or not self.output.done_reading:
470 467 raise type(value)
471 468
472 469 def close(self):
473 470 if self._closed:
474 471 return
475 472 self._closed = True
476 473 try:
477 474 self.process.terminate()
478 except:
475 except Exception:
479 476 pass
480 477 if self._close_input_fd:
481 478 os.close(self._close_input_fd)
482 479 try:
483 480 self.output.close()
484 except:
481 except Exception:
485 482 pass
486 483 try:
487 484 self.error.close()
488 except:
485 except Exception:
489 486 pass
490 487 try:
491 488 os.close(self.inputstream)
492 except:
489 except Exception:
493 490 pass
494 491
495 def __del__(self):
496 self.close()
497
498 492
499 493 def run_command(arguments, env=None):
500 494 """
501 495 Run the specified command and return the stdout.
502 496
503 497 :param arguments: sequence of program arguments (including the program name)
504 498 :type arguments: list[str]
505 499 """
506 500
507 501 cmd = arguments
508 502 log.debug('Running subprocessio command %s', cmd)
503 proc = None
509 504 try:
510 505 _opts = {'shell': False, 'fail_on_stderr': False}
511 506 if env:
512 507 _opts.update({'env': env})
513 p = SubprocessIOChunker(cmd, **_opts)
514 stdout = ''.join(p)
515 stderr = ''.join(''.join(p.error))
508 proc = SubprocessIOChunker(cmd, **_opts)
509 return ''.join(proc), ''.join(proc.error)
516 510 except (EnvironmentError, OSError) as err:
517 511 cmd = ' '.join(cmd) # human friendly CMD
518 512 tb_err = ("Couldn't run subprocessio command (%s).\n"
519 513 "Original error was:%s\n" % (cmd, err))
520 514 log.exception(tb_err)
521 515 raise Exception(tb_err)
516 finally:
517 if proc:
518 proc.close()
522 519
523 return stdout, stderr
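A minimal usage sketch of the reworked run_command (a standalone example assuming only that 'echo' is on PATH):

    # standalone sketch: run_command returns joined stdout and stderr
    from vcsserver import subprocessio

    stdout, stderr = subprocessio.run_command(['echo', 'hello'])
    assert stdout.strip() == 'hello'
    assert stderr == ''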
@@ -1,775 +1,799 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 22 from urllib2 import URLError
23 23 import urlparse
24 24 import logging
25 25 import posixpath as vcspath
26 26 import StringIO
27 27 import urllib
28 28 import traceback
29 29
30 30 import svn.client
31 31 import svn.core
32 32 import svn.delta
33 33 import svn.diff
34 34 import svn.fs
35 35 import svn.repos
36 36
37 37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.vcs_base import RemoteBase
39 40
40 41 log = logging.getLogger(__name__)
41 42
42 43
43 44 # Set of svn compatible version flags.
44 45 # Compare with subversion/svnadmin/svnadmin.c
45 46 svn_compatible_versions = {
46 47 'pre-1.4-compatible',
47 48 'pre-1.5-compatible',
48 49 'pre-1.6-compatible',
49 50 'pre-1.8-compatible',
50 51 'pre-1.9-compatible'
51 52 }
52 53
53 54 svn_compatible_versions_map = {
54 55 'pre-1.4-compatible': '1.3',
55 56 'pre-1.5-compatible': '1.4',
56 57 'pre-1.6-compatible': '1.5',
57 58 'pre-1.8-compatible': '1.7',
58 59 'pre-1.9-compatible': '1.8',
59 60 }
60 61
61 62
62 63 def reraise_safe_exceptions(func):
63 64 """Decorator for converting svn exceptions to something neutral."""
64 65 def wrapper(*args, **kwargs):
65 66 try:
66 67 return func(*args, **kwargs)
67 68 except Exception as e:
68 69 if not hasattr(e, '_vcs_kind'):
69 70 log.exception("Unhandled exception in svn remote call")
70 71 raise_from_original(exceptions.UnhandledException(e))
71 72 raise
72 73 return wrapper
73 74
74 75
75 76 class SubversionFactory(RepoFactory):
76 77 repo_type = 'svn'
77 78
78 79 def _create_repo(self, wire, create, compatible_version):
79 80 path = svn.core.svn_path_canonicalize(wire['path'])
80 81 if create:
81 82 fs_config = {'compatible-version': '1.9'}
82 83 if compatible_version:
83 84 if compatible_version not in svn_compatible_versions:
84 85 raise Exception('Unknown SVN compatible version "{}"'
85 86 .format(compatible_version))
86 87 fs_config['compatible-version'] = \
87 88 svn_compatible_versions_map[compatible_version]
88 89
89 90 log.debug('Create SVN repo with config "%s"', fs_config)
90 91 repo = svn.repos.create(path, "", "", None, fs_config)
91 92 else:
92 93 repo = svn.repos.open(path)
93 94
94 95 log.debug('Got SVN object: %s', repo)
95 96 return repo
96 97
97 98 def repo(self, wire, create=False, compatible_version=None):
98 99 """
99 100 Get a repository instance for the given path.
100
101 Uses internally the low level beaker API since the decorators introduce
102 significant overhead.
103 101 """
104 region = self._cache_region
105 context = wire.get('context', None)
106 repo_path = wire.get('path', '')
107 context_uid = '{}'.format(context)
108 cache = wire.get('cache', True)
109 cache_on = context and cache
110
111 @region.conditional_cache_on_arguments(condition=cache_on)
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
113 return self._create_repo(wire, create, compatible_version)
114
115 return create_new_repo(self.repo_type, repo_path, context_uid,
116 compatible_version)
102 return self._create_repo(wire, create, compatible_version)
117 103
118 104
119 105 NODE_TYPE_MAPPING = {
120 106 svn.core.svn_node_file: 'file',
121 107 svn.core.svn_node_dir: 'dir',
122 108 }
123 109
124 110
125 class SvnRemote(object):
111 class SvnRemote(RemoteBase):
126 112
127 113 def __init__(self, factory, hg_factory=None):
128 114 self._factory = factory
129 115 # TODO: Remove once we do not use internal Mercurial objects anymore
130 116 # for subversion
131 117 self._hg_factory = hg_factory
132 118
133 119 @reraise_safe_exceptions
134 120 def discover_svn_version(self):
135 121 try:
136 122 import svn.core
137 123 svn_ver = svn.core.SVN_VERSION
138 124 except ImportError:
139 125 svn_ver = None
140 126 return svn_ver
141 127
142 128 @reraise_safe_exceptions
143 129 def is_empty(self, wire):
144 repo = self._factory.repo(wire)
145 130
146 131 try:
147 132 return self.lookup(wire, -1) == 0
148 133 except Exception:
149 134 log.exception("failed to read object_store")
150 135 return False
151 136
152 137 def check_url(self, url, config_items):
153 138 # this can throw an exception if not installed, but we detect this
154 139 from hgsubversion import svnrepo
155 140
156 141 baseui = self._hg_factory._create_config(config_items)
157 142 # the uuid function gets a valid UUID only from a proper repo, else it
158 143 # throws an exception
159 144 try:
160 145 svnrepo.svnremoterepo(baseui, url).svn.uuid
161 146 except Exception:
162 147 tb = traceback.format_exc()
163 148 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
164 149 raise URLError(
165 150 '"%s" is not a valid Subversion source url.' % (url, ))
166 151 return True
167 152
168 153 def is_path_valid_repository(self, wire, path):
169 154
170 155 # NOTE(marcink): short circuit the check for SVN repo
171 156 # the repos.open call might be expensive, but we have one cheap
172 157 # precondition we can use: checking for the 'format' file
173 158
174 159 if not os.path.isfile(os.path.join(path, 'format')):
175 160 return False
176 161
177 162 try:
178 163 svn.repos.open(path)
179 164 except svn.core.SubversionException:
180 165 tb = traceback.format_exc()
181 166 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
182 167 return False
183 168 return True
184 169
185 170 @reraise_safe_exceptions
186 171 def verify(self, wire,):
187 172 repo_path = wire['path']
188 173 if not self.is_path_valid_repository(wire, repo_path):
189 174 raise Exception(
190 175 "Path %s is not a valid Subversion repository." % repo_path)
191 176
192 177 cmd = ['svnadmin', 'info', repo_path]
193 178 stdout, stderr = subprocessio.run_command(cmd)
194 179 return stdout
195 180
196 181 def lookup(self, wire, revision):
197 182 if revision not in [-1, None, 'HEAD']:
198 183 raise NotImplementedError
199 184 repo = self._factory.repo(wire)
200 185 fs_ptr = svn.repos.fs(repo)
201 186 head = svn.fs.youngest_rev(fs_ptr)
202 187 return head
203 188
204 189 def lookup_interval(self, wire, start_ts, end_ts):
205 190 repo = self._factory.repo(wire)
206 191 fsobj = svn.repos.fs(repo)
207 192 start_rev = None
208 193 end_rev = None
209 194 if start_ts:
210 195 start_ts_svn = apr_time_t(start_ts)
211 196 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
212 197 else:
213 198 start_rev = 1
214 199 if end_ts:
215 200 end_ts_svn = apr_time_t(end_ts)
216 201 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
217 202 else:
218 203 end_rev = svn.fs.youngest_rev(fsobj)
219 204 return start_rev, end_rev
220 205
221 206 def revision_properties(self, wire, revision):
222 repo = self._factory.repo(wire)
223 fs_ptr = svn.repos.fs(repo)
224 return svn.fs.revision_proplist(fs_ptr, revision)
207
208 cache_on, context_uid, repo_id = self._cache_on(wire)
209 @self.region.conditional_cache_on_arguments(condition=cache_on)
210 def _revision_properties(_repo_id, _revision):
211 repo = self._factory.repo(wire)
212 fs_ptr = svn.repos.fs(repo)
213 return svn.fs.revision_proplist(fs_ptr, revision)
214 return _revision_properties(repo_id, revision)
225 215
226 216 def revision_changes(self, wire, revision):
227 217
228 218 repo = self._factory.repo(wire)
229 219 fsobj = svn.repos.fs(repo)
230 220 rev_root = svn.fs.revision_root(fsobj, revision)
231 221
232 222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
233 223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
234 224 base_dir = ""
235 225 send_deltas = False
236 226 svn.repos.replay2(
237 227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
238 228 editor_ptr, editor_baton, None)
239 229
240 230 added = []
241 231 changed = []
242 232 removed = []
243 233
244 234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
245 235 for path, change in editor.changes.iteritems():
246 236 # TODO: Decide what to do with directory nodes. Subversion can add
247 237 # empty directories.
248 238
249 239 if change.item_kind == svn.core.svn_node_dir:
250 240 continue
251 241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
252 242 added.append(path)
253 243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
254 244 svn.repos.CHANGE_ACTION_REPLACE]:
255 245 changed.append(path)
256 246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
257 247 removed.append(path)
258 248 else:
259 249 raise NotImplementedError(
260 250 "Action %s not supported on path %s" % (
261 251 change.action, path))
262 252
263 253 changes = {
264 254 'added': added,
265 255 'changed': changed,
266 256 'removed': removed,
267 257 }
268 258 return changes
269 259
260 @reraise_safe_exceptions
270 261 def node_history(self, wire, path, revision, limit):
271 cross_copies = False
272 repo = self._factory.repo(wire)
273 fsobj = svn.repos.fs(repo)
274 rev_root = svn.fs.revision_root(fsobj, revision)
262 cache_on, context_uid, repo_id = self._cache_on(wire)
263 @self.region.conditional_cache_on_arguments(condition=cache_on)
264 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
265 cross_copies = False
266 repo = self._factory.repo(wire)
267 fsobj = svn.repos.fs(repo)
268 rev_root = svn.fs.revision_root(fsobj, revision)
275 269
276 history_revisions = []
277 history = svn.fs.node_history(rev_root, path)
278 history = svn.fs.history_prev(history, cross_copies)
279 while history:
280 __, node_revision = svn.fs.history_location(history)
281 history_revisions.append(node_revision)
282 if limit and len(history_revisions) >= limit:
283 break
270 history_revisions = []
271 history = svn.fs.node_history(rev_root, path)
284 272 history = svn.fs.history_prev(history, cross_copies)
285 return history_revisions
273 while history:
274 __, node_revision = svn.fs.history_location(history)
275 history_revisions.append(node_revision)
276 if limit and len(history_revisions) >= limit:
277 break
278 history = svn.fs.history_prev(history, cross_copies)
279 return history_revisions
280 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
286 281
287 282 def node_properties(self, wire, path, revision):
288 repo = self._factory.repo(wire)
289 fsobj = svn.repos.fs(repo)
290 rev_root = svn.fs.revision_root(fsobj, revision)
291 return svn.fs.node_proplist(rev_root, path)
283 cache_on, context_uid, repo_id = self._cache_on(wire)
284 @self.region.conditional_cache_on_arguments(condition=cache_on)
285 def _node_properties(_repo_id, _path, _revision):
286 repo = self._factory.repo(wire)
287 fsobj = svn.repos.fs(repo)
288 rev_root = svn.fs.revision_root(fsobj, revision)
289 return svn.fs.node_proplist(rev_root, path)
290 return _node_properties(repo_id, path, revision)
292 291
293 292 def file_annotate(self, wire, path, revision):
294 293 abs_path = 'file://' + urllib.pathname2url(
295 294 vcspath.join(wire['path'], path))
296 295 file_uri = svn.core.svn_path_canonicalize(abs_path)
297 296
298 297 start_rev = svn_opt_revision_value_t(0)
299 298 peg_rev = svn_opt_revision_value_t(revision)
300 299 end_rev = peg_rev
301 300
302 301 annotations = []
303 302
304 303 def receiver(line_no, revision, author, date, line, pool):
305 304 annotations.append((line_no, revision, line))
306 305
307 306 # TODO: Cannot use blame5, missing typemap function in the swig code
308 307 try:
309 308 svn.client.blame2(
310 309 file_uri, peg_rev, start_rev, end_rev,
311 310 receiver, svn.client.create_context())
312 311 except svn.core.SubversionException as exc:
313 312 log.exception("Error during blame operation.")
314 313 raise Exception(
315 314 "Blame not supported or file does not exist at path %s. "
316 315 "Error %s." % (path, exc))
317 316
318 317 return annotations
319 318
320 def get_node_type(self, wire, path, rev=None):
321 repo = self._factory.repo(wire)
322 fs_ptr = svn.repos.fs(repo)
323 if rev is None:
324 rev = svn.fs.youngest_rev(fs_ptr)
325 root = svn.fs.revision_root(fs_ptr, rev)
326 node = svn.fs.check_path(root, path)
327 return NODE_TYPE_MAPPING.get(node, None)
319 def get_node_type(self, wire, path, revision=None):
320
321 cache_on, context_uid, repo_id = self._cache_on(wire)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 def _get_node_type(_repo_id, _path, _revision):
324 repo = self._factory.repo(wire)
325 fs_ptr = svn.repos.fs(repo)
326 if _revision is None:
327 _revision = svn.fs.youngest_rev(fs_ptr)
328 root = svn.fs.revision_root(fs_ptr, _revision)
329 node = svn.fs.check_path(root, path)
330 return NODE_TYPE_MAPPING.get(node, None)
331 return _get_node_type(repo_id, path, revision)
328 332
329 333 def get_nodes(self, wire, path, revision=None):
330 repo = self._factory.repo(wire)
331 fsobj = svn.repos.fs(repo)
332 if revision is None:
333 revision = svn.fs.youngest_rev(fsobj)
334 root = svn.fs.revision_root(fsobj, revision)
335 entries = svn.fs.dir_entries(root, path)
336 result = []
337 for entry_path, entry_info in entries.iteritems():
338 result.append(
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 return result
334
335 cache_on, context_uid, repo_id = self._cache_on(wire)
336 @self.region.conditional_cache_on_arguments(condition=cache_on)
337 def _get_nodes(_repo_id, _path, _revision):
338 repo = self._factory.repo(wire)
339 fsobj = svn.repos.fs(repo)
340 if _revision is None:
341 _revision = svn.fs.youngest_rev(fsobj)
342 root = svn.fs.revision_root(fsobj, _revision)
343 entries = svn.fs.dir_entries(root, path)
344 result = []
345 for entry_path, entry_info in entries.iteritems():
346 result.append(
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 return result
349 return _get_nodes(repo_id, path, revision)
341 350
342 351 def get_file_content(self, wire, path, rev=None):
343 352 repo = self._factory.repo(wire)
344 353 fsobj = svn.repos.fs(repo)
345 354 if rev is None:
346 355 rev = svn.fs.youngest_revision(fsobj)
347 356 root = svn.fs.revision_root(fsobj, rev)
348 357 content = svn.core.Stream(svn.fs.file_contents(root, path))
349 358 return content.read()
350 359
351 360 def get_file_size(self, wire, path, revision=None):
352 repo = self._factory.repo(wire)
353 fsobj = svn.repos.fs(repo)
354 if revision is None:
355 revision = svn.fs.youngest_revision(fsobj)
356 root = svn.fs.revision_root(fsobj, revision)
357 size = svn.fs.file_length(root, path)
358 return size
361
362 cache_on, context_uid, repo_id = self._cache_on(wire)
363 @self.region.conditional_cache_on_arguments(condition=cache_on)
364 def _get_file_size(_repo_id, _path, _revision):
365 repo = self._factory.repo(wire)
366 fsobj = svn.repos.fs(repo)
367 if _revision is None:
368 _revision = svn.fs.youngest_revision(fsobj)
369 root = svn.fs.revision_root(fsobj, _revision)
370 size = svn.fs.file_length(root, path)
371 return size
372 return _get_file_size(repo_id, path, revision)
359 373
360 374 def create_repository(self, wire, compatible_version=None):
361 375 log.info('Creating Subversion repository in path "%s"', wire['path'])
362 376 self._factory.repo(wire, create=True,
363 377 compatible_version=compatible_version)
364 378
365 379 def get_url_and_credentials(self, src_url):
366 380 obj = urlparse.urlparse(src_url)
367 381 username = obj.username or None
368 382 password = obj.password or None
369 383 return username, password, src_url
370 384
371 385 def import_remote_repository(self, wire, src_url):
372 386 repo_path = wire['path']
373 387 if not self.is_path_valid_repository(wire, repo_path):
374 388 raise Exception(
375 389 "Path %s is not a valid Subversion repository." % repo_path)
376 390
377 391 username, password, src_url = self.get_url_and_credentials(src_url)
378 392 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
379 393 '--trust-server-cert-failures=unknown-ca']
380 394 if username and password:
381 395 rdump_cmd += ['--username', username, '--password', password]
382 396 rdump_cmd += [src_url]
383 397
384 398 rdump = subprocess.Popen(
385 399 rdump_cmd,
386 400 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
387 401 load = subprocess.Popen(
388 402 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
389 403
390 404 # TODO: johbo: This can be a very long operation, might be better
391 405 # to track some kind of status and provide an api to check if the
392 406 # import is done.
393 407 rdump.wait()
394 408 load.wait()
395 409
396 410 log.debug('svnrdump process ended with code: %s', rdump.returncode)
397 411 if rdump.returncode != 0:
398 412 errors = rdump.stderr.read()
399 413 log.error('svnrdump dump failed: statuscode %s: message: %s',
400 414 rdump.returncode, errors)
401 415 reason = 'UNKNOWN'
402 416 if 'svnrdump: E230001:' in errors:
403 417 reason = 'INVALID_CERTIFICATE'
404 418
405 419 if reason == 'UNKNOWN':
406 420 reason = 'UNKNOWN:{}'.format(errors)
407 421 raise Exception(
408 422 'Failed to dump the remote repository from %s. Reason:%s' % (
409 423 src_url, reason))
410 424 if load.returncode != 0:
411 425 raise Exception(
412 426 'Failed to load the dump of remote repository from %s.' %
413 427 (src_url, ))
414 428
415 429 def commit(self, wire, message, author, timestamp, updated, removed):
416 430 assert isinstance(message, str)
417 431 assert isinstance(author, str)
418 432
419 433 repo = self._factory.repo(wire)
420 434 fsobj = svn.repos.fs(repo)
421 435
422 436 rev = svn.fs.youngest_rev(fsobj)
423 437 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
424 438 txn_root = svn.fs.txn_root(txn)
425 439
426 440 for node in updated:
427 441 TxnNodeProcessor(node, txn_root).update()
428 442 for node in removed:
429 443 TxnNodeProcessor(node, txn_root).remove()
430 444
431 445 commit_id = svn.repos.fs_commit_txn(repo, txn)
432 446
433 447 if timestamp:
434 448 apr_time = apr_time_t(timestamp)
435 449 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
436 450 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
437 451
438 452 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
439 453 return commit_id
440 454
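The commit flow above follows the standard Subversion transaction lifecycle: begin a transaction against the youngest revision, apply node updates and removals through TxnNodeProcessor, commit, and optionally backdate svn:date. A hedged usage sketch follows, assuming `remote` is an SvnRemote backed by a real factory and wire['path'] points at an existing repository; the node dict carries the path/content/properties keys TxnNodeProcessor reads, and all values are placeholders.

    wire = {'path': '/srv/repos/example'}  # placeholder repository path
    updated = [{'path': 'docs/readme.txt', 'content': 'hello\n', 'properties': {}}]
    commit_id = remote.commit(
        wire, message='Add readme', author='editor',
        timestamp=None, updated=updated, removed=[])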
441 455 def diff(self, wire, rev1, rev2, path1=None, path2=None,
442 456 ignore_whitespace=False, context=3):
443 457
444 458 wire.update(cache=False)
445 459 repo = self._factory.repo(wire)
446 460 diff_creator = SvnDiffer(
447 461 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
448 462 try:
449 463 return diff_creator.generate_diff()
450 464 except svn.core.SubversionException as e:
451 465 log.exception(
452 466 "Error during diff operation operation. "
453 467 "Path might not exist %s, %s" % (path1, path2))
454 468 return ""
455 469
456 470 @reraise_safe_exceptions
457 471 def is_large_file(self, wire, path):
458 472 return False
459 473
460 474 @reraise_safe_exceptions
475 def is_binary(self, wire, rev, path):
476 cache_on, context_uid, repo_id = self._cache_on(wire)
477
478 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 def _is_binary(_repo_id, _rev, _path):
480 raw_bytes = self.get_file_content(wire, path, rev)
481 return raw_bytes and '\0' in raw_bytes
482
483 return _is_binary(repo_id, rev, path)
484
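is_binary applies the classic NUL-byte heuristic over the whole file content returned by get_file_content. For comparison, a standalone sketch of the same idea in git's style, which samples only a leading window rather than the full content:

    def looks_binary(raw_bytes, sample_size=8000):
        # a zero byte in the leading window is taken to mean "binary"
        return b'\0' in raw_bytes[:sample_size]

    assert looks_binary(b'\x89PNG\r\n\x1a\n\x00')
    assert not looks_binary(b'plain text\n')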
485 @reraise_safe_exceptions
461 486 def run_svn_command(self, wire, cmd, **opts):
462 487 path = wire.get('path', None)
463 488
464 489 if path and os.path.isdir(path):
465 490 opts['cwd'] = path
466 491
467 492 safe_call = False
468 493 if '_safe' in opts:
469 494 safe_call = True
470 495
471 496 svnenv = os.environ.copy()
472 497 svnenv.update(opts.pop('extra_env', {}))
473 498
474 499 _opts = {'env': svnenv, 'shell': False}
475 500
476 501 try:
477 502 _opts.update(opts)
478 503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
479 504
480 505 return ''.join(p), ''.join(p.error)
481 506 except (EnvironmentError, OSError) as err:
482 507 cmd = ' '.join(cmd) # human friendly CMD
483 508 tb_err = ("Couldn't run svn command (%s).\n"
484 509 "Original error was:%s\n"
485 510 "Call options:%s\n"
486 511 % (cmd, err, _opts))
487 512 log.exception(tb_err)
488 513 if safe_call:
489 514 return '', err
490 515 else:
491 516 raise exceptions.VcsException()(tb_err)
492 517
493 518 @reraise_safe_exceptions
494 519 def install_hooks(self, wire, force=False):
495 520 from vcsserver.hook_utils import install_svn_hooks
496 521 repo_path = wire['path']
497 522 binary_dir = settings.BINARY_DIR
498 523 executable = None
499 524 if binary_dir:
500 525 executable = os.path.join(binary_dir, 'python')
501 526 return install_svn_hooks(
502 527 repo_path, executable=executable, force_create=force)
503 528
504 529 @reraise_safe_exceptions
505 530 def get_hooks_info(self, wire):
506 531 from vcsserver.hook_utils import (
507 532 get_svn_pre_hook_version, get_svn_post_hook_version)
508 533 repo_path = wire['path']
509 534 return {
510 535 'pre_version': get_svn_pre_hook_version(repo_path),
511 536 'post_version': get_svn_post_hook_version(repo_path),
512 537 }
513 538
514 539
515 540 class SvnDiffer(object):
516 541 """
517 542 Utility to create diffs based on difflib and the Subversion api
518 543 """
519 544
520 545 binary_content = False
521 546
522 547 def __init__(
523 548 self, repo, src_rev, src_path, tgt_rev, tgt_path,
524 549 ignore_whitespace, context):
525 550 self.repo = repo
526 551 self.ignore_whitespace = ignore_whitespace
527 552 self.context = context
528 553
529 554 fsobj = svn.repos.fs(repo)
530 555
531 556 self.tgt_rev = tgt_rev
532 557 self.tgt_path = tgt_path or ''
533 558 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
534 559 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
535 560
536 561 self.src_rev = src_rev
537 562 self.src_path = src_path or self.tgt_path
538 563 self.src_root = svn.fs.revision_root(fsobj, src_rev)
539 564 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
540 565
541 566 self._validate()
542 567
543 568 def _validate(self):
544 569 if (self.tgt_kind != svn.core.svn_node_none and
545 570 self.src_kind != svn.core.svn_node_none and
546 571 self.src_kind != self.tgt_kind):
547 572 # TODO: johbo: proper error handling
548 573 raise Exception(
549 574 "Source and target are not compatible for diff generation. "
550 575 "Source type: %s, target type: %s" %
551 576 (self.src_kind, self.tgt_kind))
552 577
553 578 def generate_diff(self):
554 579 buf = StringIO.StringIO()
555 580 if self.tgt_kind == svn.core.svn_node_dir:
556 581 self._generate_dir_diff(buf)
557 582 else:
558 583 self._generate_file_diff(buf)
559 584 return buf.getvalue()
560 585
561 586 def _generate_dir_diff(self, buf):
562 587 editor = DiffChangeEditor()
563 588 editor_ptr, editor_baton = svn.delta.make_editor(editor)
564 589 svn.repos.dir_delta2(
565 590 self.src_root,
566 591 self.src_path,
567 592 '', # src_entry
568 593 self.tgt_root,
569 594 self.tgt_path,
570 595 editor_ptr, editor_baton,
571 596 authorization_callback_allow_all,
572 597 False, # text_deltas
573 598 svn.core.svn_depth_infinity, # depth
574 599 False, # entry_props
575 600 False, # ignore_ancestry
576 601 )
577 602
578 603 for path, __, change in sorted(editor.changes):
579 604 self._generate_node_diff(
580 605 buf, change, path, self.tgt_path, path, self.src_path)
581 606
582 607 def _generate_file_diff(self, buf):
583 608 change = None
584 609 if self.src_kind == svn.core.svn_node_none:
585 610 change = "add"
586 611 elif self.tgt_kind == svn.core.svn_node_none:
587 612 change = "delete"
588 613 tgt_base, tgt_path = vcspath.split(self.tgt_path)
589 614 src_base, src_path = vcspath.split(self.src_path)
590 615 self._generate_node_diff(
591 616 buf, change, tgt_path, tgt_base, src_path, src_base)
592 617
593 618 def _generate_node_diff(
594 619 self, buf, change, tgt_path, tgt_base, src_path, src_base):
595 620
596 621 if self.src_rev == self.tgt_rev and tgt_base == src_base:
597 622 # keep behaviour consistent with git/hg: return an empty diff
598 623 # when comparing the same revisions
599 624 return
600 625
601 626 tgt_full_path = vcspath.join(tgt_base, tgt_path)
602 627 src_full_path = vcspath.join(src_base, src_path)
603 628
604 629 self.binary_content = False
605 630 mime_type = self._get_mime_type(tgt_full_path)
606 631
607 632 if mime_type and not mime_type.startswith('text'):
608 633 self.binary_content = True
609 634 buf.write("=" * 67 + '\n')
610 635 buf.write("Cannot display: file marked as a binary type.\n")
611 636 buf.write("svn:mime-type = %s\n" % mime_type)
612 637 buf.write("Index: %s\n" % (tgt_path, ))
613 638 buf.write("=" * 67 + '\n')
614 639 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
615 640 'tgt_path': tgt_path})
616 641
617 642 if change == 'add':
618 643 # TODO: johbo: SVN is missing a zero here compared to git
619 644 buf.write("new file mode 10644\n")
620 645
621 646 # TODO(marcink): introduce binary detection of svn patches
622 647 # if self.binary_content:
623 648 # buf.write('GIT binary patch\n')
624 649
625 650 buf.write("--- /dev/null\t(revision 0)\n")
626 651 src_lines = []
627 652 else:
628 653 if change == 'delete':
629 654 buf.write("deleted file mode 10644\n")
630 655
631 656 # TODO(marcink): introduce binary detection of svn patches
632 657 # if self.binary_content:
633 658 # buf.write('GIT binary patch\n')
634 659
635 660 buf.write("--- a/%s\t(revision %s)\n" % (
636 661 src_path, self.src_rev))
637 662 src_lines = self._svn_readlines(self.src_root, src_full_path)
638 663
639 664 if change == 'delete':
640 665 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
641 666 tgt_lines = []
642 667 else:
643 668 buf.write("+++ b/%s\t(revision %s)\n" % (
644 669 tgt_path, self.tgt_rev))
645 670 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
646 671
647 672 if not self.binary_content:
648 673 udiff = svn_diff.unified_diff(
649 674 src_lines, tgt_lines, context=self.context,
650 675 ignore_blank_lines=self.ignore_whitespace,
651 676 ignore_case=False,
652 677 ignore_space_changes=self.ignore_whitespace)
653 678 buf.writelines(udiff)
654 679
655 680 def _get_mime_type(self, path):
656 681 try:
657 682 mime_type = svn.fs.node_prop(
658 683 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
659 684 except svn.core.SubversionException:
660 685 mime_type = svn.fs.node_prop(
661 686 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
662 687 return mime_type
663 688
664 689 def _svn_readlines(self, fs_root, node_path):
665 690 if self.binary_content:
666 691 return []
667 692 node_kind = svn.fs.check_path(fs_root, node_path)
668 693 if node_kind not in (
669 694 svn.core.svn_node_file, svn.core.svn_node_symlink):
670 695 return []
671 696 content = svn.core.Stream(
672 697 svn.fs.file_contents(fs_root, node_path)).read()
673 698 return content.splitlines(True)
674 699
675 700
676
677 701 class DiffChangeEditor(svn.delta.Editor):
678 702 """
679 703 Records changes between two given revisions
680 704 """
681 705
682 706 def __init__(self):
683 707 self.changes = []
684 708
685 709 def delete_entry(self, path, revision, parent_baton, pool=None):
686 710 self.changes.append((path, None, 'delete'))
687 711
688 712 def add_file(
689 713 self, path, parent_baton, copyfrom_path, copyfrom_revision,
690 714 file_pool=None):
691 715 self.changes.append((path, 'file', 'add'))
692 716
693 717 def open_file(self, path, parent_baton, base_revision, file_pool=None):
694 718 self.changes.append((path, 'file', 'change'))
695 719
696 720
697 721 def authorization_callback_allow_all(root, path, pool):
698 722 return True
699 723
700 724
701 725 class TxnNodeProcessor(object):
702 726 """
703 727 Utility to process the change of one node within a transaction root.
704 728
705 729 It encapsulates the knowledge of how to add, update or remove
706 730 a node for a given transaction root. The purpose is to support the method
707 731 `SvnRemote.commit`.
708 732 """
709 733
710 734 def __init__(self, node, txn_root):
711 735 assert isinstance(node['path'], str)
712 736
713 737 self.node = node
714 738 self.txn_root = txn_root
715 739
716 740 def update(self):
717 741 self._ensure_parent_dirs()
718 742 self._add_file_if_node_does_not_exist()
719 743 self._update_file_content()
720 744 self._update_file_properties()
721 745
722 746 def remove(self):
723 747 svn.fs.delete(self.txn_root, self.node['path'])
724 748 # TODO: Clean up directory if empty
725 749
726 750 def _ensure_parent_dirs(self):
727 751 curdir = vcspath.dirname(self.node['path'])
728 752 dirs_to_create = []
729 753 while not self._svn_path_exists(curdir):
730 754 dirs_to_create.append(curdir)
731 755 curdir = vcspath.dirname(curdir)
732 756
733 757 for curdir in reversed(dirs_to_create):
734 758 log.debug('Creating missing directory "%s"', curdir)
735 759 svn.fs.make_dir(self.txn_root, curdir)
736 760
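_ensure_parent_dirs walks upward from the node's directory until it hits one that already exists, then creates the missing ancestors top-down. A path-only sketch of that walk, with no svn bindings involved (`exists` is any membership predicate):

    import posixpath

    def missing_parents(path, exists):
        todo = []
        cur = posixpath.dirname(path)
        while cur and not exists(cur):
            todo.append(cur)
            cur = posixpath.dirname(cur)
        return list(reversed(todo))  # create shallowest first

    present = {'a'}
    assert missing_parents('a/b/c/file.txt', present.__contains__) == ['a/b', 'a/b/c']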
737 761 def _svn_path_exists(self, path):
738 762 path_status = svn.fs.check_path(self.txn_root, path)
739 763 return path_status != svn.core.svn_node_none
740 764
741 765 def _add_file_if_node_does_not_exist(self):
742 766 kind = svn.fs.check_path(self.txn_root, self.node['path'])
743 767 if kind == svn.core.svn_node_none:
744 768 svn.fs.make_file(self.txn_root, self.node['path'])
745 769
746 770 def _update_file_content(self):
747 771 assert isinstance(self.node['content'], str)
748 772 handler, baton = svn.fs.apply_textdelta(
749 773 self.txn_root, self.node['path'], None, None)
750 774 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
751 775
752 776 def _update_file_properties(self):
753 777 properties = self.node.get('properties', {})
754 778 for key, value in properties.iteritems():
755 779 svn.fs.change_node_prop(
756 780 self.txn_root, self.node['path'], key, value)
757 781
758 782
759 783 def apr_time_t(timestamp):
760 784 """
761 785 Convert a Python timestamp into APR timestamp type apr_time_t
762 786 """
763 787 return timestamp * 1E6
764 788
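APR counts time in microseconds since the Unix epoch, hence the 1E6 scale factor. A quick sanity check:

    # 2019-01-01T00:00:00Z as a Unix timestamp, scaled to APR microseconds
    assert apr_time_t(1546300800) == 1546300800 * 1e6  # 1.5463008e+15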
765 789
766 790 def svn_opt_revision_value_t(num):
767 791 """
768 792 Put `num` into a `svn_opt_revision_value_t` structure.
769 793 """
770 794 value = svn.core.svn_opt_revision_value_t()
771 795 value.number = num
772 796 revision = svn.core.svn_opt_revision_t()
773 797 revision.kind = svn.core.svn_opt_revision_number
774 798 revision.value = value
775 799 return revision
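A hedged usage sketch for SvnDiffer, matching the constructor shown above; it assumes the Subversion SWIG bindings are importable, and the repository path and revision numbers are placeholders:

    import svn.repos  # Subversion SWIG bindings

    repo = svn.repos.open('/srv/repos/example')
    differ = SvnDiffer(
        repo, src_rev=5, src_path='', tgt_rev=6, tgt_path='',
        ignore_whitespace=False, context=3)
    print(differ.generate_diff())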
@@ -1,165 +1,160 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.pull(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire={}, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
107
108 102
109 103 class TestReraiseSafeExceptions(object):
104
110 105 def test_method_decorated_with_reraise_safe_exceptions(self):
111 106 factory = Mock()
112 107 git_remote = git.GitRemote(factory)
113 108
114 109 def fake_function():
115 110 return None
116 111
117 112 decorator = git.reraise_safe_exceptions(fake_function)
118 113
119 114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 115 for method_name, method in methods:
121 116 if not method_name.startswith('_'):
122 117 assert method.im_func.__code__ == decorator.__code__
123 118
124 119 @pytest.mark.parametrize('side_effect, expected_type', [
125 120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 124 (dulwich.errors.HangupException(), 'error'),
130 125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 126 ])
132 127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 128 @git.reraise_safe_exceptions
134 129 def fake_method():
135 130 raise side_effect
136 131
137 132 with pytest.raises(Exception) as exc_info:
138 133 fake_method()
139 134 assert type(exc_info.value) == Exception
140 135 assert exc_info.value._vcs_kind == expected_type
141 136
142 137
143 138 class TestDulwichRepoWrapper(object):
144 139 def test_calls_close_on_delete(self):
145 140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 141 with isdir_patcher:
147 142 repo = git.Repo('/tmp/abcde')
148 143 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 144 del repo
150 145 close_mock.assert_called_once_with()
151 146
152 147
153 148 class TestGitFactory(object):
154 149 def test_create_repo_returns_dulwich_wrapper(self):
155 150
156 151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 152 mock.side_effect = {'repo_objects': ''}
158 153 factory = git.GitFactory()
159 154 wire = {
160 155 'path': '/tmp/abcde'
161 156 }
162 157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 158 with isdir_patcher:
164 159 result = factory._create_repo(wire, True)
165 160 assert isinstance(result, git.Repo)
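Note the wire=None to wire={} change in these tests: the remote layer now derives cache keys with wire.get(...), which a None wire cannot satisfy. A two-line sketch of the failure mode:

    def cache_key(wire):
        return (wire.get('repo_id', ''), wire.get('context', ''))

    assert cache_key({}) == ('', '')
    # cache_key(None) would raise AttributeError: 'NoneType' has no attribute 'get'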
@@ -1,127 +1,108 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
29 class TestHGLookup(object):
30 def setup(self):
31 self.mock_repo = MagicMock()
32 self.mock_repo.__getitem__.side_effect = LookupError(
33 'revision_or_commit_id', 'index', 'message')
34 factory = Mock()
35 factory.repo = Mock(return_value=self.mock_repo)
36 self.remote_hg = hg.HgRemote(factory)
37
38 def test_fail_lookup_hg(self):
39 with pytest.raises(Exception) as exc_info:
40 self.remote_hg.lookup(
41 wire=None, revision='revision_or_commit_id', both=True)
42
43 assert exc_info.value._vcs_kind == 'lookup'
44 assert 'revision_or_commit_id' in exc_info.value.args
45
46
47 29 class TestDiff(object):
48 30 def test_raising_safe_exception_when_lookup_failed(self):
49 repo = Mock()
31
50 32 factory = Mock()
51 factory.repo = Mock(return_value=repo)
52 33 hg_remote = hg.HgRemote(factory)
53 34 with patch('mercurial.patch.diff') as diff_mock:
54 35 diff_mock.side_effect = LookupError(
55 36 'deadbeef', 'index', 'message')
56 37 with pytest.raises(Exception) as exc_info:
57 38 hg_remote.diff(
58 wire=None, rev1='deadbeef', rev2='deadbee1',
39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
59 40 file_filter=None, opt_git=True, opt_ignorews=True,
60 41 context=3)
61 42 assert type(exc_info.value) == Exception
62 43 assert exc_info.value._vcs_kind == 'lookup'
63 44
64 45
65 46 class TestReraiseSafeExceptions(object):
66 47 def test_method_decorated_with_reraise_safe_exceptions(self):
67 48 factory = Mock()
68 49 hg_remote = hg.HgRemote(factory)
69 50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
70 51 decorator = hg.reraise_safe_exceptions(None)
71 52 for method_name, method in methods:
72 53 if not method_name.startswith('_'):
73 54 assert method.im_func.__code__ == decorator.__code__
74 55
75 56 @pytest.mark.parametrize('side_effect, expected_type', [
76 57 (hgcompat.Abort(), 'abort'),
77 58 (hgcompat.InterventionRequired(), 'abort'),
78 59 (hgcompat.RepoLookupError(), 'lookup'),
79 60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
80 61 (hgcompat.RepoError(), 'error'),
81 62 (hgcompat.RequirementError(), 'requirement'),
82 63 ])
83 64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
84 65 @hg.reraise_safe_exceptions
85 66 def fake_method():
86 67 raise side_effect
87 68
88 69 with pytest.raises(Exception) as exc_info:
89 70 fake_method()
90 71 assert type(exc_info.value) == Exception
91 72 assert exc_info.value._vcs_kind == expected_type
92 73
93 74 def test_keeps_original_traceback(self):
94 75 @hg.reraise_safe_exceptions
95 76 def fake_method():
96 77 try:
97 78 raise hgcompat.Abort()
98 79 except:
99 80 self.original_traceback = traceback.format_tb(
100 81 sys.exc_info()[2])
101 82 raise
102 83
103 84 try:
104 85 fake_method()
105 86 except Exception:
106 87 new_traceback = traceback.format_tb(sys.exc_info()[2])
107 88
108 89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
109 90 assert new_traceback_tail == self.original_traceback
110 91
111 92 def test_maps_unknown_exceptions_to_unhandled(self):
112 93 @hg.reraise_safe_exceptions
113 94 def stub_method():
114 95 raise ValueError('stub')
115 96
116 97 with pytest.raises(Exception) as exc_info:
117 98 stub_method()
118 99 assert exc_info.value._vcs_kind == 'unhandled'
119 100
120 101 def test_does_not_map_known_exceptions(self):
121 102 @hg.reraise_safe_exceptions
122 103 def stub_method():
123 104 raise exceptions.LookupException()('stub')
124 105
125 106 with pytest.raises(Exception) as exc_info:
126 107 stub_method()
127 108 assert exc_info.value._vcs_kind == 'lookup'
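These tests pin down the reraise_safe_exceptions contract: backend-specific errors are re-raised as a plain Exception tagged with a _vcs_kind marker, so the kind survives the RPC boundary. A reduced sketch of that contract follows; the stand-in error type is hypothetical, and the real decorator also preserves the original traceback, which this sketch does not.

    import functools

    def reraise_safe_exceptions(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except KeyError:  # stand-in for a backend lookup error
                exc = Exception('lookup failed')
                exc._vcs_kind = 'lookup'
                raise exc
            except Exception:
                exc = Exception('unhandled')
                exc._vcs_kind = 'unhandled'
                raise exc
        return wrapper

    @reraise_safe_exceptions
    def fake_method():
        raise KeyError('deadbeef')

    try:
        fake_method()
    except Exception as exc:
        assert exc._vcs_kind == 'lookup'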
@@ -1,82 +1,87 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import mock
20 20 import pytest
21 21 import sys
22 22
23 23
24 24 class MockPopen(object):
25 25 def __init__(self, stderr):
26 26 self.stdout = io.BytesIO('')
27 27 self.stderr = io.BytesIO(stderr)
28 28 self.returncode = 1
29 29
30 30 def wait(self):
31 31 pass
32 32
33 33
34 34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 37 ])
38 38
39 39
40 40 @pytest.mark.parametrize('stderr,expected_reason', [
41 41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 42 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
43 43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 44 @pytest.mark.xfail(sys.platform == "cygwin",
45 45 reason="SVN not packaged for Cygwin")
46 46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 47 from vcsserver import svn
48 factory = mock.Mock()
49 factory.repo = mock.Mock(return_value=mock.Mock())
48 50
49 remote = svn.SvnRemote(None)
51 remote = svn.SvnRemote(factory)
50 52 remote.is_path_valid_repository = lambda wire, path: True
51 53
52 54 with mock.patch('subprocess.Popen',
53 55 return_value=MockPopen(stderr)):
54 56 with pytest.raises(Exception) as excinfo:
55 57 remote.import_remote_repository({'path': 'path'}, 'url')
56 58
57 59 expected_error_args = (
58 60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
59 61
60 62 assert excinfo.value.args == expected_error_args
61 63
62 64
63 65 def test_svn_libraries_can_be_imported():
64 66 import svn
65 67 import svn.client
66 68 assert svn.client is not None
67 69
68 70
69 71 @pytest.mark.parametrize('example_url, parts', [
70 72 ('http://server.com', (None, None, 'http://server.com')),
71 73 ('http://user@server.com', ('user', None, 'http://user@server.com')),
72 74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
73 75 ('<script>', (None, None, '<script>')),
74 76 ('http://', (None, None, 'http://')),
75 77 ])
76 78 def test_username_password_extraction_from_url(example_url, parts):
77 79 from vcsserver import svn
78 80
79 remote = svn.SvnRemote(None)
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
83
84 remote = svn.SvnRemote(factory)
80 85 remote.is_path_valid_repository = lambda wire, path: True
81 86
82 87 assert remote.get_url_and_credentials(example_url) == parts
@@ -1,58 +1,64 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18
19
20 18 import time
21 19 import logging
22 20
23
21 import vcsserver
24 22 from vcsserver.utils import safe_str
25 23
26 24
27 25 log = logging.getLogger(__name__)
28 26
29 27
30 28 def get_access_path(request):
31 29 environ = request.environ
32 30 return environ.get('PATH_INFO')
33 31
34 32
33 def get_user_agent(environ):
34 return environ.get('HTTP_USER_AGENT')
35
36
35 37 class RequestWrapperTween(object):
36 38 def __init__(self, handler, registry):
37 39 self.handler = handler
38 40 self.registry = registry
39 41
40 42 # one-time configuration code goes here
41 43
42 44 def __call__(self, request):
43 45 start = time.time()
44 46 try:
45 47 response = self.handler(request)
46 48 finally:
47 49 end = time.time()
48
49 log.info('IP: %s Request to path: `%s` time: %.3fs',
50 '127.0.0.1', safe_str(get_access_path(request)), end - start)
50 total = end - start
51 count = request.request_count()
52 _ver_ = vcsserver.__version__
53 log.info(
54 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
55 count, '127.0.0.1', request.environ.get('REQUEST_METHOD'),
56 safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_)
51 57
52 58 return response
53 59
54 60
55 61 def includeme(config):
56 62 config.add_tween(
57 'vcsserver.tweens.RequestWrapperTween',
63 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
58 64 )
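The reworked tween logs request.request_count(); a hedged sketch of one way such a request method could be registered with Pyramid (the counter helper below is an assumption for illustration, the changeset keeps its own elsewhere):

    from pyramid.config import Configurator

    _counter = {'n': 0}

    def request_count(request):
        # naive per-process counter, incremented once per call
        _counter['n'] += 1
        return _counter['n']

    config = Configurator()
    config.add_request_method(request_count, 'request_count')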