@@ -0,0 +1,8 b'' | |||
|
1 | ## special libraries that requirements.txt can be extended with to add some | |
|
2 | ## custom libraries useful for debugging and memory tracing | |
|
3 | ||
|
4 | ## to enable, uncomment the inclusion of this file in requirements.txt, then run make generate-pkgs and nix-shell | |
|
5 | ||
|
6 | objgraph | |
|
7 | memory-profiler | |
|
8 | pympler |
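
Note on the packages above: objgraph, memory-profiler and pympler are general-purpose introspection tools. The snippet below is only an illustrative sketch of how they can be used from a Python shell or a debug hook once installed; it is not part of this changeset, and the object names are made up.

    # hypothetical debugging session with the packages listed above
    import objgraph
    from pympler import asizeof

    # print the most common live object types, useful for spotting leaks
    objgraph.show_most_common_types(limit=10)

    # deep memory footprint (in bytes) of a suspect object graph
    suspect = {'repo_cache': ['some', 'cached', 'values']}
    print(asizeof.asizeof(suspect))

memory-profiler additionally ships a @profile decorator and the mprof command for line-by-line and over-time measurements.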
@@ -0,0 +1,27 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # RhodeCode VCSServer provides access to different vcs backends via network. | |
|
4 | # Copyright (C) 2014-2019 RhodeCode GmbH | |
|
5 | # | |
|
6 | # This program is free software; you can redistribute it and/or modify | |
|
7 | # it under the terms of the GNU General Public License as published by | |
|
8 | # the Free Software Foundation; either version 3 of the License, or | |
|
9 | # (at your option) any later version. | |
|
10 | # | |
|
11 | # This program is distributed in the hope that it will be useful, | |
|
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
14 | # GNU General Public License for more details. | |
|
15 | # | |
|
16 | # You should have received a copy of the GNU General Public License | |
|
17 | # along with this program; if not, write to the Free Software Foundation, | |
|
18 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
|
19 | ||
|
20 | ||
|
21 | counter = 0 | |
|
22 | ||
|
23 | ||
|
24 | def get_request_counter(request): | |
|
25 | global counter | |
|
26 | counter += 1 | |
|
27 | return counter |
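
The module above keeps a module-level counter: every call to get_request_counter() increments it and returns the new value, so the number is a per-process (per worker) request sequence rather than a global one. A minimal sketch of how such a helper is typically exposed on Pyramid requests follows; the import path and registration are assumptions for illustration, not taken from this changeset.

    # illustrative wiring only (assumed, not part of the diff)
    from pyramid.config import Configurator
    from vcsserver.lib.request_counter import get_request_counter  # hypothetical path

    config = Configurator()
    # makes the counter available as request.request_count in views and tweens
    config.add_request_method(get_request_counter, 'request_count', reify=False)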
@@ -0,0 +1,19 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2019 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
@@ -0,0 +1,32 b'' | |||
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |
|
2 | # Copyright (C) 2014-2019 RhodeCode GmbH | |
|
3 | # | |
|
4 | # This program is free software; you can redistribute it and/or modify | |
|
5 | # it under the terms of the GNU General Public License as published by | |
|
6 | # the Free Software Foundation; either version 3 of the License, or | |
|
7 | # (at your option) any later version. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU General Public License | |
|
15 | # along with this program; if not, write to the Free Software Foundation, | |
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
|
17 | ||
|
18 | ||
|
19 | class RemoteBase(object): | |
|
20 | EMPTY_COMMIT = '0' * 40 | |
|
21 | ||
|
22 | @property | |
|
23 | def region(self): | |
|
24 | return self._factory._cache_region | |
|
25 | ||
|
26 | def _cache_on(self, wire): | |
|
27 | context = wire.get('context', '') | |
|
28 | context_uid = '{}'.format(context) | |
|
29 | repo_id = wire.get('repo_id', '') | |
|
30 | cache = wire.get('cache', True) | |
|
31 | cache_on = context and cache | |
|
32 | return cache_on, context_uid, repo_id |
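
RemoteBase centralizes the per-call cache decision: _cache_on() reports whether caching applies (a context must be present and the wire must not disable it) and returns the context uid and repo id that scope the cache entries, while region exposes the dogpile cache region created by the backend factory. A rough usage sketch with the plain dogpile.cache API is shown below; the method name, key layout and placeholder call are illustrative assumptions, not the actual vcsserver call sites.

    # hypothetical subclass showing how the returned values could be used
    class ExampleRemote(RemoteBase):
        def commit_branch(self, wire, commit_id):
            cache_on, context_uid, repo_id = self._cache_on(wire)
            cache_key = 'commit_branch.{}.{}.{}'.format(repo_id, context_uid, commit_id)

            def _compute():
                return self._lookup_branch(commit_id)  # placeholder for the real VCS call

            if cache_on:
                # get_or_create runs _compute only on a cache miss
                return self.region.get_or_create(cache_key, _compute)
            return _compute()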
@@ -1,5 +1,5 b'' | |||
|
1 | 1 | [bumpversion] |
|
2 |
current_version = 4.1 |
|
|
2 | current_version = 4.18.0 | |
|
3 | 3 | message = release: Bump version {current_version} to {new_version} |
|
4 | 4 | |
|
5 | 5 | [bumpversion:file:vcsserver/VERSION] |
@@ -5,12 +5,10 b' done = false' | |||
|
5 | 5 | done = true |
|
6 | 6 | |
|
7 | 7 | [task:fixes_on_stable] |
|
8 | done = true | |
|
9 | 8 | |
|
10 | 9 | [task:pip2nix_generated] |
|
11 | done = true | |
|
12 | 10 | |
|
13 | 11 | [release] |
|
14 |
state = |
|
|
15 |
version = 4.1 |
|
|
12 | state = in_progress | |
|
13 | version = 4.18.0 | |
|
16 | 14 |
@@ -1,50 +1,200 b'' | |||
|
1 | ################################################################################ | |
|
2 | # RhodeCode VCSServer with HTTP Backend - configuration # | |
|
3 | ################################################################################ | |
|
1 | ## -*- coding: utf-8 -*- | |
|
4 | 2 | |
|
3 | ; ################################# | |
|
4 | ; RHODECODE VCSSERVER CONFIGURATION | |
|
5 | ; ################################# | |
|
5 | 6 | |
|
6 | 7 | [server:main] |
|
7 | ## COMMON ## | |
|
8 | ; COMMON HOST/IP CONFIG | |
|
8 | 9 | host = 0.0.0.0 |
|
9 | 10 | port = 9900 |
|
10 | 11 | |
|
12 | ; ################################################## | |
|
13 | ; WAITRESS WSGI SERVER - Recommended for Development | |
|
14 | ; ################################################## | |
|
15 | ||
|
16 | ; use server type | |
|
11 | 17 | use = egg:waitress#main |
|
12 | 18 | |
|
19 | ; number of worker threads | |
|
20 | threads = 5 | |
|
21 | ||
|
22 | ; MAX BODY SIZE 100GB | |
|
23 | max_request_body_size = 107374182400 | |
|
24 | ||
|
25 | ; Use poll instead of select, fixes file descriptor limit problems. | |
|
26 | ; May not work on old Windows systems. | |
|
27 | asyncore_use_poll = true | |
|
28 | ||
|
29 | ||
|
30 | ; ########################### | |
|
31 | ; GUNICORN APPLICATION SERVER | |
|
32 | ; ########################### | |
|
33 | ||
|
34 | ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini | |
|
35 | ||
|
36 | ; Module to use, this setting shouldn't be changed | |
|
37 | #use = egg:gunicorn#main | |
|
38 | ||
|
39 | ; Sets the number of process workers. More workers means more concurrent connections | |
|
40 | ; RhodeCode can handle at the same time. Each additional worker also increases | |
|
41 | ; memory usage, as each has its own set of caches. | |
|
42 | ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more | |
|
43 | ; than 8-10, except for really big deployments, e.g. 700-1000 users. | |
|
44 | ; `instance_id = *` must be set in the [app:main] section below (which is the default) | |
|
45 | ; when using more than 1 worker. | |
|
46 | #workers = 2 | |
|
47 | ||
|
48 | ; Gunicorn access log level | |
|
49 | #loglevel = info | |
|
50 | ||
|
51 | ; Process name visible in process list | |
|
52 | #proc_name = rhodecode_vcsserver | |
|
53 | ||
|
54 | ; Type of worker class, one of `sync`, `gevent` | |
|
55 | ; currently `sync` is the only option allowed. | |
|
56 | #worker_class = sync | |
|
57 | ||
|
58 | ; The maximum number of simultaneous clients. Valid only for gevent | |
|
59 | #worker_connections = 10 | |
|
60 | ||
|
61 | ; Max number of requests that worker will handle before being gracefully restarted. | |
|
62 | ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. | |
|
63 | #max_requests = 1000 | |
|
64 | #max_requests_jitter = 30 | |
|
65 | ||
|
66 | ; Amount of time a worker can spend handling a request before it | |
|
67 | ; gets killed and restarted. By default set to 21600 (6hrs) | |
|
68 | ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) | |
|
69 | #timeout = 21600 | |
|
70 | ||
|
71 | ; The maximum size of HTTP request line in bytes. | |
|
72 | ; 0 for unlimited | |
|
73 | #limit_request_line = 0 | |
|
74 | ||
|
75 | ; Limit the number of HTTP headers fields in a request. | |
|
76 | ; By default this value is 100 and can't be larger than 32768. | |
|
77 | #limit_request_fields = 32768 | |
|
78 | ||
|
79 | ; Limit the allowed size of an HTTP request header field. | |
|
80 | ; Value is a positive number or 0. | |
|
81 | ; Setting it to 0 will allow unlimited header field sizes. | |
|
82 | #limit_request_field_size = 0 | |
|
83 | ||
|
84 | ; Timeout for graceful workers restart. | |
|
85 | ; After receiving a restart signal, workers have this much time to finish | |
|
86 | ; serving requests. Workers still alive after the timeout (starting from the | |
|
87 | ; receipt of the restart signal) are force killed. | |
|
88 | ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) | |
|
89 | #graceful_timeout = 3600 | |
|
90 | ||
|
91 | # The number of seconds to wait for requests on a Keep-Alive connection. | |
|
92 | # Generally set in the 1-5 seconds range. | |
|
93 | #keepalive = 2 | |
|
94 | ||
|
95 | ; Maximum memory usage that each worker can use before it will receive a | |
|
96 | ; graceful restart signal. 0 = memory monitoring is disabled | |
|
97 | ; Examples: 268435456 (256MB), 536870912 (512MB) | |
|
98 | ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) | |
|
99 | #memory_max_usage = 0 | |
|
100 | ||
|
101 | ; How often in seconds to check for memory usage for each gunicorn worker | |
|
102 | #memory_usage_check_interval = 60 | |
|
103 | ||
|
104 | ; Threshold below which a worker is not recycled if garbage collection | |
|
105 | ; frees up enough resources. Before each restart we try to run GC on the worker; | |
|
106 | ; if enough memory is freed after that, the restart will not happen. | |
|
107 | #memory_usage_recovery_threshold = 0.8 | |
|
108 | ||
|
13 | 109 | |
|
14 | 110 | [app:main] |
|
111 | ; The %(here)s variable will be replaced with the absolute path of the parent directory | |
|
112 | ; of this file | |
|
15 | 113 | use = egg:rhodecode-vcsserver |
|
16 | 114 | |
|
17 | pyramid.default_locale_name = en | |
|
115 | ||
|
116 | ; ############# | |
|
117 | ; DEBUG OPTIONS | |
|
118 | ; ############# | |
|
119 | ||
|
120 | # During development we want to have the debug toolbar enabled | |
|
18 | 121 | pyramid.includes = |
|
122 | pyramid_debugtoolbar | |
|
19 | 123 | |
|
20 | ## default locale used by VCS systems | |
|
124 | debugtoolbar.hosts = 0.0.0.0/0 | |
|
125 | debugtoolbar.exclude_prefixes = | |
|
126 | /css | |
|
127 | /fonts | |
|
128 | /images | |
|
129 | /js | |
|
130 | ||
|
131 | ; ################# | |
|
132 | ; END DEBUG OPTIONS | |
|
133 | ; ################# | |
|
134 | ||
|
135 | ; Pyramid default locale; we need this to be set | |
|
136 | pyramid.default_locale_name = en | |
|
137 | ||
|
138 | ; default locale used by VCS systems | |
|
21 | 139 | locale = en_US.UTF-8 |
|
22 | 140 | |
|
23 | ||
|
24 | ## path to binaries for vcsserver, it should be set by the installer | |
|
25 | ## at installation time, e.g /home/user/vcsserver-1/profile/bin | |
|
141 | ; Path to binaries for vcsserver; it should be set by the installer | |
|
142 | ; at installation time, e.g. /home/user/vcsserver-1/profile/bin | |
|
143 | ; it can also be a path to nix-build output in case of development | |
|
26 | 144 | core.binary_dir = "" |
|
27 | 145 | |
|
28 |
|
|
|
29 |
|
|
|
146 | ; Custom exception store path, defaults to TMPDIR | |
|
147 | ; This is used to store exceptions from RhodeCode in a shared directory | |
|
30 | 148 | #exception_tracker.store_path = |
|
31 | 149 | |
|
32 | ## Default cache dir for caches. Putting this into a ramdisk | |
|
33 | ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require | |
|
34 | ## large amount of space | |
|
35 | cache_dir = %(here)s/rcdev/data | |
|
150 | ; ############# | |
|
151 | ; DOGPILE CACHE | |
|
152 | ; ############# | |
|
153 | ||
|
154 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. | |
|
155 | ; e.g. /tmpfs/data_ramdisk, however this directory might require a large amount of space | |
|
156 | cache_dir = %(here)s/data | |
|
157 | ||
|
158 | ; *************************************** | |
|
159 | ; `repo_object` cache, default file based | |
|
160 | ; *************************************** | |
|
161 | ||
|
162 | ; `repo_object` cache settings for vcs methods for repositories | |
|
163 | rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace | |
|
164 | ||
|
165 | ; cache auto-expires after N seconds | |
|
166 | ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days) | |
|
167 | rc_cache.repo_object.expiration_time = 2592000 | |
|
168 | ||
|
169 | ; file cache store path. Defaults to the `cache_dir =` value, or tempdir if neither is set | |
|
170 | #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db | |
|
36 | 171 | |
|
37 | ## cache region for storing repo_objects cache | |
|
38 | rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru | |
|
39 | ## cache auto-expires after N seconds | |
|
40 | rc_cache.repo_object.expiration_time = 300 | |
|
41 | ## max size of LRU, old values will be discarded if the size of cache reaches max_size | |
|
42 | rc_cache.repo_object.max_size = 100 | |
|
172 | ; *********************************************************** | |
|
173 | ; `repo_object` cache with redis backend | |
|
174 | ; recommended for larger instances and for better performance | |
|
175 | ; *********************************************************** | |
|
176 | ||
|
177 | ; `repo_object` cache settings for vcs methods for repositories | |
|
178 | #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack | |
|
179 | ||
|
180 | ; cache auto-expires after N seconds | |
|
181 | ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days) | |
|
182 | #rc_cache.repo_object.expiration_time = 2592000 | |
|
183 | ||
|
184 | ; redis_expiration_time needs to be greater than expiration_time | |
|
185 | #rc_cache.repo_object.arguments.redis_expiration_time = 3592000 | |
|
186 | ||
|
187 | #rc_cache.repo_object.arguments.host = localhost | |
|
188 | #rc_cache.repo_object.arguments.port = 6379 | |
|
189 | #rc_cache.repo_object.arguments.db = 5 | |
|
190 | #rc_cache.repo_object.arguments.socket_timeout = 30 | |
|
191 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
|
192 | #rc_cache.repo_object.arguments.distributed_lock = true | |
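
The commented-out block above switches the `repo_object` region from the file-based backend to a msgpack/Redis backend. For orientation, the same settings expressed with the stock dogpile.cache API look roughly like the sketch below; the dogpile.cache.rc.* backends are RhodeCode-specific wrappers, so this is an approximation rather than the exact implementation.

    # approximate programmatic equivalent using plain dogpile.cache
    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=2592000,                # 30 days, mirrors expiration_time above
        arguments={
            'host': 'localhost',
            'port': 6379,
            'db': 5,
            'socket_timeout': 30,
            'redis_expiration_time': 3592000,   # must be greater than expiration_time
            'distributed_lock': True,
        },
    )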
|
43 | 193 | |
|
44 | 194 | |
|
45 |
|
|
|
46 |
|
|
|
47 |
|
|
|
195 | ; ##################### | |
|
196 | ; LOGGING CONFIGURATION | |
|
197 | ; ##################### | |
|
48 | 198 | [loggers] |
|
49 | 199 | keys = root, vcsserver |
|
50 | 200 | |
@@ -54,9 +204,9 b' keys = console' | |||
|
54 | 204 | [formatters] |
|
55 | 205 | keys = generic |
|
56 | 206 | |
|
57 |
|
|
|
58 |
|
|
|
59 |
|
|
|
207 | ; ####### | |
|
208 | ; LOGGERS | |
|
209 | ; ####### | |
|
60 | 210 | [logger_root] |
|
61 | 211 | level = NOTSET |
|
62 | 212 | handlers = console |
@@ -68,19 +218,19 b' qualname = vcsserver' | |||
|
68 | 218 | propagate = 1 |
|
69 | 219 | |
|
70 | 220 | |
|
71 |
|
|
|
72 |
|
|
|
73 |
|
|
|
221 | ; ######## | |
|
222 | ; HANDLERS | |
|
223 | ; ######## | |
|
74 | 224 | |
|
75 | 225 | [handler_console] |
|
76 | 226 | class = StreamHandler |
|
77 | args = (sys.stderr,) | |
|
227 | args = (sys.stderr, ) | |
|
78 | 228 | level = DEBUG |
|
79 | 229 | formatter = generic |
|
80 | 230 | |
|
81 |
|
|
|
82 |
|
|
|
83 |
|
|
|
231 | ; ########## | |
|
232 | ; FORMATTERS | |
|
233 | ; ########## | |
|
84 | 234 | |
|
85 | 235 | [formatter_generic] |
|
86 | 236 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
@@ -1,58 +1,26 b'' | |||
|
1 | 1 | """ |
|
2 | gunicorn config extension and hooks. Sets additional configuration that is | |
|
3 | available post the .ini config. | |
|
4 | ||
|
5 | - workers = ${cpu_number} | |
|
6 | - threads = 1 | |
|
7 | - proc_name = ${gunicorn_proc_name} | |
|
8 | - worker_class = sync | |
|
9 | - worker_connections = 10 | |
|
10 | - max_requests = 1000 | |
|
11 | - max_requests_jitter = 30 | |
|
12 | - timeout = 21600 | |
|
13 | ||
|
2 | Gunicorn config extension and hooks. This config file adds some extra settings and memory management. | |
|
3 | Gunicorn configuration should be managed by .ini files entries of RhodeCode or VCSServer | |
|
14 | 4 | """ |
|
15 | 5 | |
|
16 | import multiprocessing | |
|
6 | import gc | |
|
7 | import os | |
|
17 | 8 | import sys |
|
9 | import math | |
|
18 | 10 | import time |
|
19 | import datetime | |
|
20 | 11 | import threading |
|
21 | 12 | import traceback |
|
13 | import random | |
|
22 | 14 | from gunicorn.glogging import Logger |
|
23 | 15 | |
|
24 | 16 | |
|
17 | def get_workers(): | |
|
18 | import multiprocessing | |
|
19 | return multiprocessing.cpu_count() * 2 + 1 | |
|
20 | ||
|
25 | 21 | # GLOBAL |
|
26 | 22 | errorlog = '-' |
|
27 | 23 | accesslog = '-' |
|
28 | loglevel = 'debug' | |
|
29 | ||
|
30 | # SECURITY | |
|
31 | ||
|
32 | # The maximum size of HTTP request line in bytes. | |
|
33 | # 0 for unlimited | |
|
34 | limit_request_line = 0 | |
|
35 | ||
|
36 | # Limit the number of HTTP headers fields in a request. | |
|
37 | # By default this value is 100 and can't be larger than 32768. | |
|
38 | limit_request_fields = 10240 | |
|
39 | ||
|
40 | # Limit the allowed size of an HTTP request header field. | |
|
41 | # Value is a positive number or 0. | |
|
42 | # Setting it to 0 will allow unlimited header field sizes. | |
|
43 | limit_request_field_size = 0 | |
|
44 | ||
|
45 | ||
|
46 | # Timeout for graceful workers restart. | |
|
47 | # After receiving a restart signal, workers have this much time to finish | |
|
48 | # serving requests. Workers still alive after the timeout (starting from the | |
|
49 | # receipt of the restart signal) are force killed. | |
|
50 | graceful_timeout = 30 | |
|
51 | ||
|
52 | ||
|
53 | # The number of seconds to wait for requests on a Keep-Alive connection. | |
|
54 | # Generally set in the 1-5 seconds range. | |
|
55 | keepalive = 2 | |
|
56 | 24 | |
|
57 | 25 | |
|
58 | 26 | # SERVER MECHANICS |
@@ -63,38 +31,178 b' tmp_upload_dir = None' | |||
|
63 | 31 | |
|
64 | 32 | # Custom log format |
|
65 | 33 | access_log_format = ( |
|
66 |
'%(t)s |
|
|
34 | '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"') | |
|
67 | 35 | |
|
68 | 36 | # self adjust workers based on CPU count |
|
69 | # workers = multiprocessing.cpu_count() * 2 + 1 | |
|
37 | # workers = get_workers() | |
|
38 | ||
|
39 | ||
|
40 | def _get_process_rss(pid=None): | |
|
41 | try: | |
|
42 | import psutil | |
|
43 | if pid: | |
|
44 | proc = psutil.Process(pid) | |
|
45 | else: | |
|
46 | proc = psutil.Process() | |
|
47 | return proc.memory_info().rss | |
|
48 | except Exception: | |
|
49 | return None | |
|
70 | 50 | |
|
71 | 51 | |
|
72 | def post_fork(server, worker): | |
|
73 | server.log.info("[<%-10s>] WORKER spawned", worker.pid) | |
|
52 | def _get_config(ini_path): | |
|
53 | ||
|
54 | try: | |
|
55 | import configparser | |
|
56 | except ImportError: | |
|
57 | import ConfigParser as configparser | |
|
58 | try: | |
|
59 | config = configparser.RawConfigParser() | |
|
60 | config.read(ini_path) | |
|
61 | return config | |
|
62 | except Exception: | |
|
63 | return None | |
|
64 | ||
|
65 | ||
|
66 | def _time_with_offset(memory_usage_check_interval): | |
|
67 | return time.time() - random.randint(0, memory_usage_check_interval/2.0) | |
|
74 | 68 | |
|
75 | 69 | |
|
76 | 70 | def pre_fork(server, worker): |
|
77 | 71 | pass |
|
78 | 72 | |
|
79 | 73 | |
|
74 | def post_fork(server, worker): | |
|
75 | ||
|
76 | # memory spec defaults | |
|
77 | _memory_max_usage = 0 | |
|
78 | _memory_usage_check_interval = 60 | |
|
79 | _memory_usage_recovery_threshold = 0.8 | |
|
80 | ||
|
81 | ini_path = os.path.abspath(server.cfg.paste) | |
|
82 | conf = _get_config(ini_path) | |
|
83 | ||
|
84 | section = 'server:main' | |
|
85 | if conf and conf.has_section(section): | |
|
86 | ||
|
87 | if conf.has_option(section, 'memory_max_usage'): | |
|
88 | _memory_max_usage = conf.getint(section, 'memory_max_usage') | |
|
89 | ||
|
90 | if conf.has_option(section, 'memory_usage_check_interval'): | |
|
91 | _memory_usage_check_interval = conf.getint(section, 'memory_usage_check_interval') | |
|
92 | ||
|
93 | if conf.has_option(section, 'memory_usage_recovery_threshold'): | |
|
94 | _memory_usage_recovery_threshold = conf.getfloat(section, 'memory_usage_recovery_threshold') | |
|
95 | ||
|
96 | worker._memory_max_usage = _memory_max_usage | |
|
97 | worker._memory_usage_check_interval = _memory_usage_check_interval | |
|
98 | worker._memory_usage_recovery_threshold = _memory_usage_recovery_threshold | |
|
99 | ||
|
100 | # register the last memory check time, with some random offset so we don't recycle all | |
|
101 | # workers at once | |
|
102 | worker._last_memory_check_time = _time_with_offset(_memory_usage_check_interval) | |
|
103 | ||
|
104 | if _memory_max_usage: | |
|
105 | server.log.info("[%-10s] WORKER spawned with max memory set at %s", worker.pid, | |
|
106 | _format_data_size(_memory_max_usage)) | |
|
107 | else: | |
|
108 | server.log.info("[%-10s] WORKER spawned", worker.pid) | |
|
109 | ||
|
110 | ||
|
80 | 111 | def pre_exec(server): |
|
81 | 112 | server.log.info("Forked child, re-executing.") |
|
82 | 113 | |
|
83 | 114 | |
|
84 | 115 | def on_starting(server): |
|
85 | server.log.info("Server is starting.") | |
|
116 | server_lbl = '{} {}'.format(server.proc_name, server.address) | |
|
117 | server.log.info("Server %s is starting.", server_lbl) | |
|
86 | 118 | |
|
87 | 119 | |
|
88 | 120 | def when_ready(server): |
|
89 | server.log.info("Server is ready. Spawning workers") | |
|
121 | server.log.info("Server %s is ready. Spawning workers", server) | |
|
90 | 122 | |
|
91 | 123 | |
|
92 | 124 | def on_reload(server): |
|
93 | 125 | pass |
|
94 | 126 | |
|
95 | 127 | |
|
128 | def _format_data_size(size, unit="B", precision=1, binary=True): | |
|
129 | """Format a number using SI units (kilo, mega, etc.). | |
|
130 | ||
|
131 | ``size``: The number as a float or int. | |
|
132 | ||
|
133 | ``unit``: The unit name in plural form. Examples: "bytes", "B". | |
|
134 | ||
|
135 | ``precision``: How many digits to the right of the decimal point. Default | |
|
136 | is 1. 0 suppresses the decimal point. | |
|
137 | ||
|
138 | ``binary``: If false, use base-10 decimal prefixes (kilo = K = 1000). | |
|
139 | If true, use base-2 binary prefixes (kibi = Ki = 1024). | |
|
140 | ||
|
141 | ``full_name``: If false (default), use the prefix abbreviation ("k" or | |
|
142 | "Ki"). If true, use the full prefix ("kilo" or "kibi"). If false, | |
|
143 | use abbreviation ("k" or "Ki"). | |
|
144 | ||
|
145 | """ | |
|
146 | ||
|
147 | if not binary: | |
|
148 | base = 1000 | |
|
149 | multiples = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') | |
|
150 | else: | |
|
151 | base = 1024 | |
|
152 | multiples = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi') | |
|
153 | ||
|
154 | sign = "" | |
|
155 | if size > 0: | |
|
156 | m = int(math.log(size, base)) | |
|
157 | elif size < 0: | |
|
158 | sign = "-" | |
|
159 | size = -size | |
|
160 | m = int(math.log(size, base)) | |
|
161 | else: | |
|
162 | m = 0 | |
|
163 | if m > 8: | |
|
164 | m = 8 | |
|
165 | ||
|
166 | if m == 0: | |
|
167 | precision = '%.0f' | |
|
168 | else: | |
|
169 | precision = '%%.%df' % precision | |
|
170 | ||
|
171 | size = precision % (size / math.pow(base, m)) | |
|
172 | ||
|
173 | return '%s%s %s%s' % (sign, size.strip(), multiples[m], unit) | |
|
174 | ||
|
175 | ||
|
176 | def _check_memory_usage(worker): | |
|
177 | memory_max_usage = worker._memory_max_usage | |
|
178 | if not memory_max_usage: | |
|
179 | return | |
|
180 | ||
|
181 | memory_usage_check_interval = worker._memory_usage_check_interval | |
|
182 | memory_usage_recovery_threshold = memory_max_usage * worker._memory_usage_recovery_threshold | |
|
183 | ||
|
184 | elapsed = time.time() - worker._last_memory_check_time | |
|
185 | if elapsed > memory_usage_check_interval: | |
|
186 | mem_usage = _get_process_rss() | |
|
187 | if mem_usage and mem_usage > memory_max_usage: | |
|
188 | worker.log.info( | |
|
189 | "memory usage %s > %s, forcing gc", | |
|
190 | _format_data_size(mem_usage), _format_data_size(memory_max_usage)) | |
|
191 | # Try to clean it up by forcing a full collection. | |
|
192 | gc.collect() | |
|
193 | mem_usage = _get_process_rss() | |
|
194 | if mem_usage > memory_usage_recovery_threshold: | |
|
195 | # Didn't clean up enough, we'll have to terminate. | |
|
196 | worker.log.warning( | |
|
197 | "memory usage %s > %s after gc, quitting", | |
|
198 | _format_data_size(mem_usage), _format_data_size(memory_max_usage)) | |
|
199 | # This will cause worker to auto-restart itself | |
|
200 | worker.alive = False | |
|
201 | worker._last_memory_check_time = time.time() | |
|
202 | ||
|
203 | ||
|
96 | 204 | def worker_int(worker): |
|
97 |
worker.log.info("[ |
|
|
205 | worker.log.info("[%-10s] worker received INT or QUIT signal", worker.pid) | |
|
98 | 206 | |
|
99 | 207 | # get traceback info, on worker crash |
|
100 | 208 | id2name = dict([(th.ident, th.name) for th in threading.enumerate()]) |
@@ -110,15 +218,15 b' def worker_int(worker):' | |||
|
110 | 218 | |
|
111 | 219 | |
|
112 | 220 | def worker_abort(worker): |
|
113 |
worker.log.info("[ |
|
|
221 | worker.log.info("[%-10s] worker received SIGABRT signal", worker.pid) | |
|
114 | 222 | |
|
115 | 223 | |
|
116 | 224 | def worker_exit(server, worker): |
|
117 |
worker.log.info("[ |
|
|
225 | worker.log.info("[%-10s] worker exit", worker.pid) | |
|
118 | 226 | |
|
119 | 227 | |
|
120 | 228 | def child_exit(server, worker): |
|
121 |
worker.log.info("[ |
|
|
229 | worker.log.info("[%-10s] worker child exit", worker.pid) | |
|
122 | 230 | |
|
123 | 231 | |
|
124 | 232 | def pre_request(worker, req): |
@@ -129,9 +237,12 b' def pre_request(worker, req):' | |||
|
129 | 237 | |
|
130 | 238 | def post_request(worker, req, environ, resp): |
|
131 | 239 | total_time = time.time() - worker.start_time |
|
240 | # Gunicorn sometimes has problems with reading the status_code | |
|
241 | status_code = getattr(resp, 'status_code', '') | |
|
132 | 242 | worker.log.debug( |
|
133 |
"GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %. |
|
|
134 |
worker.nr, req.method, req.path, |
|
|
243 | "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.4fs", | |
|
244 | worker.nr, req.method, req.path, status_code, total_time) | |
|
245 | _check_memory_usage(worker) | |
|
135 | 246 | |
|
136 | 247 | |
|
137 | 248 | class RhodeCodeLogger(Logger): |
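
Taken together, the new hooks work like this: post_fork() reads memory_max_usage, memory_usage_check_interval and memory_usage_recovery_threshold from the [server:main] section of the ini passed via --paste, and post_request() calls _check_memory_usage() after every response. A worked example of the recycle decision, with illustrative numbers only:

    # illustrative values, not defaults from this changeset
    memory_max_usage = 1073741824                    # 1 GiB set in the ini
    recovery_limit = memory_max_usage * 0.8          # ~819 MiB with threshold 0.8

    rss = 1200 * 1024 * 1024           # worker RSS from psutil: 1.2 GiB > 1 GiB -> gc.collect()
    rss_after_gc = 900 * 1024 * 1024   # still above ~819 MiB, so worker.alive = False
                                       # and gunicorn replaces the worker

_format_data_size() is only used for the log messages, e.g. 536870912 bytes is rendered as '512.0 MiB' with the default binary prefixes.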
@@ -1,71 +1,163 b'' | |||
|
1 | ################################################################################ | |
|
2 | # RhodeCode VCSServer with HTTP Backend - configuration # | |
|
3 | ################################################################################ | |
|
1 | ## -*- coding: utf-8 -*- | |
|
4 | 2 | |
|
3 | ; ################################# | |
|
4 | ; RHODECODE VCSSERVER CONFIGURATION | |
|
5 | ; ################################# | |
|
5 | 6 | |
|
6 | 7 | [server:main] |
|
7 | ## COMMON ## | |
|
8 | ; COMMON HOST/IP CONFIG | |
|
8 | 9 | host = 127.0.0.1 |
|
9 | 10 | port = 9900 |
|
10 | 11 | |
|
11 | 12 | |
|
12 | ########################## | |
|
13 |
|
|
|
14 | ########################## | |
|
15 | ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini | |
|
13 | ; ########################### | |
|
14 | ; GUNICORN APPLICATION SERVER | |
|
15 | ; ########################### | |
|
16 | ||
|
17 | ; run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini | |
|
18 | ||
|
19 | ; Module to use, this setting shouldn't be changed | |
|
16 | 20 | use = egg:gunicorn#main |
|
17 | ## Sets the number of process workers. Recommended | |
|
18 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers | |
|
21 | ||
|
22 | ; Sets the number of process workers. More workers means more concurrent connections | |
|
23 | ; RhodeCode can handle at the same time. Each additional worker also it increases | |
|
24 | ; memory usage as each has it's own set of caches. | |
|
25 | ; Recommended value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers, but no more | |
|
26 | ; than 8-10 unless for really big deployments .e.g 700-1000 users. | |
|
27 | ; `instance_id = *` must be set in the [app:main] section below (which is the default) | |
|
28 | ; when using more than 1 worker. | |
|
19 | 29 | workers = 2 |
|
20 | ## process name | |
|
30 | ||
|
31 | ; Gunicorn access log level | |
|
32 | loglevel = info | |
|
33 | ||
|
34 | ; Process name visible in process list | |
|
21 | 35 | proc_name = rhodecode_vcsserver |
|
22 | ## type of worker class, currently `sync` is the only option allowed. | |
|
36 | ||
|
37 | ; Type of worker class, one of `sync`, `gevent` | |
|
38 | ; currently `sync` is the only option allowed. | |
|
23 | 39 | worker_class = sync |
|
24 | ## The maximum number of simultaneous clients. Valid only for Gevent | |
|
25 | #worker_connections = 10 | |
|
26 | ## max number of requests that worker will handle before being gracefully | |
|
27 | ## restarted, could prevent memory leaks | |
|
40 | ||
|
41 | ; The maximum number of simultaneous clients. Valid only for gevent | |
|
42 | worker_connections = 10 | |
|
43 | ||
|
44 | ; Max number of requests that worker will handle before being gracefully restarted. | |
|
45 | ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. | |
|
28 | 46 | max_requests = 1000 |
|
29 | 47 | max_requests_jitter = 30 |
|
30 | ## amount of time a worker can spend with handling a request before it | |
|
31 | ## gets killed and restarted. Set to 6hrs | |
|
48 | ||
|
49 | ; Amount of time a worker can spend with handling a request before it | |
|
50 | ; gets killed and restarted. By default set to 21600 (6hrs) | |
|
51 | ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) | |
|
32 | 52 | timeout = 21600 |
|
33 | 53 | |
|
54 | ; The maximum size of HTTP request line in bytes. | |
|
55 | ; 0 for unlimited | |
|
56 | limit_request_line = 0 | |
|
57 | ||
|
58 | ; Limit the number of HTTP headers fields in a request. | |
|
59 | ; By default this value is 100 and can't be larger than 32768. | |
|
60 | limit_request_fields = 32768 | |
|
61 | ||
|
62 | ; Limit the allowed size of an HTTP request header field. | |
|
63 | ; Value is a positive number or 0. | |
|
64 | ; Setting it to 0 will allow unlimited header field sizes. | |
|
65 | limit_request_field_size = 0 | |
|
66 | ||
|
67 | ; Timeout for graceful workers restart. | |
|
68 | ; After receiving a restart signal, workers have this much time to finish | |
|
69 | ; serving requests. Workers still alive after the timeout (starting from the | |
|
70 | ; receipt of the restart signal) are force killed. | |
|
71 | ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) | |
|
72 | graceful_timeout = 3600 | |
|
73 | ||
|
74 | # The number of seconds to wait for requests on a Keep-Alive connection. | |
|
75 | # Generally set in the 1-5 seconds range. | |
|
76 | keepalive = 2 | |
|
77 | ||
|
78 | ; Maximum memory usage that each worker can use before it will receive a | |
|
79 | ; graceful restart signal 0 = memory monitoring is disabled | |
|
80 | ; Examples: 268435456 (256MB), 536870912 (512MB) | |
|
81 | ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) | |
|
82 | memory_max_usage = 0 | |
|
83 | ||
|
84 | ; How often in seconds to check for memory usage for each gunicorn worker | |
|
85 | memory_usage_check_interval = 60 | |
|
86 | ||
|
87 | ; Threshold value for which we don't recycle worker if GarbageCollection | |
|
88 | ; frees up enough resources. Before each restart we try to run GC on worker | |
|
89 | ; in case we get enough free memory after that, restart will not happen. | |
|
90 | memory_usage_recovery_threshold = 0.8 | |
|
91 | ||
|
34 | 92 | |
|
35 | 93 | [app:main] |
|
94 | ; The %(here)s variable will be replaced with the absolute path of parent directory | |
|
95 | ; of this file | |
|
36 | 96 | use = egg:rhodecode-vcsserver |
|
37 | 97 | |
|
98 | ; Pyramid default locales, we need this to be set | |
|
38 | 99 | pyramid.default_locale_name = en |
|
39 | pyramid.includes = | |
|
40 | 100 | |
|
41 |
|
|
|
101 | ; default locale used by VCS systems | |
|
42 | 102 | locale = en_US.UTF-8 |
|
43 | 103 | |
|
44 | ||
|
45 | ## path to binaries for vcsserver, it should be set by the installer | |
|
46 | ## at installation time, e.g /home/user/vcsserver-1/profile/bin | |
|
104 | ; path to binaries for vcsserver, it should be set by the installer | |
|
105 | ; at installation time, e.g /home/user/vcsserver-1/profile/bin | |
|
106 | ; it can also be a path to nix-build output in case of development | |
|
47 | 107 | core.binary_dir = "" |
|
48 | 108 | |
|
49 |
|
|
|
50 |
|
|
|
109 | ; Custom exception store path, defaults to TMPDIR | |
|
110 | ; This is used to store exception from RhodeCode in shared directory | |
|
51 | 111 | #exception_tracker.store_path = |
|
52 | 112 | |
|
53 | ## Default cache dir for caches. Putting this into a ramdisk | |
|
54 | ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require | |
|
55 | ## large amount of space | |
|
56 | cache_dir = %(here)s/rcdev/data | |
|
113 | ; ############# | |
|
114 | ; DOGPILE CACHE | |
|
115 | ; ############# | |
|
116 | ||
|
117 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. | |
|
118 | ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space | |
|
119 | cache_dir = %(here)s/data | |
|
120 | ||
|
121 | ; *************************************** | |
|
122 | ; `repo_object` cache, default file based | |
|
123 | ; *************************************** | |
|
124 | ||
|
125 | ; `repo_object` cache settings for vcs methods for repositories | |
|
126 | rc_cache.repo_object.backend = dogpile.cache.rc.file_namespace | |
|
127 | ||
|
128 | ; cache auto-expires after N seconds | |
|
129 | ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days) | |
|
130 | rc_cache.repo_object.expiration_time = 2592000 | |
|
131 | ||
|
132 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
|
133 | #rc_cache.repo_object.arguments.filename = /tmp/vcsserver_cache.db | |
|
57 | 134 | |
|
58 | ## cache region for storing repo_objects cache | |
|
59 | rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru | |
|
60 | ## cache auto-expires after N seconds | |
|
61 | rc_cache.repo_object.expiration_time = 300 | |
|
62 | ## max size of LRU, old values will be discarded if the size of cache reaches max_size | |
|
63 | rc_cache.repo_object.max_size = 100 | |
|
135 | ; *********************************************************** | |
|
136 | ; `repo_object` cache with redis backend | |
|
137 | ; recommended for larger instances and for better performance | |
|
138 | ; *********************************************************** | |
|
139 | ||
|
140 | ; `repo_object` cache settings for vcs methods for repositories | |
|
141 | #rc_cache.repo_object.backend = dogpile.cache.rc.redis_msgpack | |
|
142 | ||
|
143 | ; cache auto-expires after N seconds | |
|
144 | ; Examples: 86400 (1Day), 604800 (7Days), 1209600 (14Days), 2592000 (30days), 7776000 (90Days) | |
|
145 | #rc_cache.repo_object.expiration_time = 2592000 | |
|
146 | ||
|
147 | ; redis_expiration_time needs to be greater then expiration_time | |
|
148 | #rc_cache.repo_object.arguments.redis_expiration_time = 3592000 | |
|
149 | ||
|
150 | #rc_cache.repo_object.arguments.host = localhost | |
|
151 | #rc_cache.repo_object.arguments.port = 6379 | |
|
152 | #rc_cache.repo_object.arguments.db = 5 | |
|
153 | #rc_cache.repo_object.arguments.socket_timeout = 30 | |
|
154 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
|
155 | #rc_cache.repo_object.arguments.distributed_lock = true | |
|
64 | 156 | |
|
65 | 157 | |
|
66 |
|
|
|
67 |
|
|
|
68 |
|
|
|
158 | ; ##################### | |
|
159 | ; LOGGING CONFIGURATION | |
|
160 | ; ##################### | |
|
69 | 161 | [loggers] |
|
70 | 162 | keys = root, vcsserver |
|
71 | 163 | |
@@ -75,9 +167,9 b' keys = console' | |||
|
75 | 167 | [formatters] |
|
76 | 168 | keys = generic |
|
77 | 169 | |
|
78 |
|
|
|
79 |
|
|
|
80 |
|
|
|
170 | ; ####### | |
|
171 | ; LOGGERS | |
|
172 | ; ####### | |
|
81 | 173 | [logger_root] |
|
82 | 174 | level = NOTSET |
|
83 | 175 | handlers = console |
@@ -89,19 +181,19 b' qualname = vcsserver' | |||
|
89 | 181 | propagate = 1 |
|
90 | 182 | |
|
91 | 183 | |
|
92 |
|
|
|
93 |
|
|
|
94 |
|
|
|
184 | ; ######## | |
|
185 | ; HANDLERS | |
|
186 | ; ######## | |
|
95 | 187 | |
|
96 | 188 | [handler_console] |
|
97 | 189 | class = StreamHandler |
|
98 | args = (sys.stderr,) | |
|
99 |
level = |
|
|
190 | args = (sys.stderr, ) | |
|
191 | level = INFO | |
|
100 | 192 | formatter = generic |
|
101 | 193 | |
|
102 |
|
|
|
103 |
|
|
|
104 |
|
|
|
194 | ; ########## | |
|
195 | ; FORMATTERS | |
|
196 | ; ########## | |
|
105 | 197 | |
|
106 | 198 | [formatter_generic] |
|
107 | 199 | format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s |
@@ -1,10 +1,11 b'' | |||
|
1 | 1 | self: super: { |
|
2 | ||
|
2 | 3 | # bump GIT version |
|
3 | 4 | git = super.lib.overrideDerivation super.git (oldAttrs: { |
|
4 |
name = "git-2.1 |
|
|
5 | name = "git-2.24.1"; | |
|
5 | 6 | src = self.fetchurl { |
|
6 |
url = "https://www.kernel.org/pub/software/scm/git/git-2.1 |
|
|
7 | sha256 = "1scbggzghkzzfqg4ky3qh7h9w87c3zya4ls5disz7dbx56is7sgw"; | |
|
7 | url = "https://www.kernel.org/pub/software/scm/git/git-2.24.1.tar.xz"; | |
|
8 | sha256 = "0ql5z31vgl7b785gwrf00m129mg7zi9pa65n12ij3mpxx3f28gvj"; | |
|
8 | 9 | }; |
|
9 | 10 | |
|
10 | 11 | # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git |
@@ -18,6 +19,29 b' self: super: {' | |||
|
18 | 19 | |
|
19 | 20 | }); |
|
20 | 21 | |
|
22 | libgit2rc = super.lib.overrideDerivation super.libgit2 (oldAttrs: { | |
|
23 | name = "libgit2-0.28.2"; | |
|
24 | version = "0.28.2"; | |
|
25 | ||
|
26 | src = self.fetchFromGitHub { | |
|
27 | owner = "libgit2"; | |
|
28 | repo = "libgit2"; | |
|
29 | rev = "v0.28.2"; | |
|
30 | sha256 = "0cm8fvs05rj0baigs2133q5a0sm3pa234y8h6hmwhl2bz9xq3k4b"; | |
|
31 | }; | |
|
32 | ||
|
33 | cmakeFlags = [ "-DTHREADSAFE=ON" "-DUSE_HTTPS=no"]; | |
|
34 | ||
|
35 | buildInputs = [ | |
|
36 | super.zlib | |
|
37 | super.libssh2 | |
|
38 | super.openssl | |
|
39 | super.curl | |
|
40 | ]; | |
|
41 | ||
|
42 | ||
|
43 | }); | |
|
44 | ||
|
21 | 45 | # Override subversion derivation to |
|
22 | 46 | # - activate python bindings |
|
23 | 47 | subversion = |
@@ -29,10 +53,10 b' self: super: {' | |||
|
29 | 53 | }; |
|
30 | 54 | in |
|
31 | 55 | super.lib.overrideDerivation subversionWithPython (oldAttrs: { |
|
32 |
name = "subversion-1.1 |
|
|
56 | name = "subversion-1.12.2"; | |
|
33 | 57 | src = self.fetchurl { |
|
34 |
url = "https://archive.apache.org/dist/subversion/subversion-1.1 |
|
|
35 | sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg"; | |
|
58 | url = "https://archive.apache.org/dist/subversion/subversion-1.12.2.tar.gz"; | |
|
59 | sha256 = "1wr1pklnq67xdzmf237zj6l1hg43yshfkbxvpvd5sv6r0dk7v4pl"; | |
|
36 | 60 | }; |
|
37 | 61 | |
|
38 | 62 | ## use internal lz4/utf8proc because it is stable and shipped with SVN |
@@ -41,7 +65,7 b' self: super: {' | |||
|
41 | 65 | " --with-utf8proc=internal" |
|
42 | 66 | ]; |
|
43 | 67 | |
|
44 | ||
|
45 | 68 | }); |
|
46 | 69 | |
|
70 | ||
|
47 | 71 | } |
@@ -3,9 +3,10 b" and (2) make sure `gitman.info' isn't pr" | |||
|
3 | 3 | node names). |
|
4 | 4 | |
|
5 | 5 | diff --git a/Documentation/Makefile b/Documentation/Makefile |
|
6 | index 26a2342bea..ceccd67ebb 100644 | |
|
6 | 7 | --- a/Documentation/Makefile |
|
7 | 8 | +++ b/Documentation/Makefile |
|
8 | @@ -122,7 +122,7 @@ | |
|
9 | @@ -132,7 +132,7 @@ HTML_REPO = ../../git-htmldocs | |
|
9 | 10 | |
|
10 | 11 | MAKEINFO = makeinfo |
|
11 | 12 | INSTALL_INFO = install-info |
@@ -14,7 +15,7 b' diff --git a/Documentation/Makefile b/Do' | |||
|
14 | 15 | DBLATEX = dblatex |
|
15 | 16 | ASCIIDOC_DBLATEX_DIR = /etc/asciidoc/dblatex |
|
16 | 17 | DBLATEX_COMMON = -p $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.xsl -s $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.sty |
|
17 |
@@ -2 |
|
|
18 | @@ -250,7 +250,7 @@ man1: $(DOC_MAN1) | |
|
18 | 19 | man5: $(DOC_MAN5) |
|
19 | 20 | man7: $(DOC_MAN7) |
|
20 | 21 | |
@@ -23,7 +24,7 b' diff --git a/Documentation/Makefile b/Do' | |||
|
23 | 24 | |
|
24 | 25 | pdf: user-manual.pdf |
|
25 | 26 | |
|
26 |
@@ -2 |
|
|
27 | @@ -266,10 +266,9 @@ install-man: man | |
|
27 | 28 | |
|
28 | 29 | install-info: info |
|
29 | 30 | $(INSTALL) -d -m 755 $(DESTDIR)$(infodir) |
@@ -1,7 +1,8 b'' | |||
|
1 | 1 | diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt |
|
2 | index 1afe9fc858..05dd7c3a90 100644 | |
|
2 | 3 | --- a/Documentation/git-send-email.txt |
|
3 | 4 | +++ b/Documentation/git-send-email.txt |
|
4 |
@@ -2 |
|
|
5 | @@ -215,8 +215,7 @@ a password is obtained using 'git-credential'. | |
|
5 | 6 | specify a full pathname of a sendmail-like program instead; |
|
6 | 7 | the program must support the `-i` option. Default value can |
|
7 | 8 | be specified by the `sendemail.smtpServer` configuration |
@@ -12,9 +13,10 b' diff --git a/Documentation/git-send-emai' | |||
|
12 | 13 | |
|
13 | 14 | --smtp-server-port=<port>:: |
|
14 | 15 | diff --git a/git-send-email.perl b/git-send-email.perl |
|
16 | index 8eb63b5a2f..74a61d8213 100755 | |
|
15 | 17 | --- a/git-send-email.perl |
|
16 | 18 | +++ b/git-send-email.perl |
|
17 | @@ -944,8 +944,7 @@ if (defined $reply_to) { | |
|
19 | @@ -956,8 +956,7 @@ sub expand_one_alias { | |
|
18 | 20 | } |
|
19 | 21 | |
|
20 | 22 | if (!defined $smtp_server) { |
@@ -1,94 +1,23 b'' | |||
|
1 | diff --git a/git-sh-i18n.sh b/git-sh-i18n.sh | |
|
2 | index e1d917fd27..e90f8e1414 100644 | |
|
1 | 3 | --- a/git-sh-i18n.sh |
|
2 | 4 | +++ b/git-sh-i18n.sh |
|
3 | @@ -15,87 +15,11 @@ | |
|
4 | fi | |
|
5 | export TEXTDOMAINDIR | |
|
6 | ||
|
7 | -# First decide what scheme to use... | |
|
8 | -GIT_INTERNAL_GETTEXT_SH_SCHEME=fallthrough | |
|
9 | -if test -n "$GIT_GETTEXT_POISON" | |
|
10 | -then | |
|
11 | - GIT_INTERNAL_GETTEXT_SH_SCHEME=poison | |
|
12 | -elif test -n "@@USE_GETTEXT_SCHEME@@" | |
|
13 | -then | |
|
14 | - GIT_INTERNAL_GETTEXT_SH_SCHEME="@@USE_GETTEXT_SCHEME@@" | |
|
15 | -elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS" | |
|
16 | -then | |
|
17 | - : no probing necessary | |
|
5 | @@ -26,7 +26,7 @@ then | |
|
6 | elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS" | |
|
7 | then | |
|
8 | : no probing necessary | |
|
18 | 9 | -elif type gettext.sh >/dev/null 2>&1 |
|
19 | -then | |
|
20 | - # GNU libintl's gettext.sh | |
|
21 | - GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu | |
|
22 | -elif test "$(gettext -h 2>&1)" = "-h" | |
|
23 | -then | |
|
24 | - # gettext binary exists but no gettext.sh. likely to be a gettext | |
|
25 | - # binary on a Solaris or something that is not GNU libintl and | |
|
26 | - # lack eval_gettext. | |
|
27 | - GIT_INTERNAL_GETTEXT_SH_SCHEME=gettext_without_eval_gettext | |
|
28 | -fi | |
|
29 | -export GIT_INTERNAL_GETTEXT_SH_SCHEME | |
|
30 | - | |
|
31 | -# ... and then follow that decision. | |
|
32 | -case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in | |
|
33 | -gnu) | |
|
34 | - # Use libintl's gettext.sh, or fall back to English if we can't. | |
|
10 | +elif type @gettext@/bin/gettext.sh >/dev/null 2>&1 | |
|
11 | then | |
|
12 | # GNU libintl's gettext.sh | |
|
13 | GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu | |
|
14 | @@ -43,7 +43,8 @@ export GIT_INTERNAL_GETTEXT_SH_SCHEME | |
|
15 | case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in | |
|
16 | gnu) | |
|
17 | # Use libintl's gettext.sh, or fall back to English if we can't. | |
|
35 | 18 | - . gettext.sh |
|
36 | - ;; | |
|
37 | -gettext_without_eval_gettext) | |
|
38 | - # Solaris has a gettext(1) but no eval_gettext(1) | |
|
39 | - eval_gettext () { | |
|
40 | - gettext "$1" | ( | |
|
41 | - export PATH $(git sh-i18n--envsubst --variables "$1"); | |
|
42 | - git sh-i18n--envsubst "$1" | |
|
43 | - ) | |
|
44 | - } | |
|
45 | - | |
|
46 | - eval_ngettext () { | |
|
47 | - ngettext "$1" "$2" "$3" | ( | |
|
48 | - export PATH $(git sh-i18n--envsubst --variables "$2"); | |
|
49 | - git sh-i18n--envsubst "$2" | |
|
50 | - ) | |
|
51 | - } | |
|
52 | - ;; | |
|
53 | -poison) | |
|
54 | - # Emit garbage so that tests that incorrectly rely on translatable | |
|
55 | - # strings will fail. | |
|
56 | - gettext () { | |
|
57 | - printf "%s" "# GETTEXT POISON #" | |
|
58 | - } | |
|
59 | - | |
|
60 | - eval_gettext () { | |
|
61 | - printf "%s" "# GETTEXT POISON #" | |
|
62 | - } | |
|
63 | - | |
|
64 | - eval_ngettext () { | |
|
65 | - printf "%s" "# GETTEXT POISON #" | |
|
66 | - } | |
|
67 | - ;; | |
|
68 | -*) | |
|
69 | - gettext () { | |
|
70 | - printf "%s" "$1" | |
|
71 | - } | |
|
72 | - | |
|
73 | - eval_gettext () { | |
|
74 | - printf "%s" "$1" | ( | |
|
75 | - export PATH $(git sh-i18n--envsubst --variables "$1"); | |
|
76 | - git sh-i18n--envsubst "$1" | |
|
77 | - ) | |
|
78 | - } | |
|
79 | +# GNU gettext | |
|
80 | +export GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu | |
|
81 | +export PATH=@gettext@/bin:$PATH | |
|
82 | ||
|
83 | - eval_ngettext () { | |
|
84 | - (test "$3" = 1 && printf "%s" "$1" || printf "%s" "$2") | ( | |
|
85 | - export PATH $(git sh-i18n--envsubst --variables "$2"); | |
|
86 | - git sh-i18n--envsubst "$2" | |
|
87 | - ) | |
|
88 | - } | |
|
89 | - ;; | |
|
90 | -esac | |
|
91 | +. @gettext@/bin/gettext.sh | |
|
92 | ||
|
93 | # Git-specific wrapper functions | |
|
94 | gettextln () { | |
|
19 | + . @gettext@/bin/gettext.sh | |
|
20 | + export PATH=@gettext@/bin:$PATH | |
|
21 | ;; | |
|
22 | gettext_without_eval_gettext) | |
|
23 | # Solaris has a gettext(1) but no eval_gettext(1) |
@@ -1,12 +1,13 b'' | |||
|
1 | 1 | diff --git a/t/test-lib.sh b/t/test-lib.sh |
|
2 | index 8665b0a9b6..8bb892b1af 100644 | |
|
2 | 3 | --- a/t/test-lib.sh |
|
3 | 4 | +++ b/t/test-lib.sh |
|
4 | @@ -923,7 +923,7 @@ | |
|
5 | @@ -1227,7 +1227,7 @@ elif test -n "$GIT_TEST_INSTALLED" | |
|
5 | 6 | then |
|
6 | 7 | GIT_EXEC_PATH=$($GIT_TEST_INSTALLED/git --exec-path) || |
|
7 | 8 | error "Cannot run git from $GIT_TEST_INSTALLED." |
|
8 | - PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR:$PATH | |
|
9 | - PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$PATH | |
|
9 | 10 | + PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$GIT_BUILD_DIR:$PATH |
|
10 | 11 | GIT_EXEC_PATH=${GIT_TEST_EXEC_PATH:-$GIT_EXEC_PATH} |
|
11 | 12 | else # normal case, use ../bin-wrappers only unless $with_dashes: |
|
12 | git_bin_dir="$GIT_BUILD_DIR/bin-wrappers" | |
|
13 | if test -n "$no_bin_wrappers" |
@@ -1,8 +1,8 b'' | |||
|
1 | 1 | diff --git a/connect.c b/connect.c |
|
2 |
index |
|
|
2 | index 4813f005ab..b3f12f3268 100644 | |
|
3 | 3 | --- a/connect.c |
|
4 | 4 | +++ b/connect.c |
|
5 |
@@ -1 |
|
|
5 | @@ -1183,7 +1183,7 @@ static void fill_ssh_args(struct child_process *conn, const char *ssh_host, | |
|
6 | 6 | |
|
7 | 7 | ssh = getenv("GIT_SSH"); |
|
8 | 8 | if (!ssh) |
@@ -12,7 +12,7 b' index c3a014c5b..fbca3262b 100644' | |||
|
12 | 12 | } |
|
13 | 13 | |
|
14 | 14 | diff --git a/git-gui/lib/remote_add.tcl b/git-gui/lib/remote_add.tcl |
|
15 | index 480a6b30d..781720424 100644 | |
|
15 | index 480a6b30d0..7817204241 100644 | |
|
16 | 16 | --- a/git-gui/lib/remote_add.tcl |
|
17 | 17 | +++ b/git-gui/lib/remote_add.tcl |
|
18 | 18 | @@ -139,7 +139,7 @@ method _add {} { |
@@ -15,6 +15,12 b' in' | |||
|
15 | 15 | |
|
16 | 16 | self: super: { |
|
17 | 17 | |
|
18 | "cffi" = super."cffi".override (attrs: { | |
|
19 | buildInputs = [ | |
|
20 | pkgs.libffi | |
|
21 | ]; | |
|
22 | }); | |
|
23 | ||
|
18 | 24 | "gevent" = super."gevent".override (attrs: { |
|
19 | 25 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ |
|
20 | 26 | # NOTE: (marcink) odd requirements from gevent aren't set properly, |
@@ -52,6 +58,12 b' self: super: {' | |||
|
52 | 58 | ]; |
|
53 | 59 | }); |
|
54 | 60 | |
|
61 | "pygit2" = super."pygit2".override (attrs: { | |
|
62 | propagatedBuildInputs = attrs.propagatedBuildInputs ++ [ | |
|
63 | pkgs.libffi | |
|
64 | pkgs.libgit2rc | |
|
65 | ]; | |
|
66 | }); | |
|
55 | 67 | |
|
56 | 68 | # Avoid that base packages screw up the build process |
|
57 | 69 | inherit (basePythonPackages) |
@@ -5,22 +5,22 b'' | |||
|
5 | 5 | |
|
6 | 6 | self: super: { |
|
7 | 7 | "atomicwrites" = super.buildPythonPackage { |
|
8 |
name = "atomicwrites-1. |
|
|
8 | name = "atomicwrites-1.3.0"; | |
|
9 | 9 | doCheck = false; |
|
10 | 10 | src = fetchurl { |
|
11 |
url = "https://files.pythonhosted.org/packages/ac |
|
|
12 | sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc"; | |
|
11 | url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz"; | |
|
12 | sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm"; | |
|
13 | 13 | }; |
|
14 | 14 | meta = { |
|
15 | 15 | license = [ pkgs.lib.licenses.mit ]; |
|
16 | 16 | }; |
|
17 | 17 | }; |
|
18 | 18 | "attrs" = super.buildPythonPackage { |
|
19 |
name = "attrs-1 |
|
|
19 | name = "attrs-19.3.0"; | |
|
20 | 20 | doCheck = false; |
|
21 | 21 | src = fetchurl { |
|
22 |
url = "https://files.pythonhosted.org/packages/ |
|
|
23 | sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh"; | |
|
22 | url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz"; | |
|
23 | sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp"; | |
|
24 | 24 | }; |
|
25 | 25 | meta = { |
|
26 | 26 | license = [ pkgs.lib.licenses.mit ]; |
@@ -48,6 +48,20 b' self: super: {' | |||
|
48 | 48 | license = [ pkgs.lib.licenses.mit ]; |
|
49 | 49 | }; |
|
50 | 50 | }; |
|
51 | "cffi" = super.buildPythonPackage { | |
|
52 | name = "cffi-1.12.3"; | |
|
53 | doCheck = false; | |
|
54 | propagatedBuildInputs = [ | |
|
55 | self."pycparser" | |
|
56 | ]; | |
|
57 | src = fetchurl { | |
|
58 | url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz"; | |
|
59 | sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704"; | |
|
60 | }; | |
|
61 | meta = { | |
|
62 | license = [ pkgs.lib.licenses.mit ]; | |
|
63 | }; | |
|
64 | }; | |
|
51 | 65 | "configobj" = super.buildPythonPackage { |
|
52 | 66 | name = "configobj-5.0.6"; |
|
53 | 67 | doCheck = false; |
@@ -62,6 +76,28 b' self: super: {' | |||
|
62 | 76 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
63 | 77 | }; |
|
64 | 78 | }; |
|
79 | "configparser" = super.buildPythonPackage { | |
|
80 | name = "configparser-4.0.2"; | |
|
81 | doCheck = false; | |
|
82 | src = fetchurl { | |
|
83 | url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz"; | |
|
84 | sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7"; | |
|
85 | }; | |
|
86 | meta = { | |
|
87 | license = [ pkgs.lib.licenses.mit ]; | |
|
88 | }; | |
|
89 | }; | |
|
90 | "contextlib2" = super.buildPythonPackage { | |
|
91 | name = "contextlib2-0.6.0.post1"; | |
|
92 | doCheck = false; | |
|
93 | src = fetchurl { | |
|
94 | url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz"; | |
|
95 | sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01"; | |
|
96 | }; | |
|
97 | meta = { | |
|
98 | license = [ pkgs.lib.licenses.psfl ]; | |
|
99 | }; | |
|
100 | }; | |
|
65 | 101 | "cov-core" = super.buildPythonPackage { |
|
66 | 102 | name = "cov-core-1.15.0"; |
|
67 | 103 | doCheck = false; |
@@ -77,11 +113,11 b' self: super: {' | |||
|
77 | 113 | }; |
|
78 | 114 | }; |
|
79 | 115 | "coverage" = super.buildPythonPackage { |
|
80 |
name = "coverage-4.5. |
|
|
116 | name = "coverage-4.5.4"; | |
|
81 | 117 | doCheck = false; |
|
82 | 118 | src = fetchurl { |
|
83 |
url = "https://files.pythonhosted.org/packages/8 |
|
|
84 | sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx"; | |
|
119 | url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz"; | |
|
120 | sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0"; | |
|
85 | 121 | }; |
|
86 | 122 | meta = { |
|
87 | 123 | license = [ pkgs.lib.licenses.asl20 ]; |
@@ -99,14 +135,14 b' self: super: {' | |||
|
99 | 135 | }; |
|
100 | 136 | }; |
|
101 | 137 | "dogpile.cache" = super.buildPythonPackage { |
|
102 |
name = "dogpile.cache-0. |
|
|
138 | name = "dogpile.cache-0.9.0"; | |
|
103 | 139 | doCheck = false; |
|
104 | 140 | propagatedBuildInputs = [ |
|
105 | 141 | self."decorator" |
|
106 | 142 | ]; |
|
107 | 143 | src = fetchurl { |
|
108 |
url = "https://files.pythonhosted.org/packages/84 |
|
|
109 | sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9"; | |
|
144 | url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz"; | |
|
145 | sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k"; | |
|
110 | 146 | }; |
|
111 | 147 | meta = { |
|
112 | 148 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
@@ -204,11 +240,11 b' self: super: {' | |||
|
204 | 240 | }; |
|
205 | 241 | }; |
|
206 | 242 | "hg-evolve" = super.buildPythonPackage { |
|
207 |
name = "hg-evolve- |
|
|
243 | name = "hg-evolve-9.1.0"; | |
|
208 | 244 | doCheck = false; |
|
209 | 245 | src = fetchurl { |
|
210 |
url = "https://files.pythonhosted.org/packages/ |
|
|
211 | sha256 = "09avqn7c1biz97vb1zw91q6nfzydpcqv43mgpfrj7ywp0fscfgf3"; | |
|
246 | url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz"; | |
|
247 | sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps"; | |
|
212 | 248 | }; |
|
213 | 249 | meta = { |
|
214 | 250 | license = [ { fullName = "GPLv2+"; } ]; |
@@ -230,16 +266,33 b' self: super: {' | |||
|
230 | 266 | }; |
|
231 | 267 | }; |
|
232 | 268 | "hupper" = super.buildPythonPackage { |
|
233 |
name = "hupper-1. |
|
|
269 | name = "hupper-1.9.1"; | |
|
234 | 270 | doCheck = false; |
|
235 | 271 | src = fetchurl { |
|
236 |
url = "https://files.pythonhosted.org/packages/ |
|
|
237 | sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy"; | |
|
272 | url = "https://files.pythonhosted.org/packages/09/3a/4f215659f31eeffe364a984dba486bfa3907bfcc54b7013bdfe825cebb5f/hupper-1.9.1.tar.gz"; | |
|
273 | sha256 = "0pyg879fv9mbwlnbzw2a3234qqycqs9l97h5mpkmk0bvxhi2471v"; | |
|
238 | 274 | }; |
|
239 | 275 | meta = { |
|
240 | 276 | license = [ pkgs.lib.licenses.mit ]; |
|
241 | 277 | }; |
|
242 | 278 | }; |
|
279 | "importlib-metadata" = super.buildPythonPackage { | |
|
280 | name = "importlib-metadata-0.23"; | |
|
281 | doCheck = false; | |
|
282 | propagatedBuildInputs = [ | |
|
283 | self."zipp" | |
|
284 | self."contextlib2" | |
|
285 | self."configparser" | |
|
286 | self."pathlib2" | |
|
287 | ]; | |
|
288 | src = fetchurl { | |
|
289 | url = "https://files.pythonhosted.org/packages/5d/44/636bcd15697791943e2dedda0dbe098d8530a38d113b202817133e0b06c0/importlib_metadata-0.23.tar.gz"; | |
|
290 | sha256 = "09mdqdfv5rdrwz80jh9m379gxmvk2vhjfz0fg53hid00icvxf65a"; | |
|
291 | }; | |
|
292 | meta = { | |
|
293 | license = [ pkgs.lib.licenses.asl20 ]; | |
|
294 | }; | |
|
295 | }; | |
|
243 | 296 | "ipdb" = super.buildPythonPackage { |
|
244 | 297 | name = "ipdb-0.12"; |
|
245 | 298 | doCheck = false; |
@@ -291,50 +344,54 b' self: super: {' | |||
|
291 | 344 | }; |
|
292 | 345 | }; |
|
293 | 346 | "mako" = super.buildPythonPackage { |
|
294 |
name = "mako-1.0 |
|
|
347 | name = "mako-1.1.0"; | |
|
295 | 348 | doCheck = false; |
|
296 | 349 | propagatedBuildInputs = [ |
|
297 | 350 | self."markupsafe" |
|
298 | 351 | ]; |
|
299 | 352 | src = fetchurl { |
|
300 |
url = "https://files.pythonhosted.org/packages/e |
|
|
301 | sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf"; | |
|
353 | url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz"; | |
|
354 | sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3"; | |
|
302 | 355 | }; |
|
303 | 356 | meta = { |
|
304 | 357 | license = [ pkgs.lib.licenses.mit ]; |
|
305 | 358 | }; |
|
306 | 359 | }; |
|
307 | 360 | "markupsafe" = super.buildPythonPackage { |
|
308 | name = "markupsafe-1.1. | |
|
|
361 | name = "markupsafe-1.1.1"; | |
|
309 | 362 | doCheck = false; |
|
310 | 363 | src = fetchurl { |
|
311 | url = "https://files.pythonhosted.org/packages/ | |
|
|
312 | sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf"; | |
|
364 | url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"; | |
|
365 | sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9"; | |
|
313 | 366 | }; |
|
314 | 367 | meta = { |
|
315 | license = [ pkgs.lib.licenses.bsdOriginal ]; | |
|
368 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ]; | |
|
316 | 369 | }; |
|
317 | 370 | }; |
|
318 | 371 | "mercurial" = super.buildPythonPackage { |
|
319 | name = "mercurial- | |
|
|
372 | name = "mercurial-5.1.1"; | |
|
320 | 373 | doCheck = false; |
|
321 | 374 | src = fetchurl { |
|
322 | url = "https://files.pythonhosted.org/packages/60 | |
|
|
323 | sha256 = "0iybbkd9add066729zg01kwz5hhc1s6lhp9rrnsmzq6ihyxj3p8v"; | |
|
375 | url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz"; | |
|
376 | sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m"; | |
|
324 | 377 | }; |
|
325 | 378 | meta = { |
|
326 | 379 | license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ]; |
|
327 | 380 | }; |
|
328 | 381 | }; |
|
329 | 382 | "mock" = super.buildPythonPackage { |
|
330 | name = "mock- | |
|
|
383 | name = "mock-3.0.5"; | |
|
331 | 384 | doCheck = false; |
|
385 | propagatedBuildInputs = [ | |
|
386 | self."six" | |
|
387 | self."funcsigs" | |
|
388 | ]; | |
|
332 | 389 | src = fetchurl { |
|
333 | url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz"; | |
|
334 | sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq"; | |
|
390 | url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz"; | |
|
391 | sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3"; | |
|
335 | 392 | }; |
|
336 | 393 | meta = { |
|
337 | license = [ pkgs.lib.licenses.bsdOriginal ]; | |
|
394 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ]; | |
|
338 | 395 | }; |
|
339 | 396 | }; |
|
340 | 397 | "more-itertools" = super.buildPythonPackage { |
@@ -362,6 +419,21 b' self: super: {' | |||
|
362 | 419 | license = [ pkgs.lib.licenses.asl20 ]; |
|
363 | 420 | }; |
|
364 | 421 | }; |
|
422 | "packaging" = super.buildPythonPackage { | |
|
423 | name = "packaging-19.2"; | |
|
424 | doCheck = false; | |
|
425 | propagatedBuildInputs = [ | |
|
426 | self."pyparsing" | |
|
427 | self."six" | |
|
428 | ]; | |
|
429 | src = fetchurl { | |
|
430 | url = "https://files.pythonhosted.org/packages/5a/2f/449ded84226d0e2fda8da9252e5ee7731bdf14cd338f622dfcd9934e0377/packaging-19.2.tar.gz"; | |
|
431 | sha256 = "0izwlz9h0bw171a1chr311g2y7n657zjaf4mq4rgm8pp9lbj9f98"; | |
|
432 | }; | |
|
433 | meta = { | |
|
434 | license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ]; | |
|
435 | }; | |
|
436 | }; | |
|
365 | 437 | "pastedeploy" = super.buildPythonPackage { |
|
366 | 438 | name = "pastedeploy-2.0.1"; |
|
367 | 439 | doCheck = false; |
@@ -374,15 +446,15 b' self: super: {' | |||
|
374 | 446 | }; |
|
375 | 447 | }; |
|
376 | 448 | "pathlib2" = super.buildPythonPackage { |
|
377 | name = "pathlib2-2.3. | |
|
|
449 | name = "pathlib2-2.3.5"; | |
|
378 | 450 | doCheck = false; |
|
379 | 451 | propagatedBuildInputs = [ |
|
380 | 452 | self."six" |
|
381 | 453 | self."scandir" |
|
382 | 454 | ]; |
|
383 | 455 | src = fetchurl { |
|
384 | url = "https://files.pythonhosted.org/packages/ | |
|
|
385 | sha256 = "1y0f9rkm1924zrc5dn4bwxlhgdkbml82lkcc28l5rgmr7d918q24"; | |
|
456 | url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz"; | |
|
457 | sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc"; | |
|
386 | 458 | }; |
|
387 | 459 | meta = { |
|
388 | 460 | license = [ pkgs.lib.licenses.mit ]; |
@@ -446,37 +518,40 b' self: super: {' | |||
|
446 | 518 | }; |
|
447 | 519 | }; |
|
448 | 520 | "pluggy" = super.buildPythonPackage { |
|
449 | name = "pluggy-0.11 | |
|
|
521 | name = "pluggy-0.13.1"; | |
|
450 | 522 | doCheck = false; |
|
523 | propagatedBuildInputs = [ | |
|
524 | self."importlib-metadata" | |
|
525 | ]; | |
|
451 | 526 | src = fetchurl { |
|
452 | url = "https://files.pythonhosted.org/packages/0 | |
|
|
453 | sha256 = "10511a54dvafw1jrk75mrhml53c7b7w4yaw7241696lc2hfvr895"; | |
|
527 | url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz"; | |
|
528 | sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm"; | |
|
454 | 529 | }; |
|
455 | 530 | meta = { |
|
456 | 531 | license = [ pkgs.lib.licenses.mit ]; |
|
457 | 532 | }; |
|
458 | 533 | }; |
|
459 | 534 | "prompt-toolkit" = super.buildPythonPackage { |
|
460 | name = "prompt-toolkit-1.0.1 | |
|
|
535 | name = "prompt-toolkit-1.0.18"; | |
|
461 | 536 | doCheck = false; |
|
462 | 537 | propagatedBuildInputs = [ |
|
463 | 538 | self."six" |
|
464 | 539 | self."wcwidth" |
|
465 | 540 | ]; |
|
466 | 541 | src = fetchurl { |
|
467 | url = "https://files.pythonhosted.org/packages/f | |
|
|
468 | sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1"; | |
|
542 | url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz"; | |
|
543 | sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx"; | |
|
469 | 544 | }; |
|
470 | 545 | meta = { |
|
471 | 546 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
472 | 547 | }; |
|
473 | 548 | }; |
|
474 | 549 | "psutil" = super.buildPythonPackage { |
|
475 | name = "psutil-5.5 | |
|
|
550 | name = "psutil-5.6.5"; | |
|
476 | 551 | doCheck = false; |
|
477 | 552 | src = fetchurl { |
|
478 | url = "https://files.pythonhosted.org/packages/c | |
|
|
479 | sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj"; | |
|
553 | url = "https://files.pythonhosted.org/packages/03/9a/95c4b3d0424426e5fd94b5302ff74cea44d5d4f53466e1228ac8e73e14b4/psutil-5.6.5.tar.gz"; | |
|
554 | sha256 = "0isil5jxwwd8awz54qk28rpgjg43i5l6yl70g40vxwa4r4m56lfh"; | |
|
480 | 555 | }; |
|
481 | 556 | meta = { |
|
482 | 557 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
@@ -494,16 +569,42 b' self: super: {' | |||
|
494 | 569 | }; |
|
495 | 570 | }; |
|
496 | 571 | "py" = super.buildPythonPackage { |
|
497 | name = "py-1. | |
|
|
572 | name = "py-1.8.0"; | |
|
498 | 573 | doCheck = false; |
|
499 | 574 | src = fetchurl { |
|
500 | url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz"; | |
|
501 | sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6"; | |
|
575 | url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz"; | |
|
576 | sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw"; | |
|
502 | 577 | }; |
|
503 | 578 | meta = { |
|
504 | 579 | license = [ pkgs.lib.licenses.mit ]; |
|
505 | 580 | }; |
|
506 | 581 | }; |
|
582 | "pycparser" = super.buildPythonPackage { | |
|
583 | name = "pycparser-2.19"; | |
|
584 | doCheck = false; | |
|
585 | src = fetchurl { | |
|
586 | url = "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"; | |
|
587 | sha256 = "1cr5dcj9628lkz1qlwq3fv97c25363qppkmcayqvd05dpy573259"; | |
|
588 | }; | |
|
589 | meta = { | |
|
590 | license = [ pkgs.lib.licenses.bsdOriginal ]; | |
|
591 | }; | |
|
592 | }; | |
|
593 | "pygit2" = super.buildPythonPackage { | |
|
594 | name = "pygit2-0.28.2"; | |
|
595 | doCheck = false; | |
|
596 | propagatedBuildInputs = [ | |
|
597 | self."cffi" | |
|
598 | self."six" | |
|
599 | ]; | |
|
600 | src = fetchurl { | |
|
601 | url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz"; | |
|
602 | sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d"; | |
|
603 | }; | |
|
604 | meta = { | |
|
605 | license = [ { fullName = "GPLv2 with linking exception"; } ]; | |
|
606 | }; | |
|
607 | }; | |
|
507 | 608 | "pygments" = super.buildPythonPackage { |
|
508 | 609 | name = "pygments-2.4.2"; |
|
509 | 610 | doCheck = false; |
@@ -515,6 +616,17 b' self: super: {' | |||
|
515 | 616 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
516 | 617 | }; |
|
517 | 618 | }; |
|
619 | "pyparsing" = super.buildPythonPackage { | |
|
620 | name = "pyparsing-2.4.5"; | |
|
621 | doCheck = false; | |
|
622 | src = fetchurl { | |
|
623 | url = "https://files.pythonhosted.org/packages/00/32/8076fa13e832bb4dcff379f18f228e5a53412be0631808b9ca2610c0f566/pyparsing-2.4.5.tar.gz"; | |
|
624 | sha256 = "0fk8gsybiw1gm146mkjdjvaajwh20xwvpv4j7syh2zrnpq0j19jc"; | |
|
625 | }; | |
|
626 | meta = { | |
|
627 | license = [ pkgs.lib.licenses.mit ]; | |
|
628 | }; | |
|
629 | }; | |
|
518 | 630 | "pyramid" = super.buildPythonPackage { |
|
519 | 631 | name = "pyramid-1.10.4"; |
|
520 | 632 | doCheck = false; |
@@ -539,59 +651,61 b' self: super: {' | |||
|
539 | 651 | }; |
|
540 | 652 | }; |
|
541 | 653 | "pyramid-mako" = super.buildPythonPackage { |
|
542 | name = "pyramid-mako-1.0 | |
|
|
654 | name = "pyramid-mako-1.1.0"; | |
|
543 | 655 | doCheck = false; |
|
544 | 656 | propagatedBuildInputs = [ |
|
545 | 657 | self."pyramid" |
|
546 | 658 | self."mako" |
|
547 | 659 | ]; |
|
548 | 660 | src = fetchurl { |
|
549 | url = "https://files.pythonhosted.org/packages/f1 | |
|
|
550 | sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d"; | |
|
661 | url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz"; | |
|
662 | sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0"; | |
|
551 | 663 | }; |
|
552 | 664 | meta = { |
|
553 | 665 | license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ]; |
|
554 | 666 | }; |
|
555 | 667 | }; |
|
556 | 668 | "pytest" = super.buildPythonPackage { |
|
557 | name = "pytest- | |
|
|
669 | name = "pytest-4.6.5"; | |
|
558 | 670 | doCheck = false; |
|
559 | 671 | propagatedBuildInputs = [ |
|
560 | 672 | self."py" |
|
561 | 673 | self."six" |
|
562 | self." | |
|
|
674 | self."packaging" | |
|
563 | 675 | self."attrs" |
|
564 | self."more-itertools" | |
|
565 | 676 | self."atomicwrites" |
|
566 | 677 | self."pluggy" |
|
678 | self."importlib-metadata" | |
|
679 | self."wcwidth" | |
|
567 | 680 | self."funcsigs" |
|
568 | 681 | self."pathlib2" |
|
682 | self."more-itertools" | |
|
569 | 683 | ]; |
|
570 | 684 | src = fetchurl { |
|
571 | url = "https://files.pythonhosted.org/packages/5f | |
|
|
572 | sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk"; | |
|
685 | url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz"; | |
|
686 | sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg"; | |
|
573 | 687 | }; |
|
574 | 688 | meta = { |
|
575 | 689 | license = [ pkgs.lib.licenses.mit ]; |
|
576 | 690 | }; |
|
577 | 691 | }; |
|
578 | 692 | "pytest-cov" = super.buildPythonPackage { |
|
579 | name = "pytest-cov-2. | |
|
|
693 | name = "pytest-cov-2.7.1"; | |
|
580 | 694 | doCheck = false; |
|
581 | 695 | propagatedBuildInputs = [ |
|
582 | 696 | self."pytest" |
|
583 | 697 | self."coverage" |
|
584 | 698 | ]; |
|
585 | 699 | src = fetchurl { |
|
586 | url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz"; | |
|
587 | sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73"; | |
|
700 | url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz"; | |
|
701 | sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0"; | |
|
588 | 702 | }; |
|
589 | 703 | meta = { |
|
590 | 704 | license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ]; |
|
591 | 705 | }; |
|
592 | 706 | }; |
|
593 | 707 | "pytest-profiling" = super.buildPythonPackage { |
|
594 | name = "pytest-profiling-1. | |
|
|
708 | name = "pytest-profiling-1.7.0"; | |
|
595 | 709 | doCheck = false; |
|
596 | 710 | propagatedBuildInputs = [ |
|
597 | 711 | self."six" |
@@ -599,53 +713,65 b' self: super: {' | |||
|
599 | 713 | self."gprof2dot" |
|
600 | 714 | ]; |
|
601 | 715 | src = fetchurl { |
|
602 | url = "https://files.pythonhosted.org/packages/f | |
|
|
603 | sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb"; | |
|
716 | url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz"; | |
|
717 | sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk"; | |
|
604 | 718 | }; |
|
605 | 719 | meta = { |
|
606 | 720 | license = [ pkgs.lib.licenses.mit ]; |
|
607 | 721 | }; |
|
608 | 722 | }; |
|
609 | 723 | "pytest-runner" = super.buildPythonPackage { |
|
610 | name = "pytest-runner- | |
|
|
724 | name = "pytest-runner-5.1"; | |
|
611 | 725 | doCheck = false; |
|
612 | 726 | src = fetchurl { |
|
613 | url = "https://files.pythonhosted.org/packages/9e | |
|
|
614 | sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj"; | |
|
727 | url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz"; | |
|
728 | sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815"; | |
|
615 | 729 | }; |
|
616 | 730 | meta = { |
|
617 | 731 | license = [ pkgs.lib.licenses.mit ]; |
|
618 | 732 | }; |
|
619 | 733 | }; |
|
620 | 734 | "pytest-sugar" = super.buildPythonPackage { |
|
621 | name = "pytest-sugar-0.9. | |
|
|
735 | name = "pytest-sugar-0.9.2"; | |
|
622 | 736 | doCheck = false; |
|
623 | 737 | propagatedBuildInputs = [ |
|
624 | 738 | self."pytest" |
|
625 | 739 | self."termcolor" |
|
740 | self."packaging" | |
|
626 | 741 | ]; |
|
627 | 742 | src = fetchurl { |
|
628 | url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz"; | |
|
629 | sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b"; | |
|
743 | url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz"; | |
|
744 | sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w"; | |
|
630 | 745 | }; |
|
631 | 746 | meta = { |
|
632 | 747 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
|
633 | 748 | }; |
|
634 | 749 | }; |
|
635 | 750 | "pytest-timeout" = super.buildPythonPackage { |
|
636 | name = "pytest-timeout-1.3. | |
|
|
751 | name = "pytest-timeout-1.3.3"; | |
|
637 | 752 | doCheck = false; |
|
638 | 753 | propagatedBuildInputs = [ |
|
639 | 754 | self."pytest" |
|
640 | 755 | ]; |
|
641 | 756 | src = fetchurl { |
|
642 | url = "https://files.pythonhosted.org/packages/8c | |
|
|
643 | sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi"; | |
|
757 | url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz"; | |
|
758 | sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a"; | |
|
644 | 759 | }; |
|
645 | 760 | meta = { |
|
646 | 761 | license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ]; |
|
647 | 762 | }; |
|
648 | 763 | }; |
|
764 | "redis" = super.buildPythonPackage { | |
|
765 | name = "redis-3.3.11"; | |
|
766 | doCheck = false; | |
|
767 | src = fetchurl { | |
|
768 | url = "https://files.pythonhosted.org/packages/06/ca/00557c74279d2f256d3c42cabf237631355f3a132e4c74c2000e6647ad98/redis-3.3.11.tar.gz"; | |
|
769 | sha256 = "1hicqbi5xl92hhml82awrr2rxl9jar5fp8nbcycj9qgmsdwc43wd"; | |
|
770 | }; | |
|
771 | meta = { | |
|
772 | license = [ pkgs.lib.licenses.mit ]; | |
|
773 | }; | |
|
774 | }; | |
|
649 | 775 | "repoze.lru" = super.buildPythonPackage { |
|
650 | 776 | name = "repoze.lru-0.7"; |
|
651 | 777 | doCheck = false; |
@@ -658,7 +784,7 b' self: super: {' | |||
|
658 | 784 | }; |
|
659 | 785 | }; |
|
660 | 786 | "rhodecode-vcsserver" = super.buildPythonPackage { |
|
661 | name = "rhodecode-vcsserver-4.1 | |
|
|
787 | name = "rhodecode-vcsserver-4.18.0"; | |
|
662 | 788 | buildInputs = [ |
|
663 | 789 | self."pytest" |
|
664 | 790 | self."py" |
@@ -691,7 +817,9 b' self: super: {' | |||
|
691 | 817 | self."pastedeploy" |
|
692 | 818 | self."pyramid" |
|
693 | 819 | self."pyramid-mako" |
|
820 | self."pygit2" | |
|
694 | 821 | self."repoze.lru" |
|
822 | self."redis" | |
|
695 | 823 | self."simplejson" |
|
696 | 824 | self."subprocess32" |
|
697 | 825 | self."subvertpy" |
@@ -748,11 +876,11 b' self: super: {' | |||
|
748 | 876 | }; |
|
749 | 877 | }; |
|
750 | 878 | "setuptools" = super.buildPythonPackage { |
|
751 | name = "setuptools-4 | |
|
|
879 | name = "setuptools-44.0.0"; | |
|
752 | 880 | doCheck = false; |
|
753 | 881 | src = fetchurl { |
|
754 | url = "https://files.pythonhosted.org/packages/ | |
|
|
755 | sha256 = "04sns22y2hhsrwfy1mha2lgslvpjsjsz8xws7h2rh5a7ylkd28m2"; | |
|
882 | url = "https://files.pythonhosted.org/packages/b0/f3/44da7482ac6da3f36f68e253cb04de37365b3dba9036a3c70773b778b485/setuptools-44.0.0.zip"; | |
|
883 | sha256 = "025h5cnxcmda1893l6i12hrwdvs1n8r31qs6q4pkif2v7rrggfp5"; | |
|
756 | 884 | }; |
|
757 | 885 | meta = { |
|
758 | 886 | license = [ pkgs.lib.licenses.mit ]; |
@@ -825,7 +953,7 b' self: super: {' | |||
|
825 | 953 | }; |
|
826 | 954 | }; |
|
827 | 955 | "traitlets" = super.buildPythonPackage { |
|
828 | name = "traitlets-4.3. | |
|
|
956 | name = "traitlets-4.3.3"; | |
|
829 | 957 | doCheck = false; |
|
830 | 958 | propagatedBuildInputs = [ |
|
831 | 959 | self."ipython-genutils" |
@@ -834,8 +962,8 b' self: super: {' | |||
|
834 | 962 | self."enum34" |
|
835 | 963 | ]; |
|
836 | 964 | src = fetchurl { |
|
837 | url = "https://files.pythonhosted.org/packages/ | |
|
|
838 | sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww"; | |
|
965 | url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz"; | |
|
966 | sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh"; | |
|
839 | 967 | }; |
|
840 | 968 | meta = { |
|
841 | 969 | license = [ pkgs.lib.licenses.bsdOriginal ]; |
@@ -864,11 +992,11 b' self: super: {' | |||
|
864 | 992 | }; |
|
865 | 993 | }; |
|
866 | 994 | "waitress" = super.buildPythonPackage { |
|
867 | name = "waitress-1.3. | |
|
|
995 | name = "waitress-1.3.1"; | |
|
868 | 996 | doCheck = false; |
|
869 | 997 | src = fetchurl { |
|
870 | url = "https://files.pythonhosted.org/packages/ | |
|
|
871 | sha256 = "09j5dzbbcxib7vdskhx39s1qsydlr4n2p2png71d7mjnr9pnwajf"; | |
|
998 | url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz"; | |
|
999 | sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7"; | |
|
872 | 1000 | }; |
|
873 | 1001 | meta = { |
|
874 | 1002 | license = [ pkgs.lib.licenses.zpl21 ]; |
@@ -913,6 +1041,20 b' self: super: {' | |||
|
913 | 1041 | license = [ pkgs.lib.licenses.mit ]; |
|
914 | 1042 | }; |
|
915 | 1043 | }; |
|
1044 | "zipp" = super.buildPythonPackage { | |
|
1045 | name = "zipp-0.6.0"; | |
|
1046 | doCheck = false; | |
|
1047 | propagatedBuildInputs = [ | |
|
1048 | self."more-itertools" | |
|
1049 | ]; | |
|
1050 | src = fetchurl { | |
|
1051 | url = "https://files.pythonhosted.org/packages/57/dd/585d728479d97d25aeeb9aa470d36a4ad8d0ba5610f84e14770128ce6ff7/zipp-0.6.0.tar.gz"; | |
|
1052 | sha256 = "13ndkf7vklw978a4gdl1yfvn8hch28429a0iam67sg4nrp5v261p"; | |
|
1053 | }; | |
|
1054 | meta = { | |
|
1055 | license = [ pkgs.lib.licenses.mit ]; | |
|
1056 | }; | |
|
1057 | }; | |
|
916 | 1058 | "zope.deprecation" = super.buildPythonPackage { |
|
917 | 1059 | name = "zope.deprecation-4.4.0"; |
|
918 | 1060 | doCheck = false; |
@@ -23,6 +23,7 b' pkgs.stdenv.mkDerivation {' | |||
|
23 | 23 | pythonPackages.pip-tools |
|
24 | 24 | pkgs.apr |
|
25 | 25 | pkgs.aprutil |
|
26 | pkgs.libffi | |
|
26 | 27 | ]; |
|
27 | 28 | |
|
28 | 29 | shellHook = '' |
@@ -3,22 +3,24 b'' | |||
|
3 | 3 | # our custom configobj |
|
4 | 4 | https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6 |
|
5 | 5 | |
|
6 | dogpile.cache==0. | |
|
|
6 | dogpile.cache==0.9.0 | |
|
7 | 7 | dogpile.core==0.4.1 |
|
8 | 8 | decorator==4.1.2 |
|
9 | 9 | dulwich==0.13.0 |
|
10 | 10 | hgsubversion==1.9.3 |
|
11 | hg-evolve== | |
|
|
12 | mako==1. | |
|
|
13 | markupsafe==1.1. | |
|
|
14 | mercurial== | |
|
|
11 | hg-evolve==9.1.0 | |
|
12 | mako==1.1.0 | |
|
13 | markupsafe==1.1.1 | |
|
14 | mercurial==5.1.1 | |
|
15 | 15 | msgpack-python==0.5.6 |
|
16 | 16 | |
|
17 | 17 | pastedeploy==2.0.1 |
|
18 | 18 | pyramid==1.10.4 |
|
19 | pyramid-mako==1. | |
|
|
19 | pyramid-mako==1.1.0 | |
|
20 | pygit2==0.28.2 | |
|
20 | 21 | |
|
21 | 22 | repoze.lru==0.7 |
|
23 | redis==3.3.11 | |
|
22 | 24 | simplejson==3.16.0 |
|
23 | 25 | subprocess32==3.5.4 |
|
24 | 26 | subvertpy==0.10.1 |
@@ -33,7 +35,7 b' zope.interface==4.6.0' | |||
|
33 | 35 | gevent==1.4.0 |
|
34 | 36 | greenlet==0.4.15 |
|
35 | 37 | gunicorn==19.9.0 |
|
36 | waitress==1.3. | |
|
|
38 | waitress==1.3.1 | |
|
37 | 39 | |
|
38 | 40 | ## debug |
|
39 | 41 | ipdb==0.12.0 |
@@ -41,3 +43,6 b' ipython==5.1.0' | |||
|
41 | 43 | |
|
42 | 44 | ## test related requirements |
|
43 | 45 | -r requirements_test.txt |
|
46 | ||
|
47 | ## uncomment to add the debug libraries | |
|
48 | #-r requirements_debug.txt |
@@ -1,12 +1,18 b'' | |||
|
1 | 1 | # contains not directly required libraries we want to pin the version. |
|
2 | 2 | |
|
3 | atomicwrites==1. | |
|
|
4 | attrs==1 | |
|
|
5 | hupper==1.6.1 | |
|
6 | pathlib2==2.3.4 | |
|
3 | atomicwrites==1.3.0 | |
|
4 | attrs==19.3.0 | |
|
5 | contextlib2==0.6.0.post1 | |
|
6 | cffi==1.12.3 | |
|
7 | hupper==1.9.1 | |
|
8 | importlib-metadata==0.23 | |
|
9 | packaging==19.2.0 | |
|
10 | pathlib2==2.3.5 | |
|
7 | 11 | pygments==2.4.2 |
|
8 | psutil==5.5.1 | |
|
9 | pluggy==0.11.0 | |
|
12 | pyparsing==2.4.5 | |
|
13 | psutil==5.6.5 | |
|
14 | pluggy==0.13.1 | |
|
10 | 15 | scandir==1.10.0 |
|
11 | 16 | setproctitle==1.1.10 |
|
12 | 17 | venusian==1.2.0 |
|
18 | wcwidth==0.1.7 |
@@ -1,16 +1,16 b'' | |||
|
1 | 1 | # test related requirements |
|
2 | pytest== | |
|
|
3 | py==1. | |
|
|
4 | pytest-cov==2. | |
|
|
5 | pytest-sugar==0.9. | |
|
|
6 | pytest-runner== | |
|
|
7 | pytest-profiling==1. | |
|
|
8 | pytest-timeout==1.3. | |
|
|
2 | pytest==4.6.5 | |
|
3 | py==1.8.0 | |
|
4 | pytest-cov==2.7.1 | |
|
5 | pytest-sugar==0.9.2 | |
|
6 | pytest-runner==5.1.0 | |
|
7 | pytest-profiling==1.7.0 | |
|
8 | pytest-timeout==1.3.3 | |
|
9 | 9 | gprof2dot==2017.9.19 |
|
10 | 10 | |
|
11 | mock== | |
|
|
11 | mock==3.0.5 | |
|
12 | 12 | cov-core==1.15.0 |
|
13 | coverage==4.5. | |
|
|
13 | coverage==4.5.4 | |
|
14 | 14 | |
|
15 | 15 | webtest==2.0.33 |
|
16 | 16 | beautifulsoup4==4.6.3 |
@@ -44,25 +44,7 b' class RepoFactory(object):' | |||
|
44 | 44 | raise NotImplementedError() |
|
45 | 45 | |
|
46 | 46 | def repo(self, wire, create=False): |
|
47 | """ | |
|
48 | Get a repository instance for the given path. | |
|
49 | ||
|
50 | Uses internally the low level beaker API since the decorators introduce | |
|
51 | significant overhead. | |
|
52 | """ | |
|
53 | region = self._cache_region | |
|
54 | context = wire.get('context', None) | |
|
55 | repo_path = wire.get('path', '') | |
|
56 | context_uid = '{}'.format(context) | |
|
57 | cache = wire.get('cache', True) | |
|
58 | cache_on = context and cache | |
|
59 | ||
|
60 | @region.conditional_cache_on_arguments(condition=cache_on) | |
|
61 | def create_new_repo(_repo_type, _repo_path, _context_uid): | |
|
62 | return self._create_repo(wire, create) | |
|
63 | ||
|
64 | repo = create_new_repo(self.repo_type, repo_path, context_uid) | |
|
65 | return repo | |
|
47 | raise NotImplementedError() | |
|
66 | 48 | |
|
67 | 49 | |
|
68 | 50 | def obfuscate_qs(query_string): |
@@ -14,6 +14,7 b'' | |||
|
14 | 14 | # You should have received a copy of the GNU General Public License |
|
15 | 15 | # along with this program; if not, write to the Free Software Foundation, |
|
16 | 16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
17 | ||
|
17 | 18 | import collections |
|
18 | 19 | import logging |
|
19 | 20 | import os |
@@ -26,42 +27,54 b' import urllib2' | |||
|
26 | 27 | from functools import wraps |
|
27 | 28 | |
|
28 | 29 | import more_itertools |
|
30 | import pygit2 | |
|
31 | from pygit2 import Repository as LibGit2Repo | |
|
29 | 32 | from dulwich import index, objects |
|
30 | 33 | from dulwich.client import HttpGitClient, LocalGitClient |
|
31 | 34 | from dulwich.errors import ( |
|
32 | 35 | NotGitRepository, ChecksumMismatch, WrongObjectException, |
|
33 | 36 | MissingCommitError, ObjectMissing, HangupException, |
|
34 | 37 | UnexpectedCommandError) |
|
35 | from dulwich.repo import Repo as DulwichRepo | |
|
|
38 | from dulwich.repo import Repo as DulwichRepo | |
|
36 | 39 | from dulwich.server import update_server_info |
|
37 | 40 | |
|
38 | 41 | from vcsserver import exceptions, settings, subprocessio |
|
39 | from vcsserver.utils import safe_str | |
|
40 | from vcsserver.base import RepoFactory, obfuscate_qs | |
|
|
42 | from vcsserver.utils import safe_str, safe_int | |
|
43 | from vcsserver.base import RepoFactory, obfuscate_qs | |
|
41 | 44 | from vcsserver.hgcompat import ( |
|
42 | 45 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler) |
|
43 | 46 | from vcsserver.git_lfs.lib import LFSOidStore |
|
47 | from vcsserver.vcs_base import RemoteBase | |
|
44 | 48 | |
|
45 | 49 | DIR_STAT = stat.S_IFDIR |
|
46 | 50 | FILE_MODE = stat.S_IFMT |
|
47 | 51 | GIT_LINK = objects.S_IFGITLINK |
|
52 | PEELED_REF_MARKER = '^{}' | |
|
53 | ||
|
48 | 54 | |
|
49 | 55 | log = logging.getLogger(__name__) |
|
50 | 56 | |
|
51 | 57 | |
|
58 | def str_to_dulwich(value): | |
|
59 | """ | |
|
60 | Dulwich 0.10.1a requires `unicode` objects to be passed in. | |
|
61 | """ | |
|
62 | return value.decode(settings.WIRE_ENCODING) | |
|
63 | ||
|
64 | ||
|
52 | 65 | def reraise_safe_exceptions(func): |
|
53 | 66 | """Converts Dulwich exceptions to something neutral.""" |
|
67 | ||
|
54 | 68 | @wraps(func) |
|
55 | 69 | def wrapper(*args, **kwargs): |
|
56 | 70 | try: |
|
57 | 71 | return func(*args, **kwargs) |
|
58 | except (ChecksumMismatch, WrongObjectException, MissingCommitError, | |
|
59 | ObjectMissing) as e: | |
|
60 | exc = exceptions.LookupException(e) | |
|
61 | raise exc(e) | |
|
72 | except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e: | |
|
73 | exc = exceptions.LookupException(org_exc=e) | |
|
74 | raise exc(safe_str(e)) | |
|
62 | 75 | except (HangupException, UnexpectedCommandError) as e: |
|
63 | exc = exceptions.VcsException(e) | |
|
64 | raise exc(e) | |
|
76 | exc = exceptions.VcsException(org_exc=e) | |
|
77 | raise exc(safe_str(e)) | |
|
65 | 78 | except Exception as e: |
|
66 | 79 | # NOTE(marcink): because of how dulwich handles some exceptions |
|
67 | 80 | # (KeyError on empty repos), we cannot track this and catch all |
@@ -80,33 +93,51 b' class Repo(DulwichRepo):' | |||
|
80 | 93 | Since dulwich is sometimes keeping .idx file descriptors open, it leads to |
|
81 | 94 | "Too many open files" error. We need to close all opened file descriptors |
|
82 | 95 | once the repo object is destroyed. |
|
83 | ||
|
84 | TODO: mikhail: please check if we need this wrapper after updating dulwich | |
|
85 | to 0.12.0 + | |
|
86 | 96 | """ |
|
87 | 97 | def __del__(self): |
|
88 | 98 | if hasattr(self, 'object_store'): |
|
89 | 99 | self.close() |
|
90 | 100 | |
|
91 | 101 | |
|
102 | class Repository(LibGit2Repo): | |
|
103 | ||
|
104 | def __enter__(self): | |
|
105 | return self | |
|
106 | ||
|
107 | def __exit__(self, exc_type, exc_val, exc_tb): | |
|
108 | self.free() | |
|
109 | ||
|
110 | ||
|
92 | 111 | class GitFactory(RepoFactory): |
|
93 | 112 | repo_type = 'git' |
|
94 | 113 | |
|
95 | def _create_repo(self, wire, create): | |
|
96 | repo_path = str_to_dulwich(wire['path']) | |
|
97 | return Repo( | |
|
|
114 | def _create_repo(self, wire, create, use_libgit2=False): | |
|
115 | if use_libgit2: | |
|
116 | return Repository(wire['path']) | |
|
117 | else: | |
|
118 | repo_path = str_to_dulwich(wire['path']) | |
|
119 | return Repo(repo_path) | |
|
120 | ||
|
121 | def repo(self, wire, create=False, use_libgit2=False): | |
|
122 | """ | |
|
123 | Get a repository instance for the given path. | |
|
124 | """ | |
|
125 | return self._create_repo(wire, create, use_libgit2) | |
|
126 | ||
|
127 | def repo_libgit2(self, wire): | |
|
128 | return self.repo(wire, use_libgit2=True) | |
|
98 | 129 | |
|
99 | 130 | |
|
100 | class GitRemote( | |
|
|
131 | class GitRemote(RemoteBase): | |
|
101 | 132 | |
|
102 | 133 | def __init__(self, factory): |
|
103 | 134 | self._factory = factory |
|
104 | self.peeled_ref_marker = '^{}' | |
|
105 | 135 | self._bulk_methods = { |
|
106 | " | |
|
|
107 | " | |
|
|
108 | " | |
|
|
109 | " | |
|
|
136 | "date": self.date, | |
|
137 | "author": self.author, | |
|
138 | "branch": self.branch, | |
|
139 | "message": self.message, | |
|
140 | "parents": self.parents, | |
|
110 | 141 | "_commit": self.revision, |
|
111 | 142 | } |
|
112 | 143 | |
@@ -115,10 +146,6 b' class GitRemote(object):' | |||
|
115 | 146 | return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']]) |
|
116 | 147 | return {} |
|
117 | 148 | |
|
118 | def _assign_ref(self, wire, ref, commit_id): | |
|
119 | repo = self._factory.repo(wire) | |
|
120 | repo[ref] = commit_id | |
|
121 | ||
|
122 | 149 | def _remote_conf(self, config): |
|
123 | 150 | params = [ |
|
124 | 151 | '-c', 'core.askpass=""', |
@@ -129,49 +156,75 b' class GitRemote(object):' | |||
|
129 | 156 | return params |
|
130 | 157 | |
|
131 | 158 | @reraise_safe_exceptions |
|
132 | def is_empty(self, wire): | |
|
133 | repo = self._factory.repo(wire) | |
|
134 | try: | |
|
135 | return not repo.head() | |
|
136 | except Exception: | |
|
137 | log.exception("failed to read object_store") | |
|
138 |
|
|
|
159 | def discover_git_version(self): | |
|
160 | stdout, _ = self.run_git_command( | |
|
161 | {}, ['--version'], _bare=True, _safe=True) | |
|
162 | prefix = 'git version' | |
|
163 | if stdout.startswith(prefix): | |
|
164 | stdout = stdout[len(prefix):] | |
|
165 | return stdout.strip() | |
|
139 | 166 | |
|
140 | 167 | @reraise_safe_exceptions |
|
141 |
def |
|
|
142 | repo = self._factory.repo(wire) | |
|
143 | blob = objects.Blob() | |
|
144 | blob.set_raw_string(content) | |
|
145 | repo.object_store.add_object(blob) | |
|
146 | return blob.id | |
|
168 | def is_empty(self, wire): | |
|
169 | repo_init = self._factory.repo_libgit2(wire) | |
|
170 | with repo_init as repo: | |
|
171 | ||
|
172 | try: | |
|
173 | has_head = repo.head.name | |
|
174 | if has_head: | |
|
175 | return False | |
|
176 | ||
|
177 | # NOTE(marcink): check again using more expensive method | |
|
178 | return repo.is_empty | |
|
179 | except Exception: | |
|
180 | pass | |
|
181 | ||
|
182 | return True | |
|
147 | 183 | |
|
148 | 184 | @reraise_safe_exceptions |
|
149 | 185 | def assert_correct_path(self, wire): |
|
150 | path = wire.get('path') | |
|
151 | try: | |
|
152 | self._factory.repo(wire) | |
|
153 | except NotGitRepository as e: | |
|
154 | tb = traceback.format_exc() | |
|
155 | log.debug("Invalid Git path `%s`, tb: %s", path, tb) | |
|
156 |
|
|
|
186 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
187 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
188 | def _assert_correct_path(_context_uid, _repo_id): | |
|
189 | try: | |
|
190 | repo_init = self._factory.repo_libgit2(wire) | |
|
191 | with repo_init as repo: | |
|
192 | pass | |
|
193 | except pygit2.GitError: | |
|
194 | path = wire.get('path') | |
|
195 | tb = traceback.format_exc() | |
|
196 | log.debug("Invalid Git path `%s`, tb: %s", path, tb) | |
|
197 | return False | |
|
157 | 198 | |
|
158 | return True | |
|
199 | return True | |
|
200 | return _assert_correct_path(context_uid, repo_id) | |
|
159 | 201 | |
|
160 | 202 | @reraise_safe_exceptions |
|
161 | 203 | def bare(self, wire): |
|
162 | repo = self._factory.repo(wire) | |
|
163 |
|
|
|
204 | repo_init = self._factory.repo_libgit2(wire) | |
|
205 | with repo_init as repo: | |
|
206 | return repo.is_bare | |
|
164 | 207 | |
|
165 | 208 | @reraise_safe_exceptions |
|
166 | 209 | def blob_as_pretty_string(self, wire, sha): |
|
167 | repo = self._factory.repo(wire) | |
|
168 | return repo[sha].as_pretty_string() | |
|
210 | repo_init = self._factory.repo_libgit2(wire) | |
|
211 | with repo_init as repo: | |
|
212 | blob_obj = repo[sha] | |
|
213 | blob = blob_obj.data | |
|
214 | return blob | |
|
169 | 215 | |
|
170 | 216 | @reraise_safe_exceptions |
|
171 | 217 | def blob_raw_length(self, wire, sha): |
|
172 |
repo = self._ |
|
|
173 | blob = repo[sha] | |
|
174 |
|
|
|
218 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
219 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
220 | def _blob_raw_length(_repo_id, _sha): | |
|
221 | ||
|
222 | repo_init = self._factory.repo_libgit2(wire) | |
|
223 | with repo_init as repo: | |
|
224 | blob = repo[sha] | |
|
225 | return blob.size | |
|
226 | ||
|
227 | return _blob_raw_length(repo_id, sha) | |
|
175 | 228 | |
|
176 | 229 | def _parse_lfs_pointer(self, raw_content): |
|
177 | 230 | |
@@ -191,19 +244,44 b' class GitRemote(object):' | |||
|
191 | 244 | return {} |
|
192 | 245 | |
|
193 | 246 | @reraise_safe_exceptions |
|
194 |
def is_large_file(self, wire, |
|
|
195 |
repo = self._ |
|
|
196 | blob = repo[sha] | |
|
197 | return self._parse_lfs_pointer(blob.as_raw_string()) | |
|
247 | def is_large_file(self, wire, commit_id): | |
|
248 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
249 | ||
|
250 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
251 | def _is_large_file(_repo_id, _sha): | |
|
252 | repo_init = self._factory.repo_libgit2(wire) | |
|
253 | with repo_init as repo: | |
|
254 | blob = repo[commit_id] | |
|
255 | if blob.is_binary: | |
|
256 | return {} | |
|
257 | ||
|
258 | return self._parse_lfs_pointer(blob.data) | |
|
259 | ||
|
260 | return _is_large_file(repo_id, commit_id) | |
|
261 | ||
|
262 | @reraise_safe_exceptions | |
|
263 | def is_binary(self, wire, tree_id): | |
|
264 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
265 | ||
|
266 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
267 | def _is_binary(_repo_id, _tree_id): | |
|
268 | repo_init = self._factory.repo_libgit2(wire) | |
|
269 | with repo_init as repo: | |
|
270 | blob_obj = repo[tree_id] | |
|
271 | return blob_obj.is_binary | |
|
272 | ||
|
273 | return _is_binary(repo_id, tree_id) | |
|
198 | 274 | |
|
199 | 275 | @reraise_safe_exceptions |
|
200 | 276 | def in_largefiles_store(self, wire, oid): |
|
201 | repo = self._factory.repo(wire) | |
|
202 | 277 | conf = self._wire_to_config(wire) |
|
278 | repo_init = self._factory.repo_libgit2(wire) | |
|
279 | with repo_init as repo: | |
|
280 | repo_name = repo.path | |
|
203 | 281 | |
|
204 | 282 | store_location = conf.get('vcs_git_lfs_store_location') |
|
205 | 283 | if store_location: |
|
206 | repo_name = repo.path | |
|
284 | ||
|
207 | 285 | store = LFSOidStore( |
|
208 | 286 | oid=oid, repo=repo_name, store_location=store_location) |
|
209 | 287 | return store.has_oid() |
@@ -212,12 +290,13 b' class GitRemote(object):' | |||
|
212 | 290 | |
|
213 | 291 | @reraise_safe_exceptions |
|
214 | 292 | def store_path(self, wire, oid): |
|
215 | repo = self._factory.repo(wire) | |
|
216 | 293 | conf = self._wire_to_config(wire) |
|
294 | repo_init = self._factory.repo_libgit2(wire) | |
|
295 | with repo_init as repo: | |
|
296 | repo_name = repo.path | |
|
217 | 297 | |
|
218 | 298 | store_location = conf.get('vcs_git_lfs_store_location') |
|
219 | 299 | if store_location: |
|
220 | repo_name = repo.path | |
|
221 | 300 | store = LFSOidStore( |
|
222 | 301 | oid=oid, repo=repo_name, store_location=store_location) |
|
223 | 302 | return store.oid_path |
@@ -225,20 +304,21 b' class GitRemote(object):' | |||
|
225 | 304 | |
|
226 | 305 | @reraise_safe_exceptions |
|
227 | 306 | def bulk_request(self, wire, rev, pre_load): |
|
228 | result = {} | |
|
229 | for attr in pre_load: | |
|
230 | try: | |
|
231 | method = self._bulk_methods[attr] | |
|
232 | args = [wire, rev] | |
|
233 |
|
|
|
234 | args.extend(["commit_time", "commit_timezone"]) | |
|
235 | elif attr in ["author", "message", "parents"]: | |
|
236 | args.append(attr) | |
|
237 | result[attr] = method(*args) | |
|
238 | except KeyError as e: | |
|
239 | raise exceptions.VcsException(e)( | |
|
240 | "Unknown bulk attribute: %s" % attr) | |
|
241 | return result | |
|
307 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
308 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
309 | def _bulk_request(_repo_id, _rev, _pre_load): | |
|
310 | result = {} | |
|
311 | for attr in pre_load: | |
|
312 | try: | |
|
313 | method = self._bulk_methods[attr] | |
|
314 | args = [wire, rev] | |
|
315 | result[attr] = method(*args) | |
|
316 | except KeyError as e: | |
|
317 | raise exceptions.VcsException(e)( | |
|
318 | "Unknown bulk attribute: %s" % attr) | |
|
319 | return result | |
|
320 | ||
|
321 | return _bulk_request(repo_id, rev, sorted(pre_load)) | |
|
242 | 322 | |
|
243 | 323 | def _build_opener(self, url): |
|
244 | 324 | handlers = [] |
@@ -255,6 +335,14 b' class GitRemote(object):' | |||
|
255 | 335 | |
|
256 | 336 | return urllib2.build_opener(*handlers) |
|
257 | 337 | |
|
338 | def _type_id_to_name(self, type_id): | |
|
339 | return { | |
|
340 | 1: b'commit', | |
|
341 | 2: b'tree', | |
|
342 | 3: b'blob', | |
|
343 | 4: b'tag' | |
|
344 | }[type_id] | |
|
345 | ||
|
258 | 346 | @reraise_safe_exceptions |
|
259 | 347 | def check_url(self, url, config): |
|
260 | 348 | url_obj = url_parser(url) |
@@ -317,6 +405,42 b' class GitRemote(object):' | |||
|
317 | 405 | index.build_index_from_tree(repo.path, repo.index_path(), |
|
318 | 406 | repo.object_store, repo["HEAD"].tree) |
|
319 | 407 | |
|
408 | @reraise_safe_exceptions | |
|
409 | def branch(self, wire, commit_id): | |
|
410 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
411 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
412 | def _branch(_context_uid, _repo_id, _commit_id): | |
|
413 | regex = re.compile('^refs/heads') | |
|
414 | ||
|
415 | def filter_with(ref): | |
|
416 | return regex.match(ref[0]) and ref[1] == _commit_id | |
|
417 | ||
|
418 | branches = filter(filter_with, self.get_refs(wire).items()) | |
|
419 | return [x[0].split('refs/heads/')[-1] for x in branches] | |
|
420 | ||
|
421 | return _branch(context_uid, repo_id, commit_id) | |
|
422 | ||
|
423 | @reraise_safe_exceptions | |
|
424 | def commit_branches(self, wire, commit_id): | |
|
425 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
426 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
427 | def _commit_branches(_context_uid, _repo_id, _commit_id): | |
|
428 | repo_init = self._factory.repo_libgit2(wire) | |
|
429 | with repo_init as repo: | |
|
430 | branches = [x for x in repo.branches.with_commit(_commit_id)] | |
|
431 | return branches | |
|
432 | ||
|
433 | return _commit_branches(context_uid, repo_id, commit_id) | |
|
434 | ||
|
435 | @reraise_safe_exceptions | |
|
436 | def add_object(self, wire, content): | |
|
437 | repo_init = self._factory.repo_libgit2(wire) | |
|
438 | with repo_init as repo: | |
|
439 | blob = objects.Blob() | |
|
440 | blob.set_raw_string(content) | |
|
441 | repo.object_store.add_object(blob) | |
|
442 | return blob.id | |
|
443 | ||
|
320 | 444 | # TODO: this is quite complex, check if that can be simplified |
|
321 | 445 | @reraise_safe_exceptions |
|
322 | 446 | def commit(self, wire, commit_data, branch, commit_tree, updated, removed): |
@@ -367,8 +491,7 b' class GitRemote(object):' | |||
|
367 | 491 | curtree = newtree |
|
368 | 492 | parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id) |
|
369 | 493 | else: |
|
370 | parent.add( | |
|
371 | name=node['node_path'], mode=node['mode'], hexsha=blob.id) | |
|
494 | parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id) | |
|
372 | 495 | |
|
373 | 496 | new_trees.append(parent) |
|
374 | 497 | # Update ancestors |
@@ -412,6 +535,9 b' class GitRemote(object):' | |||
|
412 | 535 | setattr(commit, k, v) |
|
413 | 536 | object_store.add_object(commit) |
|
414 | 537 | |
|
538 | self.create_branch(wire, branch, commit.id) | |
|
539 | ||
|
540 | # dulwich set-ref | |
|
415 | 541 | ref = 'refs/heads/%s' % branch |
|
416 | 542 | repo.refs[ref] = commit.id |
|
417 | 543 | |
@@ -454,7 +580,7 b' class GitRemote(object):' | |||
|
454 | 580 | # that contains a tag object, so that we would end up with |
|
455 | 581 | # a peeled ref at this point. |
|
456 | 582 | for k in remote_refs: |
|
457 |
if k.endswith( |
|
|
583 | if k.endswith(PEELED_REF_MARKER): | |
|
458 | 584 | log.debug("Skipping peeled reference %s", k) |
|
459 | 585 | continue |
|
460 | 586 | repo[k] = remote_refs[k] |
@@ -471,14 +597,19 b' class GitRemote(object):' | |||
|
471 | 597 | return remote_refs |
|
472 | 598 | |
|
473 | 599 | @reraise_safe_exceptions |
|
474 | def sync_fetch(self, wire, url, refs=None): | |
|
600 | def sync_fetch(self, wire, url, refs=None, all_refs=False): | |
|
475 | 601 | repo = self._factory.repo(wire) |
|
476 | 602 | if refs and not isinstance(refs, (list, tuple)): |
|
477 | 603 | refs = [refs] |
|
604 | ||
|
478 | 605 | config = self._wire_to_config(wire) |
|
479 | 606 | # get all remote refs we'll use to fetch later |
|
607 | cmd = ['ls-remote'] | |
|
608 | if not all_refs: | |
|
609 | cmd += ['--heads', '--tags'] | |
|
610 | cmd += [url] | |
|
480 | 611 | output, __ = self.run_git_command( |
|
481 |
wire, |
|
|
612 | wire, cmd, fail_on_stderr=False, | |
|
482 | 613 | _copts=self._remote_conf(config), |
|
483 | 614 | extra_env={'GIT_TERMINAL_PROMPT': '0'}) |
|
484 | 615 | |
@@ -491,7 +622,7 b' class GitRemote(object):' | |||
|
491 | 622 | if ref in remote_refs: |
|
492 | 623 | # duplicate, skip |
|
493 | 624 | continue |
|
494 |
if ref.endswith( |
|
|
625 | if ref.endswith(PEELED_REF_MARKER): | |
|
495 | 626 | log.debug("Skipping peeled reference %s", ref) |
|
496 | 627 | continue |
|
497 | 628 | # don't sync HEAD |
@@ -506,6 +637,7 b' class GitRemote(object):' | |||
|
506 | 637 | elif not refs: |
|
507 | 638 | fetch_refs.append('{}:{}'.format(ref, ref)) |
|
508 | 639 | log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs)) |
|
640 | ||
|
509 | 641 | if fetch_refs: |
|
510 | 642 | for chunk in more_itertools.chunked(fetch_refs, 1024 * 4): |
|
511 | 643 | fetch_refs_chunks = list(chunk) |
@@ -523,7 +655,7 b' class GitRemote(object):' | |||
|
523 | 655 | if not self.check_url(url, wire): |
|
524 | 656 | return |
|
525 | 657 | config = self._wire_to_config(wire) |
|
526 |
|
|
|
658 | self._factory.repo(wire) | |
|
527 | 659 | self.run_git_command( |
|
528 | 660 | wire, ['push', url, '--mirror'], fail_on_stderr=False, |
|
529 | 661 | _copts=self._remote_conf(config), |
@@ -556,48 +688,92 b' class GitRemote(object):' | |||
|
556 | 688 | |
|
557 | 689 | @reraise_safe_exceptions |
|
558 | 690 | def get_object(self, wire, sha): |
|
559 |
repo = self._ |
|
|
560 | obj = repo.get_object(sha) | |
|
561 | commit_id = obj.id | |
|
691 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
692 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
693 | def _get_object(_context_uid, _repo_id, _sha): | |
|
694 | repo_init = self._factory.repo_libgit2(wire) | |
|
695 | with repo_init as repo: | |
|
562 | 696 | |
|
563 | if isinstance(obj, Tag): | |
|
564 | commit_id = obj.object[1] | |
|
697 | missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path']) | |
|
698 | try: | |
|
699 | commit = repo.revparse_single(sha) | |
|
700 | except (KeyError, ValueError) as e: | |
|
701 | raise exceptions.LookupException(e)(missing_commit_err) | |
|
702 | ||
|
703 | is_tag = False | |
|
704 | if isinstance(commit, pygit2.Tag): | |
|
705 | commit = repo.get(commit.target) | |
|
706 | is_tag = True | |
|
707 | ||
|
708 | check_dangling = True | |
|
709 | if is_tag: | |
|
710 | check_dangling = False | |
|
565 | 711 | |
|
566 | return { | |
|
567 | 'id': obj.id, | |
|
568 | 'type': obj.type_name, | |
|
569 | 'commit_id': commit_id, | |
|
570 | 'idx': 0 | |
|
571 | } | |
|
712 | # we used a reference and it parsed means we're not having a dangling commit | |
|
713 | if sha != commit.hex: | |
|
714 | check_dangling = False | |
|
715 | ||
|
716 | if check_dangling: | |
|
717 | # check for dangling commit | |
|
718 | for branch in repo.branches.with_commit(commit.hex): | |
|
719 | if branch: | |
|
720 | break | |
|
721 | else: | |
|
722 | raise exceptions.LookupException(None)(missing_commit_err) | |
|
572 | 723 | |
|
573 | @reraise_safe_exceptions | |
|
574 | def get_object_attrs(self, wire, sha, *attrs): | |
|
575 | repo = self._factory.repo(wire) | |
|
576 | obj = repo.get_object(sha) | |
|
577 | return list(getattr(obj, a) for a in attrs) | |
|
724 | commit_id = commit.hex | |
|
725 | type_id = commit.type | |
|
726 | ||
|
727 | return { | |
|
728 | 'id': commit_id, | |
|
729 | 'type': self._type_id_to_name(type_id), | |
|
730 | 'commit_id': commit_id, | |
|
731 | 'idx': 0 | |
|
732 | } | |
|
733 | ||
|
734 | return _get_object(context_uid, repo_id, sha) | |
|
578 | 735 | |
|
579 | 736 | @reraise_safe_exceptions |
|
580 | 737 | def get_refs(self, wire): |
|
581 |
repo = self._ |
|
|
582 | result = {} | |
|
583 | for ref, sha in repo.refs.as_dict().items(): | |
|
584 | peeled_sha = repo.get_peeled(ref) | |
|
585 | result[ref] = peeled_sha | |
|
586 | return result | |
|
738 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
739 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
740 | def _get_refs(_context_uid, _repo_id): | |
|
741 | ||
|
742 | repo_init = self._factory.repo_libgit2(wire) | |
|
743 | with repo_init as repo: | |
|
744 | regex = re.compile('^refs/(heads|tags)/') | |
|
745 | return {x.name: x.target.hex for x in | |
|
746 | filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())} | |
|
747 | ||
|
748 | return _get_refs(context_uid, repo_id) | |
|
587 | 749 | |
|
588 | 750 | @reraise_safe_exceptions |
|
589 |
def get_ |
|
|
590 |
repo = self._ |
|
|
591 | return repo.refs.path | |
|
751 | def get_branch_pointers(self, wire): | |
|
752 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
753 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
754 | def _get_branch_pointers(_context_uid, _repo_id): | |
|
755 | ||
|
756 | repo_init = self._factory.repo_libgit2(wire) | |
|
757 | regex = re.compile('^refs/heads') | |
|
758 | with repo_init as repo: | |
|
759 | branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects()) | |
|
760 | return {x.target.hex: x.shorthand for x in branches} | |
|
761 | ||
|
762 | return _get_branch_pointers(context_uid, repo_id) | |
|
592 | 763 | |
|
593 | 764 | @reraise_safe_exceptions |
|
594 | 765 | def head(self, wire, show_exc=True): |
|
595 |
repo = self._ |
|
|
596 | try: | |
|
597 | return repo.head() | |
|
598 | except Exception: | |
|
599 | if show_exc: | |
|
600 |
|
|
|
766 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
767 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
768 | def _head(_context_uid, _repo_id, _show_exc): | |
|
769 | repo_init = self._factory.repo_libgit2(wire) | |
|
770 | with repo_init as repo: | |
|
771 | try: | |
|
772 | return repo.head.peel().hex | |
|
773 | except Exception: | |
|
774 | if show_exc: | |
|
775 | raise | |
|
776 | return _head(context_uid, repo_id, show_exc) | |
|
601 | 777 | |
|
602 | 778 | @reraise_safe_exceptions |
|
603 | 779 | def init(self, wire): |
@@ -611,35 +787,141 b' class GitRemote(object):' | |||
|
611 | 787 | |
|
612 | 788 | @reraise_safe_exceptions |
|
613 | 789 | def revision(self, wire, rev): |
|
614 | repo = self._factory.repo(wire) | |
|
615 | obj = repo[rev] | |
|
616 | obj_data = { | |
|
617 | 'id': obj.id, | |
|
618 | } | |
|
619 | try: | |
|
620 | obj_data['tree'] = obj.tree | |
|
621 | except AttributeError: | |
|
622 | pass | |
|
623 | return obj_data | |
|
790 | ||
|
791 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
792 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
793 | def _revision(_context_uid, _repo_id, _rev): | |
|
794 | repo_init = self._factory.repo_libgit2(wire) | |
|
795 | with repo_init as repo: | |
|
796 | commit = repo[rev] | |
|
797 | obj_data = { | |
|
798 | 'id': commit.id.hex, | |
|
799 | } | |
|
800 | # tree objects itself don't have tree_id attribute | |
|
801 | if hasattr(commit, 'tree_id'): | |
|
802 | obj_data['tree'] = commit.tree_id.hex | |
|
803 | ||
|
804 | return obj_data | |
|
805 | return _revision(context_uid, repo_id, rev) | |
|
806 | ||
|
807 | @reraise_safe_exceptions | |
|
808 | def date(self, wire, commit_id): | |
|
809 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
810 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
811 | def _date(_repo_id, _commit_id): | |
|
812 | repo_init = self._factory.repo_libgit2(wire) | |
|
813 | with repo_init as repo: | |
|
814 | commit = repo[commit_id] | |
|
815 | ||
|
816 | if hasattr(commit, 'commit_time'): | |
|
817 | commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset | |
|
818 | else: | |
|
819 | commit = commit.get_object() | |
|
820 | commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset | |
|
821 | ||
|
822 | # TODO(marcink): check dulwich difference of offset vs timezone | |
|
823 | return [commit_time, commit_time_offset] | |
|
824 | return _date(repo_id, commit_id) | |
|
624 | 825 | |
|
625 | 826 | @reraise_safe_exceptions |
|
626 |
def |
|
|
627 |
repo = self._ |
|
|
628 | obj = repo[rev] | |
|
629 | return getattr(obj, attr) | |
|
827 | def author(self, wire, commit_id): | |
|
828 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
829 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
830 | def _author(_repo_id, _commit_id): | |
|
831 | repo_init = self._factory.repo_libgit2(wire) | |
|
832 | with repo_init as repo: | |
|
833 | commit = repo[commit_id] | |
|
834 | ||
|
835 | if hasattr(commit, 'author'): | |
|
836 | author = commit.author | |
|
837 | else: | |
|
838 | author = commit.get_object().author | |
|
839 | ||
|
840 | if author.email: | |
|
841 | return u"{} <{}>".format(author.name, author.email) | |
|
842 | ||
|
843 | return u"{}".format(author.raw_name) | |
|
844 | return _author(repo_id, commit_id) | |
|
845 | ||
|
846 | @reraise_safe_exceptions | |
|
847 | def message(self, wire, commit_id): | |
|
848 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
849 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
850 | def _message(_repo_id, _commit_id): | |
|
851 | repo_init = self._factory.repo_libgit2(wire) | |
|
852 | with repo_init as repo: | |
|
853 | commit = repo[commit_id] | |
|
854 | return commit.message | |
|
855 | return _message(repo_id, commit_id) | |
|
856 | ||
|
857 | @reraise_safe_exceptions | |
|
858 | def parents(self, wire, commit_id): | |
|
859 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
860 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
861 | def _parents(_repo_id, _commit_id): | |
|
862 | repo_init = self._factory.repo_libgit2(wire) | |
|
863 | with repo_init as repo: | |
|
864 | commit = repo[commit_id] | |
|
865 | if hasattr(commit, 'parent_ids'): | |
|
866 | parent_ids = commit.parent_ids | |
|
867 | else: | |
|
868 | parent_ids = commit.get_object().parent_ids | |
|
869 | ||
|
870 | return [x.hex for x in parent_ids] | |
|
871 | return _parents(repo_id, commit_id) | |
|
872 | ||
|
873 | @reraise_safe_exceptions | |
|
874 | def children(self, wire, commit_id): | |
|
875 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
876 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
877 | def _children(_repo_id, _commit_id): | |
|
878 | output, __ = self.run_git_command( | |
|
879 | wire, ['rev-list', '--all', '--children']) | |
|
880 | ||
|
881 | child_ids = [] | |
|
882 | pat = re.compile(r'^%s' % commit_id) | |
|
883 | for l in output.splitlines(): | |
|
884 | if pat.match(l): | |
|
885 | found_ids = l.split(' ')[1:] | |
|
886 | child_ids.extend(found_ids) | |
|
887 | ||
|
888 | return child_ids | |
|
889 | return _children(repo_id, commit_id) | |
|
630 | 890 | |
|
631 | 891 | @reraise_safe_exceptions |
|
632 | 892 | def set_refs(self, wire, key, value): |
|
633 | repo = self._factory.repo(wire) | |
|
634 | repo.refs[key] = value | |
|
893 | repo_init = self._factory.repo_libgit2(wire) | |
|
894 | with repo_init as repo: | |
|
895 | repo.references.create(key, value, force=True) | |
|
896 | ||
|
897 | @reraise_safe_exceptions | |
|
898 | def create_branch(self, wire, branch_name, commit_id, force=False): | |
|
899 | repo_init = self._factory.repo_libgit2(wire) | |
|
900 | with repo_init as repo: | |
|
901 | commit = repo[commit_id] | |
|
902 | ||
|
903 | if force: | |
|
904 | repo.branches.local.create(branch_name, commit, force=force) | |
|
905 | elif not repo.branches.get(branch_name): | |
|
906 | # create only if that branch isn't existing | |
|
907 | repo.branches.local.create(branch_name, commit, force=force) | |
|
635 | 908 | |
|
636 | 909 | @reraise_safe_exceptions |
|
637 | 910 | def remove_ref(self, wire, key): |
|
638 | repo = self._factory.repo(wire) | |
|
639 | del repo.refs[key] | |
|
911 | repo_init = self._factory.repo_libgit2(wire) | |
|
912 | with repo_init as repo: | |
|
913 | repo.references.delete(key) | |
|
914 | ||
|
915 | @reraise_safe_exceptions | |
|
916 | def tag_remove(self, wire, tag_name): | |
|
917 | repo_init = self._factory.repo_libgit2(wire) | |
|
918 | with repo_init as repo: | |
|
919 | key = 'refs/tags/{}'.format(tag_name) | |
|
920 | repo.references.delete(key) | |
|
640 | 921 | |
|
641 | 922 | @reraise_safe_exceptions |
|
642 | 923 | def tree_changes(self, wire, source_id, target_id): |
|
924 | # TODO(marcink): remove this seems it's only used by tests | |
|
643 | 925 | repo = self._factory.repo(wire) |
|
644 | 926 | source = repo[source_id].tree if source_id else None |
|
645 | 927 | target = repo[target_id].tree |
@@ -647,21 +929,158 b' class GitRemote(object):' | |||
|
647 | 929 | return list(result) |
|
648 | 930 | |
|
649 | 931 | @reraise_safe_exceptions |
|
932 | def tree_and_type_for_path(self, wire, commit_id, path): | |
|
933 | ||
|
934 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
935 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
936 | def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path): | |
|
937 | repo_init = self._factory.repo_libgit2(wire) | |
|
938 | ||
|
939 | with repo_init as repo: | |
|
940 | commit = repo[commit_id] | |
|
941 | try: | |
|
942 | tree = commit.tree[path] | |
|
943 | except KeyError: | |
|
944 | return None, None, None | |
|
945 | ||
|
946 | return tree.id.hex, tree.type, tree.filemode | |
|
947 | return _tree_and_type_for_path(context_uid, repo_id, commit_id, path) | |
|
948 | ||
|
949 | @reraise_safe_exceptions | |
|
650 | 950 | def tree_items(self, wire, tree_id): |
|
651 | repo = self._factory.repo(wire) | |
|
652 | tree = repo[tree_id] | |
|
951 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
952 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
953 | def _tree_items(_repo_id, _tree_id): | |
|
954 | ||
|
955 | repo_init = self._factory.repo_libgit2(wire) | |
|
956 | with repo_init as repo: | |
|
957 | try: | |
|
958 | tree = repo[tree_id] | |
|
959 | except KeyError: | |
|
960 | raise ObjectMissing('No tree with id: {}'.format(tree_id)) | |
|
961 | ||
|
962 | result = [] | |
|
963 | for item in tree: | |
|
964 | item_sha = item.hex | |
|
965 | item_mode = item.filemode | |
|
966 | item_type = item.type | |
|
967 | ||
|
968 | if item_type == 'commit': | |
|
969 | # NOTE(marcink): submodules we translate to 'link' for backward compat | |
|
970 | item_type = 'link' | |
|
971 | ||
|
972 | result.append((item.name, item_mode, item_sha, item_type)) | |
|
973 | return result | |
|
974 | return _tree_items(repo_id, tree_id) | |
|
975 | ||
|
976 | @reraise_safe_exceptions | |
|
977 | def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context): | |
|
978 | """ | |
|
979 | Old version that uses subprocess to call diff | |
|
980 | """ | |
|
981 | ||
|
982 | flags = [ | |
|
983 | '-U%s' % context, '--patch', | |
|
984 | '--binary', | |
|
985 | '--find-renames', | |
|
986 | '--no-indent-heuristic', | |
|
987 | # '--indent-heuristic', | |
|
988 | #'--full-index', | |
|
989 | #'--abbrev=40' | |
|
990 | ] | |
|
991 | ||
|
992 | if opt_ignorews: | |
|
993 | flags.append('--ignore-all-space') | |
|
994 | ||
|
995 | if commit_id_1 == self.EMPTY_COMMIT: | |
|
996 | cmd = ['show'] + flags + [commit_id_2] | |
|
997 | else: | |
|
998 | cmd = ['diff'] + flags + [commit_id_1, commit_id_2] | |
|
999 | ||
|
1000 | if file_filter: | |
|
1001 | cmd.extend(['--', file_filter]) | |
|
1002 | ||
|
1003 | diff, __ = self.run_git_command(wire, cmd) | |
|
1004 | # If we used 'show' command, strip first few lines (until actual diff | |
|
1005 | # starts) | |
|
1006 | if commit_id_1 == self.EMPTY_COMMIT: | |
|
1007 | lines = diff.splitlines() | |
|
1008 | x = 0 | |
|
1009 | for line in lines: | |
|
1010 | if line.startswith('diff'): | |
|
1011 | break | |
|
1012 | x += 1 | |
|
1013 | # Append new line just like 'diff' command do | |
|
1014 | diff = '\n'.join(lines[x:]) + '\n' | |
|
1015 | return diff | |
|
1016 | ||
|
1017 | @reraise_safe_exceptions | |
|
1018 | def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context): | |
|
1019 | repo_init = self._factory.repo_libgit2(wire) | |
|
1020 | with repo_init as repo: | |
|
1021 | swap = True | |
|
1022 | flags = 0 | |
|
1023 | flags |= pygit2.GIT_DIFF_SHOW_BINARY | |
|
1024 | ||
|
1025 | if opt_ignorews: | |
|
1026 | flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE | |
|
1027 | ||
|
1028 | if commit_id_1 == self.EMPTY_COMMIT: | |
|
1029 | comm1 = repo[commit_id_2] | |
|
1030 | diff_obj = comm1.tree.diff_to_tree( | |
|
1031 | flags=flags, context_lines=context, swap=swap) | |
|
1032 | ||
|
1033 | else: | |
|
1034 | comm1 = repo[commit_id_2] | |
|
1035 | comm2 = repo[commit_id_1] | |
|
1036 | diff_obj = comm1.tree.diff_to_tree( | |
|
1037 | comm2.tree, flags=flags, context_lines=context, swap=swap) | |
|
1038 | similar_flags = 0 | |
|
1039 | similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES | |
|
1040 | diff_obj.find_similar(flags=similar_flags) | |
|
1041 | ||
|
1042 | if file_filter: | |
|
1043 | for p in diff_obj: | |
|
1044 | if p.delta.old_file.path == file_filter: | |
|
1045 | return p.patch or '' | |
|
1046 | # no matching path == no diff | |
|
1047 | return '' | |
|
1048 | return diff_obj.patch or '' | |
|
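For reference, the pygit2 calls used by the new `diff` method can be exercised on their own roughly as below; the repository path is made up, and the HEAD/parent selection merely stands in for the `commit_id_1`/`commit_id_2` arguments resolved above.

    import pygit2

    # Hypothetical repository path; in the code above the repo comes from repo_libgit2(wire).
    repo = pygit2.Repository('/tmp/example-repo')
    head = repo[repo.head.target]

    if head.parents:
        diff = head.parents[0].tree.diff_to_tree(head.tree, context_lines=3)
    else:
        # initial commit: diff against the empty tree, swapped so additions show as '+'
        diff = head.tree.diff_to_tree(context_lines=3, swap=True)

    diff.find_similar()            # rename detection, as GIT_DIFF_FIND_RENAMES does above
    print(diff.patch or '')        # the whole patch text
    for patch in diff:
        print(patch.delta.old_file.path, '->', patch.delta.new_file.path)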
1049 | ||
|
1050 | @reraise_safe_exceptions | |
|
1051 | def node_history(self, wire, commit_id, path, limit): | |
|
1052 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
1053 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
1054 | def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit): | |
|
1055 | # optimize for n==1, rev-list is much faster for that use-case | |
|
1056 | if limit == 1: | |
|
1057 | cmd = ['rev-list', '-1', commit_id, '--', path] | |
|
1058 | else: | |
|
1059 | cmd = ['log'] | |
|
1060 | if limit: | |
|
1061 | cmd.extend(['-n', str(safe_int(limit, 0))]) | |
|
1062 | cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path]) | |
|
1063 | ||
|
1064 | output, __ = self.run_git_command(wire, cmd) | |
|
1065 | commit_ids = re.findall(r'[0-9a-fA-F]{40}', output) | |
|
1066 | ||
|
1067 | return [x for x in commit_ids] | |
|
1068 | return _node_history(context_uid, repo_id, commit_id, path, limit) | |
|
1069 | ||
|
1070 | @reraise_safe_exceptions | |
|
1071 | def node_annotate(self, wire, commit_id, path): | |
|
1072 | ||
|
1073 | cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path] | |
|
1074 | # -l ==> outputs long shas (and we need all 40 characters) | |
|
1075 | # --root ==> doesn't put '^' character for boundaries | |
|
1076 | # -r commit_id ==> blames for the given commit | |
|
1077 | output, __ = self.run_git_command(wire, cmd) | |
|
653 | 1078 | |
|
654 | 1079 | result = [] |
|
655 | for item in tree.iteritems(): | |
|
656 | item_sha = item.sha | |
|
657 | item_mode = item.mode | |
|
658 | ||
|
659 | if FILE_MODE(item_mode) == GIT_LINK: | |
|
660 | item_type = "link" | |
|
661 | else: | |
|
662 | item_type = repo[item_sha].type_name | |
|
663 | ||
|
664 | result.append((item.path, item_mode, item_sha, item_type)) | |
|
1080 | for i, blame_line in enumerate(output.split('\n')[:-1]): | |
|
1081 | line_no = i + 1 | |
|
1082 | commit_id, line = re.split(r' ', blame_line, 1) | |
|
1083 | result.append((line_no, commit_id, line)) | |
|
665 | 1084 | return result |
|
666 | 1085 | |
|
667 | 1086 | @reraise_safe_exceptions |
@@ -670,13 +1089,20 b' class GitRemote(object):' | |||
|
670 | 1089 | update_server_info(repo) |
|
671 | 1090 | |
|
672 | 1091 | @reraise_safe_exceptions |
|
673 | def discover_git_version(self): | |
|
674 | stdout, _ = self.run_git_command( | |
|
675 | {}, ['--version'], _bare=True, _safe=True) | |
|
676 | prefix = 'git version' | |
|
677 | if stdout.startswith(prefix): | |
|
678 | stdout = stdout[len(prefix):] | |
|
679 | return stdout.strip() | |
|
1092 | def get_all_commit_ids(self, wire): | |
|
1093 | ||
|
1094 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
1095 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
1096 | def _get_all_commit_ids(_context_uid, _repo_id): | |
|
1097 | ||
|
1098 | cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags'] | |
|
1099 | try: | |
|
1100 | output, __ = self.run_git_command(wire, cmd) | |
|
1101 | return output.splitlines() | |
|
1102 | except Exception: | |
|
1103 | # Can be raised for empty repositories | |
|
1104 | return [] | |
|
1105 | return _get_all_commit_ids(context_uid, repo_id) | |
|
680 | 1106 | |
|
681 | 1107 | @reraise_safe_exceptions |
|
682 | 1108 | def run_git_command(self, wire, cmd, **opts): |
@@ -711,11 +1137,12 b' class GitRemote(object):' | |||
|
711 | 1137 | cmd = [settings.GIT_EXECUTABLE] + _copts + cmd |
|
712 | 1138 | _opts = {'env': gitenv, 'shell': False} |
|
713 | 1139 | |
|
1140 | proc = None | |
|
714 | 1141 | try: |
|
715 | 1142 | _opts.update(opts) |
|
716 | p = subprocessio.SubprocessIOChunker(cmd, **_opts) | |
|
1143 | proc = subprocessio.SubprocessIOChunker(cmd, **_opts) | |
|
717 | 1144 | |
|
718 | return ''.join(p), ''.join(p.error) | |
|
1145 | return ''.join(proc), ''.join(proc.error) | |
|
719 | 1146 | except (EnvironmentError, OSError) as err: |
|
720 | 1147 | cmd = ' '.join(cmd) # human friendly CMD |
|
721 | 1148 | tb_err = ("Couldn't run git command (%s).\n" |
@@ -727,26 +1154,24 b' class GitRemote(object):' | |||
|
727 | 1154 | return '', err |
|
728 | 1155 | else: |
|
729 | 1156 | raise exceptions.VcsException()(tb_err) |
|
1157 | finally: | |
|
1158 | if proc: | |
|
1159 | proc.close() | |
|
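The `proc = None` / `finally: proc.close()` change above guarantees the spawned git process is released even when reading its output fails. A generic standard-library illustration of the same cleanup pattern (not the SubprocessIOChunker API, and assuming a git binary is on PATH):

    import subprocess

    def run_command(cmd):
        proc = None
        try:
            proc = subprocess.Popen(
                cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
            stdout, stderr = proc.communicate()
            return stdout, stderr
        finally:
            # release the pipes/process even if Popen() or communicate() raised
            if proc:
                if proc.stdout:
                    proc.stdout.close()
                if proc.stderr:
                    proc.stderr.close()

    out, err = run_command(['git', '--version'])
    print(out)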
730 | 1160 | |
|
731 | 1161 | @reraise_safe_exceptions |
|
732 | 1162 | def install_hooks(self, wire, force=False): |
|
733 | 1163 | from vcsserver.hook_utils import install_git_hooks |
|
734 | repo = self._factory.repo(wire) | |
|
735 | return install_git_hooks(repo.path, repo.bare, force_create=force) | |
|
1164 | bare = self.bare(wire) | |
|
1165 | path = wire['path'] | |
|
1166 | return install_git_hooks(path, bare, force_create=force) | |
|
736 | 1167 | |
|
737 | 1168 | @reraise_safe_exceptions |
|
738 | 1169 | def get_hooks_info(self, wire): |
|
739 | 1170 | from vcsserver.hook_utils import ( |
|
740 | 1171 | get_git_pre_hook_version, get_git_post_hook_version) |
|
741 | repo = self._factory.repo(wire) | |
|
1172 | bare = self.bare(wire) | |
|
1173 | path = wire['path'] | |
|
742 | 1174 | return { |
|
743 | 'pre_version': get_git_pre_hook_version(repo.path, repo.bare), | |
|
744 | 'post_version': get_git_post_hook_version(repo.path, repo.bare), | |
|
1175 | 'pre_version': get_git_pre_hook_version(path, bare), | |
|
1176 | 'post_version': get_git_post_hook_version(path, bare), | |
|
745 | 1177 | } |
|
746 | ||
|
747 | ||
|
748 | def str_to_dulwich(value): | |
|
749 | """ | |
|
750 | Dulwich 0.10.1a requires `unicode` objects to be passed in. | |
|
751 | """ | |
|
752 | return value.decode(settings.WIRE_ENCODING) |
@@ -22,7 +22,7 b' import urllib' | |||
|
22 | 22 | import urllib2 |
|
23 | 23 | import traceback |
|
24 | 24 | |
|
25 | from hgext import largefiles, rebase | |
|
25 | from hgext import largefiles, rebase, purge | |
|
26 | 26 | from hgext.strip import strip as hgext_strip |
|
27 | 27 | from mercurial import commands |
|
28 | 28 | from mercurial import unionrepo |
@@ -37,6 +37,7 b' from vcsserver.hgcompat import (' | |||
|
37 | 37 | makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge, |
|
38 | 38 | patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError, |
|
39 | 39 | RepoLookupError, InterventionRequired, RequirementError) |
|
40 | from vcsserver.vcs_base import RemoteBase | |
|
40 | 41 | |
|
41 | 42 | log = logging.getLogger(__name__) |
|
42 | 43 | |
@@ -98,6 +99,7 b' def make_ui_from_config(repo_config):' | |||
|
98 | 99 | |
|
99 | 100 | def reraise_safe_exceptions(func): |
|
100 | 101 | """Decorator for converting mercurial exceptions to something neutral.""" |
|
102 | ||
|
101 | 103 | def wrapper(*args, **kwargs): |
|
102 | 104 | try: |
|
103 | 105 | return func(*args, **kwargs) |
@@ -142,12 +144,17 b' class MercurialFactory(RepoFactory):' | |||
|
142 | 144 | baseui = self._create_config(wire["config"]) |
|
143 | 145 | return instance(baseui, wire["path"], create) |
|
144 | 146 | |
|
147 | def repo(self, wire, create=False): | |
|
148 | """ | |
|
149 | Get a repository instance for the given path. | |
|
150 | """ | |
|
151 | return self._create_repo(wire, create) | |
|
145 | 152 | |
|
146 | class HgRemote(object): | |
|
153 | ||
|
154 | class HgRemote(RemoteBase): | |
|
147 | 155 | |
|
148 | 156 | def __init__(self, factory): |
|
149 | 157 | self._factory = factory |
|
150 | ||
|
151 | 158 | self._bulk_methods = { |
|
152 | 159 | "affected_files": self.ctx_files, |
|
153 | 160 | "author": self.ctx_user, |
@@ -199,113 +206,68 b' class HgRemote(object):' | |||
|
199 | 206 | |
|
200 | 207 | @reraise_safe_exceptions |
|
201 | 208 | def bookmarks(self, wire): |
|
202 | repo = self._factory.repo(wire) | |
|
203 | return dict(repo._bookmarks) | |
|
209 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
210 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
211 | def _bookmarks(_context_uid, _repo_id): | |
|
212 | repo = self._factory.repo(wire) | |
|
213 | return dict(repo._bookmarks) | |
|
214 | ||
|
215 | return _bookmarks(context_uid, repo_id) | |
|
204 | 216 | |
|
205 | 217 | @reraise_safe_exceptions |
|
206 | 218 | def branches(self, wire, normal, closed): |
|
207 | repo = self._factory.repo(wire) | |
|
208 | iter_branches = repo.branchmap().iterbranches() | |
|
209 | bt = {} | |
|
210 | for branch_name, _heads, tip, is_closed in iter_branches: | |
|
211 | if normal and not is_closed: | |
|
212 | bt[branch_name] = tip | |
|
213 | if closed and is_closed: | |
|
214 | bt[branch_name] = tip | |
|
215 | ||
|
216 | return bt | |
|
219 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
220 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
221 | def _branches(_context_uid, _repo_id, _normal, _closed): | |
|
222 | repo = self._factory.repo(wire) | |
|
223 | iter_branches = repo.branchmap().iterbranches() | |
|
224 | bt = {} | |
|
225 | for branch_name, _heads, tip, is_closed in iter_branches: | |
|
226 | if normal and not is_closed: | |
|
227 | bt[branch_name] = tip | |
|
228 | if closed and is_closed: | |
|
229 | bt[branch_name] = tip | |
|
217 | 230 | |
|
218 | @reraise_safe_exceptions | |
|
219 | def bulk_request(self, wire, rev, pre_load): | |
|
220 | result = {} | |
|
221 | for attr in pre_load: | |
|
222 | try: | |
|
223 | method = self._bulk_methods[attr] | |
|
224 | result[attr] = method(wire, rev) | |
|
225 | except KeyError as e: | |
|
226 | raise exceptions.VcsException(e)( | |
|
227 | 'Unknown bulk attribute: "%s"' % attr) | |
|
228 | return result | |
|
231 | return bt | |
|
229 | 232 | |
|
230 | @reraise_safe_exceptions | |
|
231 | def clone(self, wire, source, dest, update_after_clone=False, hooks=True): | |
|
232 | baseui = self._factory._create_config(wire["config"], hooks=hooks) | |
|
233 | clone(baseui, source, dest, noupdate=not update_after_clone) | |
|
233 | return _branches(context_uid, repo_id, normal, closed) | |
|
234 | 234 | |
|
235 | 235 | @reraise_safe_exceptions |
|
236 | def commitctx( | |
|
237 | self, wire, message, parents, commit_time, commit_timezone, | |
|
238 | user, files, extra, removed, updated): | |
|
239 | ||
|
240 | repo = self._factory.repo(wire) | |
|
241 | baseui = self._factory._create_config(wire['config']) | |
|
242 | publishing = baseui.configbool('phases', 'publish') | |
|
243 | if publishing: | |
|
244 | new_commit = 'public' | |
|
245 | else: | |
|
246 | new_commit = 'draft' | |
|
247 | ||
|
248 | def _filectxfn(_repo, ctx, path): | |
|
249 | """ | |
|
250 | Marks given path as added/changed/removed in a given _repo. This is | |
|
251 | for internal mercurial commit function. | |
|
252 | """ | |
|
253 | ||
|
254 | # check if this path is removed | |
|
255 | if path in removed: | |
|
256 | # returning None is a way to mark node for removal | |
|
257 | return None | |
|
236 | def bulk_request(self, wire, commit_id, pre_load): | |
|
237 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
238 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
239 | def _bulk_request(_repo_id, _commit_id, _pre_load): | |
|
240 | result = {} | |
|
241 | for attr in pre_load: | |
|
242 | try: | |
|
243 | method = self._bulk_methods[attr] | |
|
244 | result[attr] = method(wire, commit_id) | |
|
245 | except KeyError as e: | |
|
246 | raise exceptions.VcsException(e)( | |
|
247 | 'Unknown bulk attribute: "%s"' % attr) | |
|
248 | return result | |
|
258 | 249 | |
|
259 | # check if this path is added | |
|
260 | for node in updated: | |
|
261 | if node['path'] == path: | |
|
262 | return memfilectx( | |
|
263 | _repo, | |
|
264 | changectx=ctx, | |
|
265 | path=node['path'], | |
|
266 | data=node['content'], | |
|
267 | islink=False, | |
|
268 | isexec=bool(node['mode'] & stat.S_IXUSR), | |
|
269 | copied=False) | |
|
270 | ||
|
271 | raise exceptions.AbortException()( | |
|
272 | "Given path haven't been marked as added, " | |
|
273 | "changed or removed (%s)" % path) | |
|
274 | ||
|
275 | with repo.ui.configoverride({('phases', 'new-commit'): new_commit}): | |
|
276 | ||
|
277 | commit_ctx = memctx( | |
|
278 | repo=repo, | |
|
279 | parents=parents, | |
|
280 | text=message, | |
|
281 | files=files, | |
|
282 | filectxfn=_filectxfn, | |
|
283 | user=user, | |
|
284 | date=(commit_time, commit_timezone), | |
|
285 | extra=extra) | |
|
286 | ||
|
287 | n = repo.commitctx(commit_ctx) | |
|
288 | new_id = hex(n) | |
|
289 | ||
|
290 | return new_id | |
|
250 | return _bulk_request(repo_id, commit_id, sorted(pre_load)) | |
|
291 | 251 | |
|
292 | 252 | @reraise_safe_exceptions |
|
293 | def ctx_branch(self, wire, revision): | |
|
294 | repo = self._factory.repo(wire) | |
|
295 | ctx = self._get_ctx(repo, revision) | |
|
296 | return ctx.branch() | |
|
253 | def ctx_branch(self, wire, commit_id): | |
|
254 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
255 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
256 | def _ctx_branch(_repo_id, _commit_id): | |
|
257 | repo = self._factory.repo(wire) | |
|
258 | ctx = self._get_ctx(repo, commit_id) | |
|
259 | return ctx.branch() | |
|
260 | return _ctx_branch(repo_id, commit_id) | |
|
297 | 261 | |
|
298 | 262 | @reraise_safe_exceptions |
|
299 | def ctx_children(self, wire, revision): | |
|
300 | repo = self._factory.repo(wire) | |
|
301 | ctx = self._get_ctx(repo, revision) | |
|
302 | return [child.rev() for child in ctx.children()] | |
|
303 | ||
|
304 | @reraise_safe_exceptions | |
|
305 | def ctx_date(self, wire, revision): | |
|
306 | repo = self._factory.repo(wire) | |
|
307 | ctx = self._get_ctx(repo, revision) | |
|
308 | return ctx.date() | |
|
263 | def ctx_date(self, wire, commit_id): | |
|
264 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
265 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
266 | def _ctx_date(_repo_id, _commit_id): | |
|
267 | repo = self._factory.repo(wire) | |
|
268 | ctx = self._get_ctx(repo, commit_id) | |
|
269 | return ctx.date() | |
|
270 | return _ctx_date(repo_id, commit_id) | |
|
309 | 271 | |
|
310 | 272 | @reraise_safe_exceptions |
|
311 | 273 | def ctx_description(self, wire, revision): |
@@ -314,10 +276,15 b' class HgRemote(object):' | |||
|
314 | 276 | return ctx.description() |
|
315 | 277 | |
|
316 | 278 | @reraise_safe_exceptions |
|
317 | def ctx_files(self, wire, revision): | |
|
318 | repo = self._factory.repo(wire) | |
|
319 | ctx = self._get_ctx(repo, revision) | |
|
320 | return ctx.files() | |
|
279 | def ctx_files(self, wire, commit_id): | |
|
280 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
281 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
282 | def _ctx_files(_repo_id, _commit_id): | |
|
283 | repo = self._factory.repo(wire) | |
|
284 | ctx = self._get_ctx(repo, commit_id) | |
|
285 | return ctx.files() | |
|
286 | ||
|
287 | return _ctx_files(repo_id, commit_id) | |
|
321 | 288 | |
|
322 | 289 | @reraise_safe_exceptions |
|
323 | 290 | def ctx_list(self, path, revision): |
@@ -326,29 +293,59 b' class HgRemote(object):' | |||
|
326 | 293 | return list(ctx) |
|
327 | 294 | |
|
328 | 295 | @reraise_safe_exceptions |
|
329 | def ctx_parents(self, wire, revision): | |
|
330 | repo = self._factory.repo(wire) | |
|
331 | ctx = self._get_ctx(repo, revision) | |
|
332 | return [parent.rev() for parent in ctx.parents()] | |
|
296 | def ctx_parents(self, wire, commit_id): | |
|
297 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
298 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
299 | def _ctx_parents(_repo_id, _commit_id): | |
|
300 | repo = self._factory.repo(wire) | |
|
301 | ctx = self._get_ctx(repo, commit_id) | |
|
302 | return [parent.hex() for parent in ctx.parents() | |
|
303 | if not (parent.hidden() or parent.obsolete())] | |
|
304 | ||
|
305 | return _ctx_parents(repo_id, commit_id) | |
|
306 | ||
|
307 | @reraise_safe_exceptions | |
|
308 | def ctx_children(self, wire, commit_id): | |
|
309 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
310 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
311 | def _ctx_children(_repo_id, _commit_id): | |
|
312 | repo = self._factory.repo(wire) | |
|
313 | ctx = self._get_ctx(repo, commit_id) | |
|
314 | return [child.hex() for child in ctx.children() | |
|
315 | if not (child.hidden() or child.obsolete())] | |
|
316 | ||
|
317 | return _ctx_children(repo_id, commit_id) | |
|
333 | 318 | |
|
334 | 319 | @reraise_safe_exceptions |
|
335 | def ctx_phase(self, wire, revision): | |
|
336 | repo = self._factory.repo(wire) | |
|
337 | ctx = self._get_ctx(repo, revision) | |
|
338 | # public=0, draft=1, secret=3 | |
|
339 | return ctx.phase() | |
|
320 | def ctx_phase(self, wire, commit_id): | |
|
321 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
322 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
323 | def _ctx_phase(_context_uid, _repo_id, _commit_id): | |
|
324 | repo = self._factory.repo(wire) | |
|
325 | ctx = self._get_ctx(repo, commit_id) | |
|
326 | # public=0, draft=1, secret=3 | |
|
327 | return ctx.phase() | |
|
328 | return _ctx_phase(context_uid, repo_id, commit_id) | |
|
340 | 329 | |
|
341 | 330 | @reraise_safe_exceptions |
|
342 | def ctx_obsolete(self, wire, revision): | |
|
343 | repo = self._factory.repo(wire) | |
|
344 | ctx = self._get_ctx(repo, revision) | |
|
345 | return ctx.obsolete() | |
|
331 | def ctx_obsolete(self, wire, commit_id): | |
|
332 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
333 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
334 | def _ctx_obsolete(_context_uid, _repo_id, _commit_id): | |
|
335 | repo = self._factory.repo(wire) | |
|
336 | ctx = self._get_ctx(repo, commit_id) | |
|
337 | return ctx.obsolete() | |
|
338 | return _ctx_obsolete(context_uid, repo_id, commit_id) | |
|
346 | 339 | |
|
347 | 340 | @reraise_safe_exceptions |
|
348 | def ctx_hidden(self, wire, revision): | |
|
349 | repo = self._factory.repo(wire) | |
|
350 | ctx = self._get_ctx(repo, revision) | |
|
351 | return ctx.hidden() | |
|
341 | def ctx_hidden(self, wire, commit_id): | |
|
342 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
343 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
344 | def _ctx_hidden(_context_uid, _repo_id, _commit_id): | |
|
345 | repo = self._factory.repo(wire) | |
|
346 | ctx = self._get_ctx(repo, commit_id) | |
|
347 | return ctx.hidden() | |
|
348 | return _ctx_hidden(context_uid, repo_id, commit_id) | |
|
352 | 349 | |
|
353 | 350 | @reraise_safe_exceptions |
|
354 | 351 | def ctx_substate(self, wire, revision): |
@@ -438,9 +435,7 b' class HgRemote(object):' | |||
|
438 | 435 | return True |
|
439 | 436 | |
|
440 | 437 | @reraise_safe_exceptions |
|
441 | def diff( | |
|
442 | self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews, | |
|
443 | context): | |
|
438 | def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context): | |
|
444 | 439 | repo = self._factory.repo(wire) |
|
445 | 440 | |
|
446 | 441 | if file_filter: |
@@ -451,48 +446,56 b' class HgRemote(object):' | |||
|
451 | 446 | |
|
452 | 447 | try: |
|
453 | 448 | return "".join(patch.diff( |
|
454 | repo, node1=rev1, node2=rev2, match=match_filter, opts=opts)) | |
|
449 | repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts)) | |
|
455 | 450 | except RepoLookupError as e: |
|
456 | 451 | raise exceptions.LookupException(e)() |
|
457 | 452 | |
|
458 | 453 | @reraise_safe_exceptions |
|
459 | 454 | def node_history(self, wire, revision, path, limit): |
|
460 | repo = self._factory.repo(wire) | |
|
455 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
456 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
457 | def _node_history(_context_uid, _repo_id, _revision, _path, _limit): | |
|
458 | repo = self._factory.repo(wire) | |
|
461 | 459 | |
|
462 | ctx = self._get_ctx(repo, revision) | |
|
463 | fctx = ctx.filectx(path) | |
|
460 | ctx = self._get_ctx(repo, revision) | |
|
461 | fctx = ctx.filectx(path) | |
|
464 | 462 | |
|
465 | def history_iter(): | |
|
466 | limit_rev = fctx.rev() | |
|
467 | for obj in reversed(list(fctx.filelog())): | |
|
468 | obj = fctx.filectx(obj) | |
|
469 | ctx = obj.changectx() | |
|
470 | if ctx.hidden() or ctx.obsolete(): | |
|
471 | continue | |
|
463 | def history_iter(): | |
|
464 | limit_rev = fctx.rev() | |
|
465 | for obj in reversed(list(fctx.filelog())): | |
|
466 | obj = fctx.filectx(obj) | |
|
467 | ctx = obj.changectx() | |
|
468 | if ctx.hidden() or ctx.obsolete(): | |
|
469 | continue | |
|
472 | 470 | |
|
473 | if limit_rev >= obj.rev(): | |
|
474 | yield obj | |
|
471 | if limit_rev >= obj.rev(): | |
|
472 | yield obj | |
|
475 | 473 | |
|
476 | history = [] | |
|
477 | for cnt, obj in enumerate(history_iter()): | |
|
478 | if limit and cnt >= limit: | |
|
479 | break | |
|
480 | history.append(hex(obj.node())) | |
|
474 | history = [] | |
|
475 | for cnt, obj in enumerate(history_iter()): | |
|
476 | if limit and cnt >= limit: | |
|
477 | break | |
|
478 | history.append(hex(obj.node())) | |
|
481 | 479 | |
|
482 | return [x for x in history] | |
|
480 | return [x for x in history] | |
|
481 | return _node_history(context_uid, repo_id, revision, path, limit) | |
|
483 | 482 | |
|
484 | 483 | @reraise_safe_exceptions |
|
485 | 484 | def node_history_untill(self, wire, revision, path, limit): |
|
486 | repo = self._factory.repo(wire) | |
|
487 | ctx = self._get_ctx(repo, revision) | |
|
488 | fctx = ctx.filectx(path) | |
|
485 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
486 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
487 | def _node_history_until(_context_uid, _repo_id): | |
|
488 | repo = self._factory.repo(wire) | |
|
489 | ctx = self._get_ctx(repo, revision) | |
|
490 | fctx = ctx.filectx(path) | |
|
489 | 491 | |
|
490 | file_log = list(fctx.filelog()) | |
|
491 | if limit: | |
|
492 | # Limit to the last n items | |
|
493 | file_log = file_log[-limit:] | |
|
492 | file_log = list(fctx.filelog()) | |
|
493 | if limit: | |
|
494 | # Limit to the last n items | |
|
495 | file_log = file_log[-limit:] | |
|
494 | 496 | |
|
495 | return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)] | |
|
497 | return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)] | |
|
498 | return _node_history_until(context_uid, repo_id, revision, path, limit) | |
|
496 | 499 | |
|
497 | 500 | @reraise_safe_exceptions |
|
498 | 501 | def fctx_annotate(self, wire, revision, path): |
@@ -509,32 +512,45 b' class HgRemote(object):' | |||
|
509 | 512 | return result |
|
510 | 513 | |
|
511 | 514 | @reraise_safe_exceptions |
|
512 | def fctx_data(self, wire, revision, path): | |
|
515 | def fctx_node_data(self, wire, revision, path): | |
|
513 | 516 | repo = self._factory.repo(wire) |
|
514 | 517 | ctx = self._get_ctx(repo, revision) |
|
515 | 518 | fctx = ctx.filectx(path) |
|
516 | 519 | return fctx.data() |
|
517 | 520 | |
|
518 | 521 | @reraise_safe_exceptions |
|
519 | def fctx_flags(self, wire, revision, path): | |
|
520 | repo = self._factory.repo(wire) | |
|
521 | ctx = self._get_ctx(repo, revision) | |
|
522 | fctx = ctx.filectx(path) | |
|
523 | return fctx.flags() | |
|
522 | def fctx_flags(self, wire, commit_id, path): | |
|
523 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
524 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
525 | def _fctx_flags(_repo_id, _commit_id, _path): | |
|
526 | repo = self._factory.repo(wire) | |
|
527 | ctx = self._get_ctx(repo, commit_id) | |
|
528 | fctx = ctx.filectx(path) | |
|
529 | return fctx.flags() | |
|
530 | ||
|
531 | return _fctx_flags(repo_id, commit_id, path) | |
|
524 | 532 | |
|
525 | 533 | @reraise_safe_exceptions |
|
526 | def fctx_size(self, wire, revision, path): | |
|
527 | repo = self._factory.repo(wire) | |
|
528 | ctx = self._get_ctx(repo, revision) | |
|
529 | fctx = ctx.filectx(path) | |
|
530 | return fctx.size() | |
|
534 | def fctx_size(self, wire, commit_id, path): | |
|
535 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
536 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
537 | def _fctx_size(_repo_id, _revision, _path): | |
|
538 | repo = self._factory.repo(wire) | |
|
539 | ctx = self._get_ctx(repo, commit_id) | |
|
540 | fctx = ctx.filectx(path) | |
|
541 | return fctx.size() | |
|
542 | return _fctx_size(repo_id, commit_id, path) | |
|
531 | 543 | |
|
532 | 544 | @reraise_safe_exceptions |
|
533 | 545 | def get_all_commit_ids(self, wire, name): |
|
534 | repo = self._factory.repo(wire) | |
|
535 | repo = repo.filtered(name) | |
|
536 | revs = map(lambda x: hex(x[7]), repo.changelog.index) | |
|
537 | return revs | |
|
546 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
547 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
548 | def _get_all_commit_ids(_context_uid, _repo_id, _name): | |
|
549 | repo = self._factory.repo(wire) | |
|
550 | repo = repo.filtered(name) | |
|
551 | revs = map(lambda x: hex(x[7]), repo.changelog.index) | |
|
552 | return revs | |
|
553 | return _get_all_commit_ids(context_uid, repo_id, name) | |
|
538 | 554 | |
|
539 | 555 | @reraise_safe_exceptions |
|
540 | 556 | def get_config_value(self, wire, section, name, untrusted=False): |
@@ -542,18 +558,26 b' class HgRemote(object):' | |||
|
542 | 558 | return repo.ui.config(section, name, untrusted=untrusted) |
|
543 | 559 | |
|
544 | 560 | @reraise_safe_exceptions |
|
545 | def get_config_bool(self, wire, section, name, untrusted=False): | |
|
546 | repo = self._factory.repo(wire) | |
|
547 | return repo.ui.configbool(section, name, untrusted=untrusted) | |
|
561 | def is_large_file(self, wire, commit_id, path): | |
|
562 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
563 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
564 | def _is_large_file(_context_uid, _repo_id, _commit_id, _path): | |
|
565 | return largefiles.lfutil.isstandin(path) | |
|
566 | ||
|
567 | return _is_large_file(context_uid, repo_id, commit_id, path) | |
|
548 | 568 | |
|
549 | 569 | @reraise_safe_exceptions |
|
550 | def get_config_list(self, wire, section, name, untrusted=False): | |
|
551 | repo = self._factory.repo(wire) | |
|
552 | return repo.ui.configlist(section, name, untrusted=untrusted) | |
|
570 | def is_binary(self, wire, revision, path): | |
|
571 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
553 | 572 | |
|
554 | @reraise_safe_exceptions | |
|
555 |
def |
|
|
556 | return largefiles.lfutil.isstandin(path) | |
|
573 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
574 | def _is_binary(_repo_id, _sha, _path): | |
|
575 | repo = self._factory.repo(wire) | |
|
576 | ctx = self._get_ctx(repo, revision) | |
|
577 | fctx = ctx.filectx(path) | |
|
578 | return fctx.isbinary() | |
|
579 | ||
|
580 | return _is_binary(repo_id, revision, path) | |
|
557 | 581 | |
|
558 | 582 | @reraise_safe_exceptions |
|
559 | 583 | def in_largefiles_store(self, wire, sha): |
@@ -582,47 +606,36 b' class HgRemote(object):' | |||
|
582 | 606 | |
|
583 | 607 | @reraise_safe_exceptions |
|
584 | 608 | def lookup(self, wire, revision, both): |
|
585 | ||
|
586 | repo = self._factory.repo(wire) | |
|
587 | ||
|
588 | if isinstance(revision, int): | |
|
589 | # NOTE(marcink): | |
|
590 | # since Mercurial doesn't support negative indexes properly | |
|
591 | # we need to shift accordingly by one to get proper index, e.g | |
|
592 | # repo[-1] => repo[-2] | |
|
593 | # repo[0] => repo[-1] | |
|
594 | if revision <= 0: | |
|
595 | revision = revision + -1 | |
|
596 | try: | |
|
597 | ctx = self._get_ctx(repo, revision) | |
|
598 | except (TypeError, RepoLookupError) as e: | |
|
599 | e._org_exc_tb = traceback.format_exc() | |
|
600 | raise exceptions.LookupException(e)(revision) | |
|
601 | except LookupError as e: | |
|
602 | e._org_exc_tb = traceback.format_exc() | |
|
603 | raise exceptions.LookupException(e)(e.name) | |
|
609 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
610 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
611 | def _lookup(_context_uid, _repo_id, _revision, _both): | |
|
604 | 612 | |
|
605 | if not both: | |
|
606 | return ctx.hex() | |
|
607 | ||
|
608 | ctx = repo[ctx.hex()] | |
|
609 | return ctx.hex(), ctx.rev() | |
|
613 | repo = self._factory.repo(wire) | |
|
614 | rev = _revision | |
|
615 | if isinstance(rev, int): | |
|
616 | # NOTE(marcink): | |
|
617 | # since Mercurial doesn't support negative indexes properly | |
|
618 | # we need to shift accordingly by one to get proper index, e.g | |
|
619 | # repo[-1] => repo[-2] | |
|
620 | # repo[0] => repo[-1] | |
|
621 | if rev <= 0: | |
|
622 | rev = rev + -1 | |
|
623 | try: | |
|
624 | ctx = self._get_ctx(repo, rev) | |
|
625 | except (TypeError, RepoLookupError) as e: | |
|
626 | e._org_exc_tb = traceback.format_exc() | |
|
627 | raise exceptions.LookupException(e)(rev) | |
|
628 | except LookupError as e: | |
|
629 | e._org_exc_tb = traceback.format_exc() | |
|
630 | raise exceptions.LookupException(e)(e.name) | |
|
610 | 631 | |
|
611 | @reraise_safe_exceptions | |
|
612 | def pull(self, wire, url, commit_ids=None): | |
|
613 | repo = self._factory.repo(wire) | |
|
614 | # Disable any prompts for this repo | |
|
615 | repo.ui.setconfig('ui', 'interactive', 'off', '-y') | |
|
632 | if not both: | |
|
633 | return ctx.hex() | |
|
616 | 634 | |
|
617 | remote = peer(repo, {}, url) | |
|
618 | # Disable any prompts for this remote | |
|
619 | remote.ui.setconfig('ui', 'interactive', 'off', '-y') | |
|
635 | ctx = repo[ctx.hex()] | |
|
636 | return ctx.hex(), ctx.rev() | |
|
620 | 637 | |
|
621 | if commit_ids: | |
|
622 | commit_ids = [bin(commit_id) for commit_id in commit_ids] | |
|
623 | ||
|
624 | return exchange.pull( | |
|
625 | repo, remote, heads=commit_ids, force=None).cgresult | |
|
638 | return _lookup(context_uid, repo_id, revision, both) | |
|
626 | 639 | |
|
627 | 640 | @reraise_safe_exceptions |
|
628 | 641 | def sync_push(self, wire, url): |
@@ -649,10 +662,16 b' class HgRemote(object):' | |||
|
649 | 662 | return ctx.rev() |
|
650 | 663 | |
|
651 | 664 | @reraise_safe_exceptions |
|
652 | def rev_range(self, wire, filter): | |
|
653 | repo = self._factory.repo(wire) | |
|
654 | revisions = [rev for rev in revrange(repo, filter)] | |
|
655 | return revisions | |
|
665 | def rev_range(self, wire, commit_filter): | |
|
666 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
667 | ||
|
668 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
669 | def _rev_range(_context_uid, _repo_id, _filter): | |
|
670 | repo = self._factory.repo(wire) | |
|
671 | revisions = [rev for rev in revrange(repo, commit_filter)] | |
|
672 | return revisions | |
|
673 | ||
|
674 | return _rev_range(context_uid, repo_id, sorted(commit_filter)) | |
|
656 | 675 | |
|
657 | 676 | @reraise_safe_exceptions |
|
658 | 677 | def rev_range_hash(self, wire, node): |
@@ -684,13 +703,6 b' class HgRemote(object):' | |||
|
684 | 703 | return list(repo.revs(rev_spec, *args)) |
|
685 | 704 | |
|
686 | 705 | @reraise_safe_exceptions |
|
687 | def strip(self, wire, revision, update, backup): | |
|
688 | repo = self._factory.repo(wire) | |
|
689 | ctx = self._get_ctx(repo, revision) | |
|
690 | hgext_strip( | |
|
691 | repo.baseui, repo, ctx.node(), update=update, backup=backup) | |
|
692 | ||
|
693 | @reraise_safe_exceptions | |
|
694 | 706 | def verify(self, wire,): |
|
695 | 707 | repo = self._factory.repo(wire) |
|
696 | 708 | baseui = self._factory._create_config(wire['config']) |
@@ -706,24 +718,31 b' class HgRemote(object):' | |||
|
706 | 718 | return output.getvalue() |
|
707 | 719 | |
|
708 | 720 | @reraise_safe_exceptions |
|
709 | def tag(self, wire, name, revision, message, local, user, | |
|
710 | tag_time, tag_timezone): | |
|
721 | def hg_update_cache(self, wire,): | |
|
711 | 722 | repo = self._factory.repo(wire) |
|
712 | ctx = self._get_ctx(repo, revision) | |
|
713 | node = ctx.node() | |
|
723 | baseui = self._factory._create_config(wire['config']) | |
|
724 | baseui.setconfig('ui', 'quiet', 'false') | |
|
725 | output = io.BytesIO() | |
|
714 | 726 | |
|
715 | date = (tag_time, tag_timezone) | |
|
716 | try: | |
|
717 | hg_tag.tag(repo, name, node, message, local, user, date) | |
|
718 | except Abort as e: | |
|
719 | log.exception("Tag operation aborted") | |
|
720 | # Exception can contain unicode which we convert | |
|
721 | raise exceptions.AbortException(e)(repr(e)) | |
|
727 | def write(data, **unused_kwargs): | |
|
728 | output.write(data) | |
|
729 | baseui.write = write | |
|
730 | ||
|
731 | repo.ui = baseui | |
|
732 | with repo.wlock(), repo.lock(): | |
|
733 | repo.updatecaches(full=True) | |
|
734 | ||
|
735 | return output.getvalue() | |
|
722 | 736 | |
|
723 | 737 | @reraise_safe_exceptions |
|
724 | 738 | def tags(self, wire): |
|
725 | repo = self._factory.repo(wire) | |
|
726 | return repo.tags() | |
|
739 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
740 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
741 | def _tags(_context_uid, _repo_id): | |
|
742 | repo = self._factory.repo(wire) | |
|
743 | return repo.tags() | |
|
744 | ||
|
745 | return _tags(context_uid, repo_id) | |
|
727 | 746 | |
|
728 | 747 | @reraise_safe_exceptions |
|
729 | 748 | def update(self, wire, node=None, clean=False): |
@@ -744,24 +763,6 b' class HgRemote(object):' | |||
|
744 | 763 | return output.getvalue() |
|
745 | 764 | |
|
746 | 765 | @reraise_safe_exceptions |
|
747 | def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, | |
|
748 | hooks=True): | |
|
749 | repo = self._factory.repo(wire) | |
|
750 | baseui = self._factory._create_config(wire['config'], hooks=hooks) | |
|
751 | ||
|
752 | # Mercurial internally has a lot of logic that checks ONLY if | |
|
753 | # option is defined, we just pass those if they are defined then | |
|
754 | opts = {} | |
|
755 | if bookmark: | |
|
756 | opts['bookmark'] = bookmark | |
|
757 | if branch: | |
|
758 | opts['branch'] = branch | |
|
759 | if revision: | |
|
760 | opts['rev'] = revision | |
|
761 | ||
|
762 | commands.pull(baseui, repo, source, **opts) | |
|
763 | ||
|
764 | @reraise_safe_exceptions | |
|
765 | 766 | def heads(self, wire, branch=None): |
|
766 | 767 | repo = self._factory.repo(wire) |
|
767 | 768 | baseui = self._factory._create_config(wire['config']) |
@@ -788,14 +789,130 b' class HgRemote(object):' | |||
|
788 | 789 | return hex(a) |
|
789 | 790 | |
|
790 | 791 | @reraise_safe_exceptions |
|
791 | def push(self, wire, revisions, dest_path, hooks=True, | |
|
792 | push_branches=False): | |
|
792 | def clone(self, wire, source, dest, update_after_clone=False, hooks=True): | |
|
793 | baseui = self._factory._create_config(wire["config"], hooks=hooks) | |
|
794 | clone(baseui, source, dest, noupdate=not update_after_clone) | |
|
795 | ||
|
796 | @reraise_safe_exceptions | |
|
797 | def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated): | |
|
798 | ||
|
799 | repo = self._factory.repo(wire) | |
|
800 | baseui = self._factory._create_config(wire['config']) | |
|
801 | publishing = baseui.configbool('phases', 'publish') | |
|
802 | if publishing: | |
|
803 | new_commit = 'public' | |
|
804 | else: | |
|
805 | new_commit = 'draft' | |
|
806 | ||
|
807 | def _filectxfn(_repo, ctx, path): | |
|
808 | """ | |
|
809 | Marks given path as added/changed/removed in a given _repo. This is | |
|
810 | for internal mercurial commit function. | |
|
811 | """ | |
|
812 | ||
|
813 | # check if this path is removed | |
|
814 | if path in removed: | |
|
815 | # returning None is a way to mark node for removal | |
|
816 | return None | |
|
817 | ||
|
818 | # check if this path is added | |
|
819 | for node in updated: | |
|
820 | if node['path'] == path: | |
|
821 | return memfilectx( | |
|
822 | _repo, | |
|
823 | changectx=ctx, | |
|
824 | path=node['path'], | |
|
825 | data=node['content'], | |
|
826 | islink=False, | |
|
827 | isexec=bool(node['mode'] & stat.S_IXUSR), | |
|
828 | copysource=False) | |
|
829 | ||
|
830 | raise exceptions.AbortException()( | |
|
831 | "Given path haven't been marked as added, " | |
|
832 | "changed or removed (%s)" % path) | |
|
833 | ||
|
834 | with repo.ui.configoverride({('phases', 'new-commit'): new_commit}): | |
|
835 | ||
|
836 | commit_ctx = memctx( | |
|
837 | repo=repo, | |
|
838 | parents=parents, | |
|
839 | text=message, | |
|
840 | files=files, | |
|
841 | filectxfn=_filectxfn, | |
|
842 | user=user, | |
|
843 | date=(commit_time, commit_timezone), | |
|
844 | extra=extra) | |
|
845 | ||
|
846 | n = repo.commitctx(commit_ctx) | |
|
847 | new_id = hex(n) | |
|
848 | ||
|
849 | return new_id | |
|
850 | ||
|
851 | @reraise_safe_exceptions | |
|
852 | def pull(self, wire, url, commit_ids=None): | |
|
853 | repo = self._factory.repo(wire) | |
|
854 | # Disable any prompts for this repo | |
|
855 | repo.ui.setconfig('ui', 'interactive', 'off', '-y') | |
|
856 | ||
|
857 | remote = peer(repo, {}, url) | |
|
858 | # Disable any prompts for this remote | |
|
859 | remote.ui.setconfig('ui', 'interactive', 'off', '-y') | |
|
860 | ||
|
861 | if commit_ids: | |
|
862 | commit_ids = [bin(commit_id) for commit_id in commit_ids] | |
|
863 | ||
|
864 | return exchange.pull( | |
|
865 | repo, remote, heads=commit_ids, force=None).cgresult | |
|
866 | ||
|
867 | @reraise_safe_exceptions | |
|
868 | def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True): | |
|
869 | repo = self._factory.repo(wire) | |
|
870 | baseui = self._factory._create_config(wire['config'], hooks=hooks) | |
|
871 | ||
|
872 | # Mercurial internally has a lot of logic that checks ONLY if | |
|
873 | # option is defined, we just pass those if they are defined then | |
|
874 | opts = {} | |
|
875 | if bookmark: | |
|
876 | opts['bookmark'] = bookmark | |
|
877 | if branch: | |
|
878 | opts['branch'] = branch | |
|
879 | if revision: | |
|
880 | opts['rev'] = revision | |
|
881 | ||
|
882 | commands.pull(baseui, repo, source, **opts) | |
|
883 | ||
|
884 | @reraise_safe_exceptions | |
|
885 | def push(self, wire, revisions, dest_path, hooks=True, push_branches=False): | |
|
793 | 886 | repo = self._factory.repo(wire) |
|
794 | 887 | baseui = self._factory._create_config(wire['config'], hooks=hooks) |
|
795 | 888 | commands.push(baseui, repo, dest=dest_path, rev=revisions, |
|
796 | 889 | new_branch=push_branches) |
|
797 | 890 | |
|
798 | 891 | @reraise_safe_exceptions |
|
892 | def strip(self, wire, revision, update, backup): | |
|
893 | repo = self._factory.repo(wire) | |
|
894 | ctx = self._get_ctx(repo, revision) | |
|
895 | hgext_strip( | |
|
896 | repo.baseui, repo, ctx.node(), update=update, backup=backup) | |
|
897 | ||
|
898 | @reraise_safe_exceptions | |
|
899 | def get_unresolved_files(self, wire): | |
|
900 | repo = self._factory.repo(wire) | |
|
901 | ||
|
902 | log.debug('Calculating unresolved files for repo: %s', repo) | |
|
903 | output = io.BytesIO() | |
|
904 | ||
|
905 | def write(data, **unused_kwargs): | |
|
906 | output.write(data) | |
|
907 | ||
|
908 | baseui = self._factory._create_config(wire['config']) | |
|
909 | baseui.write = write | |
|
910 | ||
|
911 | commands.resolve(baseui, repo, list=True) | |
|
912 | unresolved = output.getvalue().splitlines(0) | |
|
913 | return unresolved | |
|
914 | ||
|
915 | @reraise_safe_exceptions | |
|
799 | 916 | def merge(self, wire, revision): |
|
800 | 917 | repo = self._factory.repo(wire) |
|
801 | 918 | baseui = self._factory._create_config(wire['config']) |
@@ -828,14 +945,31 b' class HgRemote(object):' | |||
|
828 | 945 | repo.ui.setconfig('ui', 'username', username) |
|
829 | 946 | commands.commit(baseui, repo, message=message, close_branch=close_branch) |
|
830 | 947 | |
|
831 | ||
|
832 | 948 | @reraise_safe_exceptions |
|
833 | 949 | def rebase(self, wire, source=None, dest=None, abort=False): |
|
834 | 950 | repo = self._factory.repo(wire) |
|
835 | 951 | baseui = self._factory._create_config(wire['config']) |
|
836 | 952 | repo.ui.setconfig('ui', 'merge', 'internal:dump') |
|
837 | rebase.rebase( | |
|
838 | baseui, repo, base=source, dest=dest, abort=abort, keep=not abort) | |
|
953 | # In case sub repositories are used, mercurial prompts the user in | |
|
954 | # case of merge conflicts or different sub repository sources. By | |
|
955 | # setting the interactive flag to `False` mercurial doesn't prompt the | |
|
956 | # user but instead uses a default value. | |
|
957 | repo.ui.setconfig('ui', 'interactive', False) | |
|
958 | rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort) | |
|
959 | ||
|
960 | @reraise_safe_exceptions | |
|
961 | def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone): | |
|
962 | repo = self._factory.repo(wire) | |
|
963 | ctx = self._get_ctx(repo, revision) | |
|
964 | node = ctx.node() | |
|
965 | ||
|
966 | date = (tag_time, tag_timezone) | |
|
967 | try: | |
|
968 | hg_tag.tag(repo, name, node, message, local, user, date) | |
|
969 | except Abort as e: | |
|
970 | log.exception("Tag operation aborted") | |
|
971 | # Exception can contain unicode which we convert | |
|
972 | raise exceptions.AbortException(e)(repr(e)) | |
|
839 | 973 | |
|
840 | 974 | @reraise_safe_exceptions |
|
841 | 975 | def bookmark(self, wire, bookmark, revision=None): |
@@ -33,7 +33,6 b' import mercurial.node' | |||
|
33 | 33 | import simplejson as json |
|
34 | 34 | |
|
35 | 35 | from vcsserver import exceptions, subprocessio, settings |
|
36 | from vcsserver.hgcompat import get_ctx | |
|
37 | 36 | |
|
38 | 37 | log = logging.getLogger(__name__) |
|
39 | 38 | |
@@ -81,6 +80,12 b' class HooksDummyClient(object):' | |||
|
81 | 80 | return getattr(hooks, hook_name)(extras) |
|
82 | 81 | |
|
83 | 82 | |
|
83 | class HooksShadowRepoClient(object): | |
|
84 | ||
|
85 | def __call__(self, hook_name, extras): | |
|
86 | return {'output': '', 'status': 0} | |
|
87 | ||
|
88 | ||
|
84 | 89 | class RemoteMessageWriter(object): |
|
85 | 90 | """Writer base class.""" |
|
86 | 91 | def write(self, message): |
@@ -141,9 +146,12 b' def _handle_exception(result):' | |||
|
141 | 146 | |
|
142 | 147 | |
|
143 | 148 | def _get_hooks_client(extras): |
|
144 | if 'hooks_uri' in extras: | |
|
145 | protocol = extras.get('hooks_protocol') | |
|
149 | hooks_uri = extras.get('hooks_uri') | |
|
150 | is_shadow_repo = extras.get('is_shadow_repo') | |
|
151 | if hooks_uri: | |
|
146 | 152 | return HooksHttpClient(extras['hooks_uri']) |
|
153 | elif is_shadow_repo: | |
|
154 | return HooksShadowRepoClient() | |
|
147 | 155 | else: |
|
148 | 156 | return HooksDummyClient(extras['hooks_module']) |
|
149 | 157 | |
@@ -175,6 +183,7 b' def _extras_from_ui(ui):' | |||
|
175 | 183 | |
|
176 | 184 | |
|
177 | 185 | def _rev_range_hash(repo, node, check_heads=False): |
|
186 | from vcsserver.hgcompat import get_ctx | |
|
178 | 187 | |
|
179 | 188 | commits = [] |
|
180 | 189 | revs = [] |
@@ -194,6 +203,7 b' def _rev_range_hash(repo, node, check_he' | |||
|
194 | 203 | |
|
195 | 204 | |
|
196 | 205 | def _check_heads(repo, start, end, commits): |
|
206 | from vcsserver.hgcompat import get_ctx | |
|
197 | 207 | changelog = repo.changelog |
|
198 | 208 | parents = set() |
|
199 | 209 | |
@@ -384,6 +394,7 b' def post_push_ssh(ui, repo, node, **kwar' | |||
|
384 | 394 | |
|
385 | 395 | |
|
386 | 396 | def key_push(ui, repo, **kwargs): |
|
397 | from vcsserver.hgcompat import get_ctx | |
|
387 | 398 | if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks': |
|
388 | 399 | # store new bookmarks in our UI object propagated later to post_push |
|
389 | 400 | ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks() |
@@ -25,6 +25,7 b' import wsgiref.util' | |||
|
25 | 25 | import traceback |
|
26 | 26 | import tempfile |
|
27 | 27 | from itertools import chain |
|
28 | from cStringIO import StringIO | |
|
28 | 29 | |
|
29 | 30 | import simplejson as json |
|
30 | 31 | import msgpack |
@@ -32,7 +33,9 b' from pyramid.config import Configurator' | |||
|
32 | 33 | from pyramid.settings import asbool, aslist |
|
33 | 34 | from pyramid.wsgi import wsgiapp |
|
34 | 35 | from pyramid.compat import configparser |
|
36 | from pyramid.response import Response | |
|
35 | 37 | |
|
38 | from vcsserver.utils import safe_int | |
|
36 | 39 | |
|
37 | 40 | log = logging.getLogger(__name__) |
|
38 | 41 | |
@@ -114,8 +117,8 b' def _string_setting(settings, name, defa' | |||
|
114 | 117 | |
|
115 | 118 | |
|
116 | 119 | class VCS(object): |
|
117 | def __init__(self, locale=None, cache_config=None): | |
|
118 | self.locale = locale | |
|
120 | def __init__(self, locale_conf=None, cache_config=None): | |
|
121 | self.locale = locale_conf | |
|
119 | 122 | self.cache_config = cache_config |
|
120 | 123 | self._configure_locale() |
|
121 | 124 | |
@@ -232,8 +235,8 b' class HTTPApplication(object):' | |||
|
232 | 235 | self.global_config = global_config |
|
233 | 236 | self.config.include('vcsserver.lib.rc_cache') |
|
234 | 237 | |
|
235 | locale = settings.get('locale', '') or 'en_US.UTF-8' | |
|
236 | vcs = VCS(locale=locale, cache_config=settings) | |
|
238 | settings_locale = settings.get('locale', '') or 'en_US.UTF-8' | |
|
239 | vcs = VCS(locale_conf=settings_locale, cache_config=settings) | |
|
237 | 240 | self._remotes = { |
|
238 | 241 | 'hg': vcs._hg_remote, |
|
239 | 242 | 'git': vcs._git_remote, |
@@ -290,15 +293,15 b' class HTTPApplication(object):' | |||
|
290 | 293 | _string_setting( |
|
291 | 294 | settings, |
|
292 | 295 | 'rc_cache.repo_object.backend', |
|
293 | 'dogpile.cache.rc.memory_lru', lower=False) | |
|
296 | 'dogpile.cache.rc.file_namespace', lower=False) | |
|
294 | 297 | _int_setting( |
|
295 | 298 | settings, |
|
296 | 299 | 'rc_cache.repo_object.expiration_time', |
|
297 | 300) | |
|
298 | _int_setting( | |
|
300 | 30 * 24 * 60 * 60) | |
|
301 | _string_setting( | |
|
299 | 302 | settings, |
|
300 | 'rc_cache.repo_object.max_size', | |
|
301 | 1024) | |
|
303 | 'rc_cache.repo_object.arguments.filename', | |
|
304 | os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False) | |
|
302 | 305 | |
|
303 | 306 | def _configure(self): |
|
304 | 307 | self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory) |
@@ -307,7 +310,14 b' class HTTPApplication(object):' | |||
|
307 | 310 | self.config.add_route('status', '/status') |
|
308 | 311 | self.config.add_route('hg_proxy', '/proxy/hg') |
|
309 | 312 | self.config.add_route('git_proxy', '/proxy/git') |
|
313 | ||
|
314 | # rpc methods | |
|
310 | 315 | self.config.add_route('vcs', '/{backend}') |
|
316 | ||
|
317 | # streaming rpc remote methods | |
|
318 | self.config.add_route('vcs_stream', '/{backend}/stream') | |
|
319 | ||
|
320 | # vcs operations clone/push as streaming | |
|
311 | 321 | self.config.add_route('stream_git', '/stream/git/*repo_name') |
|
312 | 322 | self.config.add_route('stream_hg', '/stream/hg/*repo_name') |
|
313 | 323 | |
@@ -318,6 +328,8 b' class HTTPApplication(object):' | |||
|
318 | 328 | self.config.add_view(self.git_proxy(), route_name='git_proxy') |
|
319 | 329 | self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack', |
|
320 | 330 | vcs_view=self._remotes) |
|
331 | self.config.add_view(self.vcs_stream_view, route_name='vcs_stream', | |
|
332 | vcs_view=self._remotes) | |
|
321 | 333 | |
|
322 | 334 | self.config.add_view(self.hg_stream(), route_name='stream_hg') |
|
323 | 335 | self.config.add_view(self.git_stream(), route_name='stream_git') |
@@ -329,17 +341,20 b' class HTTPApplication(object):' | |||
|
329 | 341 | self.config.add_view(self.handle_vcs_exception, context=Exception) |
|
330 | 342 | |
|
331 | 343 | self.config.add_tween( |
|
332 | 'vcsserver.tweens.RequestWrapperTween', | |
|
344 | 'vcsserver.tweens.request_wrapper.RequestWrapperTween', | |
|
333 | 345 | ) |
|
346 | self.config.add_request_method( | |
|
347 | 'vcsserver.lib.request_counter.get_request_counter', | |
|
348 | 'request_count') | |
|
334 | 349 | |
|
335 | 350 | def wsgi_app(self): |
|
336 | 351 | return self.config.make_wsgi_app() |
|
337 | 352 | |
|
338 | def vcs_view(self, request): | |
|
353 | def _vcs_view_params(self, request): | |
|
339 | 354 | remote = self._remotes[request.matchdict['backend']] |
|
340 | 355 | payload = msgpack.unpackb(request.body, use_list=True) |
|
341 | 356 | method = payload.get('method') |
|
342 | params = payload.get('params') | |
|
357 | params = payload['params'] | |
|
343 | 358 | wire = params.get('wire') |
|
344 | 359 | args = params.get('args') |
|
345 | 360 | kwargs = params.get('kwargs') |
@@ -351,9 +366,28 b' class HTTPApplication(object):' | |||
|
351 | 366 | except KeyError: |
|
352 | 367 | pass |
|
353 | 368 | args.insert(0, wire) |
|
369 | repo_state_uid = wire.get('repo_state_uid') if wire else None | |
|
354 | 370 | |
|
355 | log.debug('method called:%s with kwargs:%s context_uid: %s', | |
|
356 | method, kwargs, context_uid) | |
|
371 | # NOTE(marcink): trading complexity for slight performance | |
|
372 | if log.isEnabledFor(logging.DEBUG): | |
|
373 | no_args_methods = [ | |
|
374 | 'archive_repo' | |
|
375 | ] | |
|
376 | if method in no_args_methods: | |
|
377 | call_args = '' | |
|
378 | else: | |
|
379 | call_args = args[1:] | |
|
380 | ||
|
381 | log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s', | |
|
382 | method, call_args, kwargs, context_uid, repo_state_uid) | |
|
383 | ||
|
384 | return payload, remote, method, args, kwargs | |
|
385 | ||
|
386 | def vcs_view(self, request): | |
|
387 | ||
|
388 | payload, remote, method, args, kwargs = self._vcs_view_params(request) | |
|
389 | payload_id = payload.get('id') | |
|
390 | ||
|
357 | 391 | try: |
|
358 | 392 | resp = getattr(remote, method)(*args, **kwargs) |
|
359 | 393 | except Exception as e: |
@@ -380,7 +414,7 b' class HTTPApplication(object):' | |||
|
380 | 414 | type_ = None |
|
381 | 415 | |
|
382 | 416 | resp = { |
|
383 | 'id': payload.get('id'), | |
|
417 | 'id': payload_id, | |
|
384 | 418 | 'error': { |
|
385 | 419 | 'message': e.message, |
|
386 | 420 | 'traceback': tb_info, |
@@ -395,12 +429,36 b' class HTTPApplication(object):' | |||
|
395 | 429 | pass |
|
396 | 430 | else: |
|
397 | 431 | resp = { |
|
398 | 'id': payload.get('id'), | |
|
432 | 'id': payload_id, | |
|
399 | 433 | 'result': resp |
|
400 | 434 | } |
|
401 | 435 | |
|
402 | 436 | return resp |
|
403 | 437 | |
|
438 | def vcs_stream_view(self, request): | |
|
439 | payload, remote, method, args, kwargs = self._vcs_view_params(request) | |
|
440 | # this method has a stream: marker we remove it here | |
|
441 | method = method.split('stream:')[-1] | |
|
442 | chunk_size = safe_int(payload.get('chunk_size')) or 4096 | |
|
443 | ||
|
444 | try: | |
|
445 | resp = getattr(remote, method)(*args, **kwargs) | |
|
446 | except Exception as e: | |
|
447 | raise | |
|
448 | ||
|
449 | def get_chunked_data(method_resp): | |
|
450 | stream = StringIO(method_resp) | |
|
451 | while 1: | |
|
452 | chunk = stream.read(chunk_size) | |
|
453 | if not chunk: | |
|
454 | break | |
|
455 | yield chunk | |
|
456 | ||
|
457 | response = Response(app_iter=get_chunked_data(resp)) | |
|
458 | response.content_type = 'application/octet-stream' | |
|
459 | ||
|
460 | return response | |
|
461 | ||
|
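The streaming view above turns the remote method's result into a generator so Pyramid emits it as an `application/octet-stream` body in `chunk_size` pieces. A self-contained sketch of the same idea with illustrative names (not the actual vcsserver routing):

    from io import BytesIO
    from wsgiref.simple_server import make_server
    from pyramid.config import Configurator
    from pyramid.response import Response

    CHUNK_SIZE = 4096

    def stream_view(request):
        data = b'x' * 10000          # stand-in for the remote method's return value

        def chunks(payload):
            buf = BytesIO(payload)
            while True:
                piece = buf.read(CHUNK_SIZE)
                if not piece:
                    break
                yield piece

        response = Response(app_iter=chunks(data))
        response.content_type = 'application/octet-stream'
        return response

    if __name__ == '__main__':
        config = Configurator()
        config.add_route('stream', '/stream')
        config.add_view(stream_view, route_name='stream')
        make_server('127.0.0.1', 8080, config.make_wsgi_app()).serve_forever()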
404 | 462 | def status_view(self, request): |
|
405 | 463 | import vcsserver |
|
406 | 464 | return {'status': 'OK', 'vcsserver_version': vcsserver.__version__, |
@@ -410,23 +468,31 b' class HTTPApplication(object):' | |||
|
410 | 468 | import vcsserver |
|
411 | 469 | |
|
412 | 470 | payload = msgpack.unpackb(request.body, use_list=True) |
|
471 | server_config, app_config = {}, {} | |
|
413 | 472 | |
|
414 | 473 | try: |
|
415 | 474 | path = self.global_config['__file__'] |
|
416 | config = configparser.ConfigParser() | |
|
475 | config = configparser.RawConfigParser() | |
|
476 | ||
|
417 | 477 | config.read(path) |
|
418 | parsed_ini = config | |
|
419 | if parsed_ini.has_section('server:main'): | |
|
420 | parsed_ini = dict(parsed_ini.items('server:main')) | |
|
478 | ||
|
479 | if config.has_section('server:main'): | |
|
480 | server_config = dict(config.items('server:main')) | |
|
481 | if config.has_section('app:main'): | |
|
482 | app_config = dict(config.items('app:main')) | |
|
483 | ||
|
421 | 484 | except Exception: |
|
422 | 485 | log.exception('Failed to read .ini file for display') |
|
423 | parsed_ini = {} | |
|
486 | ||
|
487 | environ = os.environ.items() | |
|
424 | 488 | |
|
425 | 489 | resp = { |
|
426 | 490 | 'id': payload.get('id'), |
|
427 | 491 | 'result': dict( |
|
428 | 492 | version=vcsserver.__version__, |
|
429 | config=parsed_ini, | |
|
493 | config=server_config, | |
|
494 | app_config=app_config, | |
|
495 | environ=environ, | |
|
430 | 496 | payload=payload, |
|
431 | 497 | ) |
|
432 | 498 | } |
@@ -434,14 +500,13 b' class HTTPApplication(object):' | |||
|
434 | 500 | |
|
435 | 501 | def _msgpack_renderer_factory(self, info): |
|
436 | 502 | def _render(value, system): |
|
437 | value = msgpack.packb(value) | |
|
438 | 503 | request = system.get('request') |
|
439 | 504 | if request is not None: |
|
440 | 505 | response = request.response |
|
441 | 506 | ct = response.content_type |
|
442 | 507 | if ct == response.default_content_type: |
|
443 | 508 | response.content_type = 'application/x-msgpack' |
|
444 | return value | |
|
509 | return msgpack.packb(value) | |
|
445 | 510 | return _render |
|
446 | 511 | |
|
447 | 512 | def set_env_from_config(self, environ, config): |
@@ -22,10 +22,23 b' register_backend(' | |||
|
22 | 22 | "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends", |
|
23 | 23 | "LRUMemoryBackend") |
|
24 | 24 | |
|
25 | register_backend( | |
|
26 | "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends", | |
|
27 | "FileNamespaceBackend") | |
|
28 | ||
|
29 | register_backend( | |
|
30 | "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends", | |
|
31 | "RedisPickleBackend") | |
|
32 | ||
|
33 | register_backend( | |
|
34 | "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends", | |
|
35 | "RedisMsgPackBackend") | |
|
36 | ||
|
37 | ||
|
25 | 38 | log = logging.getLogger(__name__) |
|
26 | 39 | |
|
27 | 40 | from . import region_meta |
|
28 | from .util import get_default_cache_settings, key_generator, make_region |

41 | from .utils import (get_default_cache_settings, backend_key_generator, make_region) | |
|
29 | 42 | |
|
30 | 43 | |
|
31 | 44 | def configure_dogpile_cache(settings): |
@@ -46,13 +59,12 b' def configure_dogpile_cache(settings):' | |||
|
46 | 59 | for region_name in avail_regions: |
|
47 | 60 | new_region = make_region( |
|
48 | 61 | name=region_name, |
|
49 | function_key_generator=key_generator |

62 | function_key_generator=None | |
|
50 | 63 | ) |
|
51 | 64 | |
|
52 | 65 | new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name)) |
|
53 | ||
|
54 | log.debug('dogpile: registering a new region %s[%s]', | |
|
55 | region_name, new_region.__dict__) | |
|
66 | new_region.function_key_generator = backend_key_generator(new_region.actual_backend) | |
|
67 | log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__) | |
|
56 | 68 | region_meta.dogpile_cache_regions[region_name] = new_region |
|
57 | 69 | |
|
58 | 70 |
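
configure_dogpile_cache builds one region per name found under the rc_cache.* settings prefix and, once the backend is known, swaps in the backend-aware key generator. A rough standalone equivalent using plain dogpile (region name, backend and values are illustrative; in a real .ini the rc.* backends registered above would be referenced the same way):

    from dogpile.cache import make_region

    settings = {
        'rc_cache.repo_object.backend': 'dogpile.cache.memory',
        'rc_cache.repo_object.expiration_time': 300,
    }

    region = make_region(name='repo_object')
    region.configure_from_config(settings, 'rc_cache.repo_object.')

    region.set('some_key', 'some_value')
    assert region.get('some_key') == 'some_value'
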
@@ -15,9 +15,20 b'' | |||
|
15 | 15 | # along with this program; if not, write to the Free Software Foundation, |
|
16 | 16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
17 | 17 | |
|
18 | import time | |
|
19 | import errno | |
|
18 | 20 | import logging |
|
19 | 21 | |
|
22 | import msgpack | |
|
23 | import redis | |
|
24 | ||
|
25 | from dogpile.cache.api import CachedValue | |
|
20 | 26 | from dogpile.cache.backends import memory as memory_backend |
|
27 | from dogpile.cache.backends import file as file_backend | |
|
28 | from dogpile.cache.backends import redis as redis_backend | |
|
29 | from dogpile.cache.backends.file import NO_VALUE, compat, FileLock | |
|
30 | from dogpile.cache.util import memoized_property | |
|
31 | ||
|
21 | 32 | from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug |
|
22 | 33 | |
|
23 | 34 | |
@@ -27,6 +38,7 b' log = logging.getLogger(__name__)' | |||
|
27 | 38 | |
|
28 | 39 | |
|
29 | 40 | class LRUMemoryBackend(memory_backend.MemoryBackend): |
|
41 | key_prefix = 'lru_mem_backend' | |
|
30 | 42 | pickle_values = False |
|
31 | 43 | |
|
32 | 44 | def __init__(self, arguments): |
@@ -49,3 +61,193 b' class LRUMemoryBackend(memory_backend.Me' | |||
|
49 | 61 | def delete_multi(self, keys): |
|
50 | 62 | for key in keys: |
|
51 | 63 | self.delete(key) |
|
64 | ||
|
65 | ||
|
66 | class PickleSerializer(object): | |
|
67 | ||
|
68 | def _dumps(self, value, safe=False): | |
|
69 | try: | |
|
70 | return compat.pickle.dumps(value) | |
|
71 | except Exception: | |
|
72 | if safe: | |
|
73 | return NO_VALUE | |
|
74 | else: | |
|
75 | raise | |
|
76 | ||
|
77 | def _loads(self, value, safe=True): | |
|
78 | try: | |
|
79 | return compat.pickle.loads(value) | |
|
80 | except Exception: | |
|
81 | if safe: | |
|
82 | return NO_VALUE | |
|
83 | else: | |
|
84 | raise | |
|
85 | ||
|
86 | ||
|
87 | class MsgPackSerializer(object): | |
|
88 | ||
|
89 | def _dumps(self, value, safe=False): | |
|
90 | try: | |
|
91 | return msgpack.packb(value) | |
|
92 | except Exception: | |
|
93 | if safe: | |
|
94 | return NO_VALUE | |
|
95 | else: | |
|
96 | raise | |
|
97 | ||
|
98 | def _loads(self, value, safe=True): | |
|
99 | """ | |
|
100 | pickle maintains the `CachedValue` wrapper of the tuple; |
|
101 | msgpack does not, so it must be added back in. | |
|
102 | """ | |
|
103 | try: | |
|
104 | value = msgpack.unpackb(value, use_list=False) | |
|
105 | return CachedValue(*value) | |
|
106 | except Exception: | |
|
107 | if safe: | |
|
108 | return NO_VALUE | |
|
109 | else: | |
|
110 | raise | |
|
111 | ||
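
As the docstring notes, msgpack flattens the CachedValue wrapper into a plain tuple, so _loads has to rebuild it. A small illustration of that round trip (the metadata dict mirrors what dogpile stores; the concrete values are made up):

    import msgpack
    from dogpile.cache.api import CachedValue

    value = CachedValue(42, {'ct': 1234567890.0, 'v': 1})
    packed = msgpack.packb(value)               # serialized as a plain 2-tuple

    raw = msgpack.unpackb(packed, use_list=False)
    assert not isinstance(raw, CachedValue)     # the wrapper type is gone...
    restored = CachedValue(*raw)                # ...so it is added back, as _loads() does
    assert restored.payload == 42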
|
112 | ||
|
113 | import fcntl | |
|
114 | flock_org = fcntl.flock | |
|
115 | ||
|
116 | ||
|
117 | class CustomLockFactory(FileLock): | |
|
118 | ||
|
119 | pass | |
|
120 | ||
|
121 | ||
|
122 | class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend): | |
|
123 | key_prefix = 'file_backend' | |
|
124 | ||
|
125 | def __init__(self, arguments): | |
|
126 | arguments['lock_factory'] = CustomLockFactory | |
|
127 | super(FileNamespaceBackend, self).__init__(arguments) | |
|
128 | ||
|
129 | def __repr__(self): | |
|
130 | return '{} `{}`'.format(self.__class__, self.filename) | |
|
131 | ||
|
132 | def list_keys(self, prefix=''): | |
|
133 | prefix = '{}:{}'.format(self.key_prefix, prefix) | |
|
134 | ||
|
135 | def cond(v): | |
|
136 | if not prefix: | |
|
137 | return True | |
|
138 | ||
|
139 | if v.startswith(prefix): | |
|
140 | return True | |
|
141 | return False | |
|
142 | ||
|
143 | with self._dbm_file(True) as dbm: | |
|
144 | ||
|
145 | return filter(cond, dbm.keys()) | |
|
146 | ||
|
147 | def get_store(self): | |
|
148 | return self.filename | |
|
149 | ||
|
150 | def get(self, key): | |
|
151 | with self._dbm_file(False) as dbm: | |
|
152 | if hasattr(dbm, 'get'): | |
|
153 | value = dbm.get(key, NO_VALUE) | |
|
154 | else: | |
|
155 | # gdbm objects lack a .get method | |
|
156 | try: | |
|
157 | value = dbm[key] | |
|
158 | except KeyError: | |
|
159 | value = NO_VALUE | |
|
160 | if value is not NO_VALUE: | |
|
161 | value = self._loads(value) | |
|
162 | return value | |
|
163 | ||
|
164 | def set(self, key, value): | |
|
165 | with self._dbm_file(True) as dbm: | |
|
166 | dbm[key] = self._dumps(value) | |
|
167 | ||
|
168 | def set_multi(self, mapping): | |
|
169 | with self._dbm_file(True) as dbm: | |
|
170 | for key, value in mapping.items(): | |
|
171 | dbm[key] = self._dumps(value) | |
|
172 | ||
|
173 | ||
|
174 | class BaseRedisBackend(redis_backend.RedisBackend): | |
|
175 | ||
|
176 | def _create_client(self): | |
|
177 | args = {} | |
|
178 | ||
|
179 | if self.url is not None: | |
|
180 | args.update(url=self.url) | |
|
181 | ||
|
182 | else: | |
|
183 | args.update( | |
|
184 | host=self.host, password=self.password, | |
|
185 | port=self.port, db=self.db | |
|
186 | ) | |
|
187 | ||
|
188 | connection_pool = redis.ConnectionPool(**args) | |
|
189 | ||
|
190 | return redis.StrictRedis(connection_pool=connection_pool) | |
|
191 | ||
|
192 | def list_keys(self, prefix=''): | |
|
193 | prefix = '{}:{}*'.format(self.key_prefix, prefix) | |
|
194 | return self.client.keys(prefix) | |
|
195 | ||
|
196 | def get_store(self): | |
|
197 | return self.client.connection_pool | |
|
198 | ||
|
199 | def get(self, key): | |
|
200 | value = self.client.get(key) | |
|
201 | if value is None: | |
|
202 | return NO_VALUE | |
|
203 | return self._loads(value) | |
|
204 | ||
|
205 | def get_multi(self, keys): | |
|
206 | if not keys: | |
|
207 | return [] | |
|
208 | values = self.client.mget(keys) | |
|
209 | loads = self._loads | |
|
210 | return [ | |
|
211 | loads(v) if v is not None else NO_VALUE | |
|
212 | for v in values] | |
|
213 | ||
|
214 | def set(self, key, value): | |
|
215 | if self.redis_expiration_time: | |
|
216 | self.client.setex(key, self.redis_expiration_time, | |
|
217 | self._dumps(value)) | |
|
218 | else: | |
|
219 | self.client.set(key, self._dumps(value)) | |
|
220 | ||
|
221 | def set_multi(self, mapping): | |
|
222 | dumps = self._dumps | |
|
223 | mapping = dict( | |
|
224 | (k, dumps(v)) | |
|
225 | for k, v in mapping.items() | |
|
226 | ) | |
|
227 | ||
|
228 | if not self.redis_expiration_time: | |
|
229 | self.client.mset(mapping) | |
|
230 | else: | |
|
231 | pipe = self.client.pipeline() | |
|
232 | for key, value in mapping.items(): | |
|
233 | pipe.setex(key, self.redis_expiration_time, value) | |
|
234 | pipe.execute() | |
|
235 | ||
|
236 | def get_mutex(self, key): | |
|
237 | u = redis_backend.u | |
|
238 | if self.distributed_lock: | |
|
239 | lock_key = u('_lock_{0}').format(key) | |
|
240 | log.debug('Trying to acquire Redis lock for key %s', lock_key) | |
|
241 | return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep) | |
|
242 | else: | |
|
243 | return None | |
|
244 | ||
|
245 | ||
|
246 | class RedisPickleBackend(PickleSerializer, BaseRedisBackend): | |
|
247 | key_prefix = 'redis_pickle_backend' | |
|
248 | pass | |
|
249 | ||
|
250 | ||
|
251 | class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend): | |
|
252 | key_prefix = 'redis_msgpack_backend' | |
|
253 | pass |
@@ -18,10 +18,13 b'' | |||
|
18 | 18 | import os |
|
19 | 19 | import logging |
|
20 | 20 | import functools |
|
21 | from decorator import decorate | |
|
22 | ||
|
23 | from dogpile.cache import CacheRegion | |
|
24 | from dogpile.cache.util import compat | |
|
21 | 25 | |
|
22 | 26 | from vcsserver.utils import safe_str, sha1 |
|
23 | from dogpile.cache import CacheRegion | |
|
24 | from dogpile.cache.util import compat | |
|
27 | ||
|
25 | 28 | |
|
26 | 29 | log = logging.getLogger(__name__) |
|
27 | 30 | |
@@ -45,28 +48,35 b' class RhodeCodeCacheRegion(CacheRegion):' | |||
|
45 | 48 | if function_key_generator is None: |
|
46 | 49 | function_key_generator = self.function_key_generator |
|
47 | 50 | |
|
48 | def decorator(fn): | |
|
51 | def get_or_create_for_user_func(key_generator, user_func, *arg, **kw): | |
|
52 | ||
|
53 | if not condition: | |
|
54 | log.debug('Calling un-cached func:%s', user_func.func_name) | |
|
55 | return user_func(*arg, **kw) | |
|
56 | ||
|
57 | key = key_generator(*arg, **kw) | |
|
58 | ||
|
59 | timeout = expiration_time() if expiration_time_is_callable \ | |
|
60 | else expiration_time | |
|
61 | ||
|
62 | log.debug('Calling cached fn:%s', user_func.func_name) | |
|
63 | return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw)) | |
|
64 | ||
|
65 | def cache_decorator(user_func): | |
|
49 | 66 | if to_str is compat.string_type: |
|
50 | 67 | # backwards compatible |
|
51 | key_generator = function_key_generator(namespace, fn) | |
|
68 | key_generator = function_key_generator(namespace, user_func) | |
|
52 | 69 | else: |
|
53 | key_generator = function_key_generator(namespace, fn, to_str=to_str) | |
|
54 | ||
|
55 | @functools.wraps(fn) | |
|
56 | def decorate(*arg, **kw): | |
|
57 | key = key_generator(*arg, **kw) | |
|
70 | key_generator = function_key_generator(namespace, user_func, to_str=to_str) | |
|
58 | 71 | |
|
59 | @functools.wraps(fn) | |
|
60 | def creator(): |

61 | return fn(*arg, **kw) | |
|
62 | ||
|
63 | if not condition: | |
|
64 | return creator() | |
|
65 | ||
|
66 | timeout = expiration_time() if expiration_time_is_callable \ | |
|
67 | else expiration_time | |
|
68 | ||
|
69 | return self.get_or_create(key, creator, timeout, should_cache_fn) | |
|
72 | def refresh(*arg, **kw): | |
|
73 | """ | |
|
74 | Like invalidate, but regenerates the value instead | |
|
75 | """ | |
|
76 | key = key_generator(*arg, **kw) | |
|
77 | value = user_func(*arg, **kw) | |
|
78 | self.set(key, value) | |
|
79 | return value | |
|
70 | 80 | |
|
71 | 81 | def invalidate(*arg, **kw): |
|
72 | 82 | key = key_generator(*arg, **kw) |
@@ -80,22 +90,19 b' class RhodeCodeCacheRegion(CacheRegion):' | |||
|
80 | 90 | key = key_generator(*arg, **kw) |
|
81 | 91 | return self.get(key) |
|
82 | 92 | |
|
83 | def refresh(*arg, **kw): | |
|
84 | key = key_generator(*arg, **kw) | |
|
85 | value = fn(*arg, **kw) | |
|
86 | self.set(key, value) | |
|
87 | return value | |
|
93 | user_func.set = set_ | |
|
94 | user_func.invalidate = invalidate | |
|
95 | user_func.get = get | |
|
96 | user_func.refresh = refresh | |
|
97 | user_func.key_generator = key_generator | |
|
98 | user_func.original = user_func | |
|
88 | 99 | |
|
89 | decorate.set = set_ | |
|
90 | decorate.invalidate = invalidate | |
|
91 | decorate.refresh = refresh | |
|
92 | decorate.get = get | |
|
93 | decorate.original = fn | |
|
94 | decorate.key_generator = key_generator | |
|
100 | # Use `decorate` to preserve the signature of :param:`user_func`. | |
|
95 | 101 | |
|
96 | return decorate | |
|
102 | return decorate(user_func, functools.partial( | |
|
103 | get_or_create_for_user_func, key_generator)) | |
|
97 | 104 | |
|
98 | return decorator | |
|
105 | return cache_decorator | |
|
99 | 106 | |
|
100 | 107 | |
|
101 | 108 | def make_region(*arg, **kw): |
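
The rewritten conditional_cache_on_arguments keeps an un-cached fast path when condition is falsy and uses decorator.decorate so the wrapped function keeps its original signature while still carrying the set/get/invalidate/refresh helpers. A usage sketch, assuming the vcsserver package is importable (this code base targets Python 2) and a memory-backed region; all names below are illustrative:

    from vcsserver.lib.rc_cache import make_region

    region = make_region(name='example')
    region.configure('dogpile.cache.memory')

    calls = []

    @region.conditional_cache_on_arguments(condition=True)
    def heavy_lookup(repo_id, rev):
        calls.append((repo_id, rev))
        return '{}@{}'.format(repo_id, rev)

    assert heavy_lookup('repo1', 42) == 'repo1@42'
    assert heavy_lookup('repo1', 42) == 'repo1@42'   # second call served from the cache
    assert len(calls) == 1

    heavy_lookup.invalidate('repo1', 42)             # helpers attached by the decorator
    heavy_lookup.refresh('repo1', 42)                # recomputes and stores the value
    assert len(calls) == 2
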
@@ -110,7 +117,7 b' def get_default_cache_settings(settings,' | |||
|
110 | 117 | if key.startswith(prefix): |
|
111 | 118 | name = key.split(prefix)[1].strip() |
|
112 | 119 | val = settings[key] |
|
113 | if isinstance(val, basestring): |

120 | if isinstance(val, compat.string_types): | |
|
114 | 121 | val = val.strip() |
|
115 | 122 | cache_settings[name] = val |
|
116 | 123 | return cache_settings |
@@ -123,13 +130,23 b' def compute_key_from_params(*args):' | |||
|
123 | 130 | return sha1("_".join(map(safe_str, args))) |
|
124 | 131 | |
|
125 | 132 | |
|
126 | def key_generator(namespace, fn): |

133 | def backend_key_generator(backend): | |
|
134 | """ | |
|
135 | Special wrapper that also sends over the backend to the key generator | |
|
136 | """ | |
|
137 | def wrapper(namespace, fn): | |
|
138 | return key_generator(backend, namespace, fn) | |
|
139 | return wrapper | |
|
140 | ||
|
141 | ||
|
142 | def key_generator(backend, namespace, fn): | |
|
127 | 143 | fname = fn.__name__ |
|
128 | 144 | |
|
129 | 145 | def generate_key(*args): |
|
130 | namespace_pref = namespace or 'default' | |
|
146 | backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix' | |
|
147 | namespace_pref = namespace or 'default_namespace' | |
|
131 | 148 | arg_key = compute_key_from_params(*args) |
|
132 | final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key) | |
|
149 | final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key) | |
|
133 | 150 | |
|
134 | 151 | return final_key |
|
135 | 152 |
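
With backend_key_generator in place, every cache key now embeds the backend's key_prefix in front of the namespace, the function name and a sha1 of the call arguments. A standalone sketch of the resulting key layout (the sha1 helper is inlined here; vcsserver uses its own safe_str/sha1 utilities, and the prefix/namespace values are examples):

    import hashlib

    def compute_key_from_params(*args):
        return hashlib.sha1("_".join(str(a) for a in args).encode('utf8')).hexdigest()

    backend_prefix = 'redis_msgpack_backend'   # key_prefix of the configured backend
    namespace_pref = 'repo_object'             # falls back to 'default_namespace'
    fname = 'heavy_lookup'

    final_key = "{}:{}:{}_{}".format(
        backend_prefix, namespace_pref, fname, compute_key_from_params('repo1', 42))
    print(final_key)   # redis_msgpack_backend:repo_object:heavy_lookup_<sha1 of 'repo1_42'>
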
@@ -216,9 +216,6 b' class BufferedGenerator(object):' | |||
|
216 | 216 | except (GeneratorExit, StopIteration): |
|
217 | 217 | pass |
|
218 | 218 | |
|
219 | def __del__(self): | |
|
220 | self.close() | |
|
221 | ||
|
222 | 219 | #################### |
|
223 | 220 | # Threaded reader's infrastructure. |
|
224 | 221 | #################### |
@@ -475,26 +472,23 b' class SubprocessIOChunker(object):' | |||
|
475 | 472 | self._closed = True |
|
476 | 473 | try: |
|
477 | 474 | self.process.terminate() |
|
478 | except: | |
|
475 | except Exception: | |
|
479 | 476 | pass |
|
480 | 477 | if self._close_input_fd: |
|
481 | 478 | os.close(self._close_input_fd) |
|
482 | 479 | try: |
|
483 | 480 | self.output.close() |
|
484 | except: | |
|
481 | except Exception: | |
|
485 | 482 | pass |
|
486 | 483 | try: |
|
487 | 484 | self.error.close() |
|
488 | except: | |
|
485 | except Exception: | |
|
489 | 486 | pass |
|
490 | 487 | try: |
|
491 | 488 | os.close(self.inputstream) |
|
492 | except: | |
|
489 | except Exception: | |
|
493 | 490 | pass |
|
494 | 491 | |
|
495 | def __del__(self): | |
|
496 | self.close() | |
|
497 | ||
|
498 | 492 | |
|
499 | 493 | def run_command(arguments, env=None): |
|
500 | 494 | """ |
@@ -506,18 +500,20 b' def run_command(arguments, env=None):' | |||
|
506 | 500 | |
|
507 | 501 | cmd = arguments |
|
508 | 502 | log.debug('Running subprocessio command %s', cmd) |
|
503 | proc = None | |
|
509 | 504 | try: |
|
510 | 505 | _opts = {'shell': False, 'fail_on_stderr': False} |
|
511 | 506 | if env: |
|
512 | 507 | _opts.update({'env': env}) |
|
513 | p = SubprocessIOChunker(cmd, **_opts) | |
|
514 | stdout = ''.join(p) | |
|
515 | stderr = ''.join(''.join(p.error)) | |
|
508 | proc = SubprocessIOChunker(cmd, **_opts) | |
|
509 | return ''.join(proc), ''.join(proc.error) | |
|
516 | 510 | except (EnvironmentError, OSError) as err: |
|
517 | 511 | cmd = ' '.join(cmd) # human friendly CMD |
|
518 | 512 | tb_err = ("Couldn't run subprocessio command (%s).\n" |
|
519 | 513 | "Original error was:%s\n" % (cmd, err)) |
|
520 | 514 | log.exception(tb_err) |
|
521 | 515 | raise Exception(tb_err) |
|
516 | finally: | |
|
517 | if proc: | |
|
518 | proc.close() | |
|
522 | 519 | |
|
523 | return stdout, stderr |
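
run_command now keeps the chunker in a proc variable and always closes it in the finally block, so pipes and file descriptors are released even when the subprocess call raises; the returned (stdout, stderr) pair is unchanged. A small usage sketch (assumes a POSIX echo binary and that the vcsserver package is importable):

    from vcsserver.subprocessio import run_command

    stdout, stderr = run_command(['echo', 'hello'])
    assert stdout.strip() == 'hello'
    assert stderr == ''
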
@@ -36,6 +36,7 b' import svn.repos' | |||
|
36 | 36 | |
|
37 | 37 | from vcsserver import svn_diff, exceptions, subprocessio, settings |
|
38 | 38 | from vcsserver.base import RepoFactory, raise_from_original |
|
39 | from vcsserver.vcs_base import RemoteBase | |
|
39 | 40 | |
|
40 | 41 | log = logging.getLogger(__name__) |
|
41 | 42 | |
@@ -97,23 +98,8 b' class SubversionFactory(RepoFactory):' | |||
|
97 | 98 | def repo(self, wire, create=False, compatible_version=None): |
|
98 | 99 | """ |
|
99 | 100 | Get a repository instance for the given path. |
|
100 | ||
|
101 | Uses internally the low level beaker API since the decorators introduce | |
|
102 | significant overhead. | |
|
103 | 101 | """ |
|
104 | region = self._cache_region | |
|
105 | context = wire.get('context', None) | |
|
106 | repo_path = wire.get('path', '') | |
|
107 | context_uid = '{}'.format(context) | |
|
108 | cache = wire.get('cache', True) | |
|
109 | cache_on = context and cache | |
|
110 | ||
|
111 | @region.conditional_cache_on_arguments(condition=cache_on) | |
|
112 | def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id): | |
|
113 | return self._create_repo(wire, create, compatible_version) | |
|
114 | ||
|
115 | return create_new_repo(self.repo_type, repo_path, context_uid, | |
|
116 | compatible_version) | |
|
102 | return self._create_repo(wire, create, compatible_version) | |
|
117 | 103 | |
|
118 | 104 | |
|
119 | 105 | NODE_TYPE_MAPPING = { |
@@ -122,7 +108,7 b' NODE_TYPE_MAPPING = {' | |||
|
122 | 108 | } |
|
123 | 109 | |
|
124 | 110 | |
|
125 | class SvnRemote(object): |

111 | class SvnRemote(RemoteBase): | |
|
126 | 112 | |
|
127 | 113 | def __init__(self, factory, hg_factory=None): |
|
128 | 114 | self._factory = factory |
@@ -141,7 +127,6 b' class SvnRemote(object):' | |||
|
141 | 127 | |
|
142 | 128 | @reraise_safe_exceptions |
|
143 | 129 | def is_empty(self, wire): |
|
144 | repo = self._factory.repo(wire) | |
|
145 | 130 | |
|
146 | 131 | try: |
|
147 | 132 | return self.lookup(wire, -1) == 0 |
@@ -219,9 +204,14 b' class SvnRemote(object):' | |||
|
219 | 204 | return start_rev, end_rev |
|
220 | 205 | |
|
221 | 206 | def revision_properties(self, wire, revision): |
|
222 | repo = self._factory.repo(wire) | |
|
223 | fs_ptr = svn.repos.fs(repo) | |
|
224 | return svn.fs.revision_proplist(fs_ptr, revision) | |
|
207 | ||
|
208 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
209 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
210 | def _revision_properties(_repo_id, _revision): | |
|
211 | repo = self._factory.repo(wire) | |
|
212 | fs_ptr = svn.repos.fs(repo) | |
|
213 | return svn.fs.revision_proplist(fs_ptr, revision) | |
|
214 | return _revision_properties(repo_id, revision) | |
|
225 | 215 | |
|
226 | 216 | def revision_changes(self, wire, revision): |
|
227 | 217 | |
@@ -267,28 +257,37 b' class SvnRemote(object):' | |||
|
267 | 257 | } |
|
268 | 258 | return changes |
|
269 | 259 | |
|
260 | @reraise_safe_exceptions | |
|
270 | 261 | def node_history(self, wire, path, revision, limit): |
|
271 | cross_copies = False | |
|
272 | repo = self._factory.repo(wire) | |
|
273 | fsobj = svn.repos.fs(repo) | |
|
274 | rev_root = svn.fs.revision_root(fsobj, revision) | |
|
262 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
263 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
264 | def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit): | |
|
265 | cross_copies = False | |
|
266 | repo = self._factory.repo(wire) | |
|
267 | fsobj = svn.repos.fs(repo) | |
|
268 | rev_root = svn.fs.revision_root(fsobj, revision) | |
|
275 | 269 | |
|
276 | history_revisions = [] | |
|
277 | history = svn.fs.node_history(rev_root, path) | |
|
278 | history = svn.fs.history_prev(history, cross_copies) | |
|
279 | while history: | |
|
280 | __, node_revision = svn.fs.history_location(history) | |
|
281 | history_revisions.append(node_revision) | |
|
282 | if limit and len(history_revisions) >= limit: | |
|
283 | break | |
|
270 | history_revisions = [] | |
|
271 | history = svn.fs.node_history(rev_root, path) | |
|
284 | 272 | history = svn.fs.history_prev(history, cross_copies) |
|
285 | return history_revisions | |
|
273 | while history: | |
|
274 | __, node_revision = svn.fs.history_location(history) | |
|
275 | history_revisions.append(node_revision) | |
|
276 | if limit and len(history_revisions) >= limit: | |
|
277 | break | |
|
278 | history = svn.fs.history_prev(history, cross_copies) | |
|
279 | return history_revisions | |
|
280 | return _assert_correct_path(context_uid, repo_id, path, revision, limit) | |
|
286 | 281 | |
|
287 | 282 | def node_properties(self, wire, path, revision): |
|
288 | repo = self._factory.repo(wire) |

289 | fsobj = svn.repos.fs(repo) | |
|
290 | rev_root = svn.fs.revision_root(fsobj, revision) | |
|
291 | return svn.fs.node_proplist(rev_root, path) | |
|
283 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
284 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
285 | def _node_properties(_repo_id, _path, _revision): | |
|
286 | repo = self._factory.repo(wire) | |
|
287 | fsobj = svn.repos.fs(repo) | |
|
288 | rev_root = svn.fs.revision_root(fsobj, revision) | |
|
289 | return svn.fs.node_proplist(rev_root, path) | |
|
290 | return _node_properties(repo_id, path, revision) | |
|
292 | 291 | |
|
293 | 292 | def file_annotate(self, wire, path, revision): |
|
294 | 293 | abs_path = 'file://' + urllib.pathname2url( |
@@ -317,27 +316,37 b' class SvnRemote(object):' | |||
|
317 | 316 | |
|
318 | 317 | return annotations |
|
319 | 318 | |
|
320 | def get_node_type(self, wire, path, rev=None): | |
|
321 | repo = self._factory.repo(wire) | |
|
322 | fs_ptr = svn.repos.fs(repo) | |
|
323 | if rev is None: | |
|
324 | rev = svn.fs.youngest_rev(fs_ptr) | |
|
325 | root = svn.fs.revision_root(fs_ptr, rev) | |
|
326 | node = svn.fs.check_path(root, path) | |
|
327 | return NODE_TYPE_MAPPING.get(node, None) | |
|
319 | def get_node_type(self, wire, path, revision=None): | |
|
320 | ||
|
321 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
322 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
323 | def _get_node_type(_repo_id, _path, _revision): | |
|
324 | repo = self._factory.repo(wire) | |
|
325 | fs_ptr = svn.repos.fs(repo) | |
|
326 | if _revision is None: | |
|
327 | _revision = svn.fs.youngest_rev(fs_ptr) | |
|
328 | root = svn.fs.revision_root(fs_ptr, _revision) | |
|
329 | node = svn.fs.check_path(root, path) | |
|
330 | return NODE_TYPE_MAPPING.get(node, None) | |
|
331 | return _get_node_type(repo_id, path, revision) | |
|
328 | 332 | |
|
329 | 333 | def get_nodes(self, wire, path, revision=None): |
|
330 | repo = self._factory.repo(wire) | |
|
331 | fsobj = svn.repos.fs(repo) | |
|
332 | if revision is None: | |
|
333 | revision = svn.fs.youngest_rev(fsobj) | |
|
334 | root = svn.fs.revision_root(fsobj, revision) | |
|
335 | entries = svn.fs.dir_entries(root, path) | |
|
336 | result = [] | |
|
337 | for entry_path, entry_info in entries.iteritems(): | |
|
338 | result.append( | |
|
339 | (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None))) | |
|
340 | return result |

334 | ||
|
335 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
336 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
337 | def _get_nodes(_repo_id, _path, _revision): | |
|
338 | repo = self._factory.repo(wire) | |
|
339 | fsobj = svn.repos.fs(repo) | |
|
340 | if _revision is None: | |
|
341 | _revision = svn.fs.youngest_rev(fsobj) | |
|
342 | root = svn.fs.revision_root(fsobj, _revision) | |
|
343 | entries = svn.fs.dir_entries(root, path) | |
|
344 | result = [] | |
|
345 | for entry_path, entry_info in entries.iteritems(): | |
|
346 | result.append( | |
|
347 | (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None))) | |
|
348 | return result | |
|
349 | return _get_nodes(repo_id, path, revision) | |
|
341 | 350 | |
|
342 | 351 | def get_file_content(self, wire, path, rev=None): |
|
343 | 352 | repo = self._factory.repo(wire) |
@@ -349,13 +358,18 b' class SvnRemote(object):' | |||
|
349 | 358 | return content.read() |
|
350 | 359 | |
|
351 | 360 | def get_file_size(self, wire, path, revision=None): |
|
352 | repo = self._factory.repo(wire) | |
|
353 | fsobj = svn.repos.fs(repo) | |
|
354 | if revision is None: | |
|
355 | revision = svn.fs.youngest_revision(fsobj) | |
|
356 | root = svn.fs.revision_root(fsobj, revision) | |
|
357 | size = svn.fs.file_length(root, path) | |
|
358 | return size | |
|
361 | ||
|
362 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
363 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
364 | def _get_file_size(_repo_id, _path, _revision): | |
|
365 | repo = self._factory.repo(wire) | |
|
366 | fsobj = svn.repos.fs(repo) | |
|
367 | if _revision is None: | |
|
368 | _revision = svn.fs.youngest_revision(fsobj) | |
|
369 | root = svn.fs.revision_root(fsobj, _revision) | |
|
370 | size = svn.fs.file_length(root, path) | |
|
371 | return size | |
|
372 | return _get_file_size(repo_id, path, revision) | |
|
359 | 373 | |
|
360 | 374 | def create_repository(self, wire, compatible_version=None): |
|
361 | 375 | log.info('Creating Subversion repository in path "%s"', wire['path']) |
@@ -458,6 +472,17 b' class SvnRemote(object):' | |||
|
458 | 472 | return False |
|
459 | 473 | |
|
460 | 474 | @reraise_safe_exceptions |
|
475 | def is_binary(self, wire, rev, path): | |
|
476 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
|
477 | ||
|
478 | @self.region.conditional_cache_on_arguments(condition=cache_on) | |
|
479 | def _is_binary(_repo_id, _rev, _path): | |
|
480 | raw_bytes = self.get_file_content(wire, path, rev) | |
|
481 | return raw_bytes and '\0' in raw_bytes | |
|
482 | ||
|
483 | return _is_binary(repo_id, rev, path) | |
|
484 | ||
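
The new is_binary answer is cached per (repo_id, rev, path) and comes down to a NUL-byte test over the file content. The heuristic in isolation (helper name is illustrative):

    def looks_binary(raw_bytes):
        # NUL-byte heuristic used by _is_binary above
        return bool(raw_bytes) and '\0' in raw_bytes

    assert looks_binary('\x89PNG\x00\x01') is True
    assert looks_binary('plain text') is False
    assert looks_binary('') is False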
|
485 | @reraise_safe_exceptions | |
|
461 | 486 | def run_svn_command(self, wire, cmd, **opts): |
|
462 | 487 | path = wire.get('path', None) |
|
463 | 488 | |
@@ -673,7 +698,6 b' class SvnDiffer(object):' | |||
|
673 | 698 | return content.splitlines(True) |
|
674 | 699 | |
|
675 | 700 | |
|
676 | ||
|
677 | 701 | class DiffChangeEditor(svn.delta.Editor): |
|
678 | 702 | """ |
|
679 | 703 | Records changes between two given revisions |
@@ -61,7 +61,7 b' class TestGitFetch(object):' | |||
|
61 | 61 | |
|
62 | 62 | with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch: |
|
63 | 63 | mock_fetch.side_effect = side_effect |
|
64 | self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False) |

64 | self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False) | |
|
65 | 65 | determine_wants = self.mock_repo.object_store.determine_wants_all |
|
66 | 66 | determine_wants.assert_called_once_with(SAMPLE_REFS) |
|
67 | 67 | |
@@ -79,7 +79,7 b' class TestGitFetch(object):' | |||
|
79 | 79 | with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch: |
|
80 | 80 | mock_fetch.side_effect = side_effect |
|
81 | 81 | self.remote_git.pull( |
|
82 | wire=None, url='/tmp/', apply_refs=False, |

82 | wire={}, url='/tmp/', apply_refs=False, | |
|
83 | 83 | refs=selected_refs.keys()) |
|
84 | 84 | determine_wants = self.mock_repo.object_store.determine_wants_all |
|
85 | 85 | assert determine_wants.call_count == 0 |
@@ -95,18 +95,13 b' class TestGitFetch(object):' | |||
|
95 | 95 | |
|
96 | 96 | with patch('vcsserver.git.Repo', create=False) as mock_repo: |
|
97 | 97 | mock_repo().get_refs.return_value = sample_refs |
|
98 | remote_refs = remote_git.get_remote_refs(wire=None, url=url) |

98 | remote_refs = remote_git.get_remote_refs(wire={}, url=url) | |
|
99 | 99 | mock_repo().get_refs.assert_called_once_with() |
|
100 | 100 | assert remote_refs == sample_refs |
|
101 | 101 | |
|
102 | def test_remove_ref(self): | |
|
103 | ref_to_remove = 'refs/tags/v0.1.9' | |
|
104 | self.mock_repo.refs = SAMPLE_REFS.copy() | |
|
105 | self.remote_git.remove_ref(None, ref_to_remove) | |
|
106 | assert ref_to_remove not in self.mock_repo.refs | |
|
107 | ||
|
108 | 102 | |
|
109 | 103 | class TestReraiseSafeExceptions(object): |
|
104 | ||
|
110 | 105 | def test_method_decorated_with_reraise_safe_exceptions(self): |
|
111 | 106 | factory = Mock() |
|
112 | 107 | git_remote = git.GitRemote(factory) |
@@ -26,36 +26,17 b' from mock import Mock, MagicMock, patch' | |||
|
26 | 26 | from vcsserver import exceptions, hg, hgcompat |
|
27 | 27 | |
|
28 | 28 | |
|
29 | class TestHGLookup(object): | |
|
30 | def setup(self): | |
|
31 | self.mock_repo = MagicMock() | |
|
32 | self.mock_repo.__getitem__.side_effect = LookupError( | |
|
33 | 'revision_or_commit_id', 'index', 'message') | |
|
34 | factory = Mock() | |
|
35 | factory.repo = Mock(return_value=self.mock_repo) | |
|
36 | self.remote_hg = hg.HgRemote(factory) | |
|
37 | ||
|
38 | def test_fail_lookup_hg(self): | |
|
39 | with pytest.raises(Exception) as exc_info: | |
|
40 | self.remote_hg.lookup( | |
|
41 | wire=None, revision='revision_or_commit_id', both=True) | |
|
42 | ||
|
43 | assert exc_info.value._vcs_kind == 'lookup' | |
|
44 | assert 'revision_or_commit_id' in exc_info.value.args | |
|
45 | ||
|
46 | ||
|
47 | 29 | class TestDiff(object): |
|
48 | 30 | def test_raising_safe_exception_when_lookup_failed(self): |
|
49 | repo = Mock() | |
|
31 | ||
|
50 | 32 | factory = Mock() |
|
51 | factory.repo = Mock(return_value=repo) | |
|
52 | 33 | hg_remote = hg.HgRemote(factory) |
|
53 | 34 | with patch('mercurial.patch.diff') as diff_mock: |
|
54 | 35 | diff_mock.side_effect = LookupError( |
|
55 | 36 | 'deadbeef', 'index', 'message') |
|
56 | 37 | with pytest.raises(Exception) as exc_info: |
|
57 | 38 | hg_remote.diff( |
|
58 | wire=None, commit_id_1='deadbeef', commit_id_2='deadbee1', |

39 | wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1', | |
|
59 | 40 | file_filter=None, opt_git=True, opt_ignorews=True, |
|
60 | 41 | context=3) |
|
61 | 42 | assert type(exc_info.value) == Exception |
@@ -45,8 +45,10 b" INVALID_CERTIFICATE_STDERR = '\\n'.join([" | |||
|
45 | 45 | reason="SVN not packaged for Cygwin") |
|
46 | 46 | def test_import_remote_repository_certificate_error(stderr, expected_reason): |
|
47 | 47 | from vcsserver import svn |
|
48 | factory = mock.Mock() | |
|
49 | factory.repo = mock.Mock(return_value=mock.Mock()) | |
|
48 | 50 | |
|
49 | remote = svn.SvnRemote(None) |

51 | remote = svn.SvnRemote(factory) | |
|
50 | 52 | remote.is_path_valid_repository = lambda wire, path: True |
|
51 | 53 | |
|
52 | 54 | with mock.patch('subprocess.Popen', |
@@ -76,7 +78,10 b' def test_svn_libraries_can_be_imported()' | |||
|
76 | 78 | def test_username_password_extraction_from_url(example_url, parts): |
|
77 | 79 | from vcsserver import svn |
|
78 | 80 | |
|
79 | remote = svn.SvnRemote(None) | |
|
81 | factory = mock.Mock() | |
|
82 | factory.repo = mock.Mock(return_value=mock.Mock()) | |
|
83 | ||
|
84 | remote = svn.SvnRemote(factory) | |
|
80 | 85 | remote.is_path_valid_repository = lambda wire, path: True |
|
81 | 86 | |
|
82 | 87 | assert remote.get_url_and_credentials(example_url) == parts |
@@ -15,12 +15,10 b'' | |||
|
15 | 15 | # along with this program; if not, write to the Free Software Foundation, |
|
16 | 16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
17 | 17 | |
|
18 | ||
|
19 | ||
|
20 | 18 | import time |
|
21 | 19 | import logging |
|
22 | 20 | |
|
23 | ||
|
21 | import vcsserver | |
|
24 | 22 | from vcsserver.utils import safe_str |
|
25 | 23 | |
|
26 | 24 | |
@@ -32,6 +30,10 b' def get_access_path(request):' | |||
|
32 | 30 | return environ.get('PATH_INFO') |
|
33 | 31 | |
|
34 | 32 | |
|
33 | def get_user_agent(environ): | |
|
34 | return environ.get('HTTP_USER_AGENT') | |
|
35 | ||
|
36 | ||
|
35 | 37 | class RequestWrapperTween(object): |
|
36 | 38 | def __init__(self, handler, registry): |
|
37 | 39 | self.handler = handler |
@@ -45,14 +47,18 b' class RequestWrapperTween(object):' | |||
|
45 | 47 | response = self.handler(request) |
|
46 | 48 | finally: |
|
47 | 49 | end = time.time() |
|
48 | ||
|
49 | log.info('IP: %s Request to path: `%s` time: %.3fs', | |
|
50 | '127.0.0.1', safe_str(get_access_path(request)), end - start) | |
|
50 | total = end - start | |
|
51 | count = request.request_count() | |
|
52 | _ver_ = vcsserver.__version__ | |
|
53 | log.info( | |
|
54 | 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s', | |
|
55 | count, '127.0.0.1', request.environ.get('REQUEST_METHOD'), | |
|
56 | safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_) | |
|
51 | 57 | |
|
52 | 58 | return response |
|
53 | 59 | |
|
54 | 60 | |
|
55 | 61 | def includeme(config): |
|
56 | 62 | config.add_tween( |
|
57 | 'vcsserver.tweens.RequestWrapperTween', | |
|
63 | 'vcsserver.tweens.request_wrapper.RequestWrapperTween', | |
|
58 | 64 | ) |