@@ -1,393 +1,393 @@
1 | """ |
|
1 | """ | |
2 | Gunicorn config extension and hooks. This config file adds some extra settings and memory management. |
|
2 | Gunicorn config extension and hooks. This config file adds some extra settings and memory management. | |
3 | Gunicorn configuration should be managed by .ini files entries of RhodeCode or VCSServer |
|
3 | Gunicorn configuration should be managed by .ini files entries of RhodeCode or VCSServer | |
4 | """ |
|
4 | """ | |
5 |
|
5 | |||
6 | import gc |
|
6 | import gc | |
7 | import os |
|
7 | import os | |
8 | import sys |
|
8 | import sys | |
9 | import math |
|
9 | import math | |
10 | import time |
|
10 | import time | |
11 | import threading |
|
11 | import threading | |
12 | import traceback |
|
12 | import traceback | |
13 | import random |
|
13 | import random | |
14 | import socket |
|
14 | import socket | |
15 | from gunicorn.glogging import Logger |
|
15 | from gunicorn.glogging import Logger | |
16 |
|
16 | |||
17 |
|
17 | |||
18 | def get_workers(): |
|
18 | def get_workers(): | |
19 | import multiprocessing |
|
19 | import multiprocessing | |
20 | return multiprocessing.cpu_count() * 2 + 1 |
|
20 | return multiprocessing.cpu_count() * 2 + 1 | |
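# e.g. multiprocessing.cpu_count() == 4 gives 4 * 2 + 1 = 9 workers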

# GLOBAL
errorlog = '-'
accesslog = '-'


# SERVER MECHANICS
# None == system temp dir
# worker_tmp_dir is recommended to be set to some tmpfs
worker_tmp_dir = None
tmp_upload_dir = None

#reuse_port = True

# Custom log format
#access_log_format = (
#    '%(t)s %(p)s INFO [GNCRN] %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')

# loki format for easier parsing in grafana
access_log_format = (
    'time="%(t)s" pid=%(p)s level="INFO" type="[GNCRN]" ip="%(h)-15s" rqt="%(L)s" response_code="%(s)s" response_bytes="%(b)-6s" uri="%(m)s:%(U)s %(q)s" user=":%(u)s" user_agent="%(a)s"')

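# An illustrative line the loki format above might produce (field values
# are invented for the example):
#   time="2020-01-01 12:00:00.123" pid=<12345> level="INFO" type="[GNCRN]"
#   ip="127.0.0.1      " rqt="0.005000" response_code="200" ...
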
# self adjust workers based on CPU count
# workers = get_workers()


def _get_process_rss(pid=None):
    try:
        import psutil
        if pid:
            proc = psutil.Process(pid)
        else:
            proc = psutil.Process()
        return proc.memory_info().rss
    except Exception:
        return None


def _get_config(ini_path):
    import configparser

    try:
        config = configparser.RawConfigParser()
        config.read(ini_path)
        return config
    except Exception:
        return None


def _time_with_offset(memory_usage_check_interval):
    # randint needs int bounds; the original float division would raise in py3
    return time.time() - random.randint(0, int(memory_usage_check_interval / 2))
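# e.g. with the default 60s interval the first check lands 0-30s early,
# spreading the per-worker memory checks (and possible restarts) apart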


def pre_fork(server, worker):
    pass


def post_fork(server, worker):

    # memory spec defaults
    _memory_max_usage = 0
    _memory_usage_check_interval = 60
    _memory_usage_recovery_threshold = 0.8

    ini_path = os.path.abspath(server.cfg.paste)
    conf = _get_config(ini_path)

    section = 'server:main'
    if conf and conf.has_section(section):

        if conf.has_option(section, 'memory_max_usage'):
            _memory_max_usage = conf.getint(section, 'memory_max_usage')

        if conf.has_option(section, 'memory_usage_check_interval'):
            _memory_usage_check_interval = conf.getint(section, 'memory_usage_check_interval')

        if conf.has_option(section, 'memory_usage_recovery_threshold'):
            _memory_usage_recovery_threshold = conf.getfloat(section, 'memory_usage_recovery_threshold')

    worker._memory_max_usage = int(os.environ.get('RC_GUNICORN_MEMORY_MAX_USAGE', '')
                                   or _memory_max_usage)
    worker._memory_usage_check_interval = int(os.environ.get('RC_GUNICORN_MEMORY_USAGE_CHECK_INTERVAL', '')
                                              or _memory_usage_check_interval)
    worker._memory_usage_recovery_threshold = float(os.environ.get('RC_GUNICORN_MEMORY_USAGE_RECOVERY_THRESHOLD', '')
                                                    or _memory_usage_recovery_threshold)

    # register memory last check time, with some random offset so we don't
    # recycle all workers at once
    worker._last_memory_check_time = _time_with_offset(_memory_usage_check_interval)

    if _memory_max_usage:
        server.log.info("[%-10s] WORKER spawned with max memory set at %s", worker.pid,
                        _format_data_size(_memory_max_usage))
    else:
        server.log.info("[%-10s] WORKER spawned", worker.pid)
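# Illustrative [server:main] .ini entries read above (values are examples,
# not shipped defaults):
#   memory_max_usage = 1073741824          ; 1 GiB RSS cap, 0 disables checks
#   memory_usage_check_interval = 60       ; seconds between RSS checks
#   memory_usage_recovery_threshold = 0.8  ; fraction gc must bring us under
# The RC_GUNICORN_* environment variables take precedence over the .ini.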


def pre_exec(server):
    server.log.info("Forked child, re-executing.")


def on_starting(server):
    server_lbl = '{} {}'.format(server.proc_name, server.address)
    server.log.info("Server %s is starting.", server_lbl)


def when_ready(server):
    server.log.info("Server %s is ready. Spawning workers", server)


def on_reload(server):
    pass


def _format_data_size(size, unit="B", precision=1, binary=True):
    """Format a number using SI units (kilo, mega, etc.).

    ``size``: The number as a float or int.

    ``unit``: The unit name in plural form. Examples: "bytes", "B".

    ``precision``: How many digits to the right of the decimal point. Default
    is 1. 0 suppresses the decimal point.

    ``binary``: If false, use base-10 decimal prefixes (kilo = k = 1000).
    If true, use base-2 binary prefixes (kibi = Ki = 1024).
    """

    if not binary:
        base = 1000
        multiples = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    else:
        base = 1024
        multiples = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')

    sign = ""
    if size > 0:
        m = int(math.log(size, base))
    elif size < 0:
        sign = "-"
        size = -size
        m = int(math.log(size, base))
    else:
        m = 0
    if m > 8:
        m = 8

    if m == 0:
        precision = '%.0f'
    else:
        precision = '%%.%df' % precision

    size = precision % (size / math.pow(base, m))

    return '%s%s %s%s' % (sign, size.strip(), multiples[m], unit)
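# Examples of the formatter above (computed by hand, binary vs. decimal):
#   _format_data_size(123456789)                -> '117.7 MiB'
#   _format_data_size(123456789, binary=False)  -> '123.5 MB'
#   _format_data_size(0)                        -> '0 B'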


def _check_memory_usage(worker):
    memory_max_usage = worker._memory_max_usage
    if not memory_max_usage:
        return

    memory_usage_check_interval = worker._memory_usage_check_interval
    memory_usage_recovery_threshold = memory_max_usage * worker._memory_usage_recovery_threshold

    elapsed = time.time() - worker._last_memory_check_time
    if elapsed > memory_usage_check_interval:
        mem_usage = _get_process_rss()
        if mem_usage and mem_usage > memory_max_usage:
            worker.log.info(
                "memory usage %s > %s, forcing gc",
                _format_data_size(mem_usage), _format_data_size(memory_max_usage))
            # Try to clean it up by forcing a full collection.
            gc.collect()
            mem_usage = _get_process_rss()
            # guard against _get_process_rss() returning None (e.g. no psutil)
            if mem_usage and mem_usage > memory_usage_recovery_threshold:
                # Didn't clean up enough, we'll have to terminate.
                worker.log.warning(
                    "memory usage %s > %s after gc, quitting",
                    _format_data_size(mem_usage), _format_data_size(memory_max_usage))
                # This will cause worker to auto-restart itself
                worker.alive = False
        worker._last_memory_check_time = time.time()
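# e.g. with memory_max_usage = 1 GiB and a 0.8 threshold: a worker whose RSS
# exceeds 1 GiB triggers a gc.collect(); if RSS is still above
# 0.8 * 1 GiB (~819 MiB) afterwards, the worker stops accepting work and
# gunicorn's arbiter replaces it with a fresh process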


def worker_int(worker):
    worker.log.info("[%-10s] worker received INT or QUIT signal", worker.pid)

    # get traceback info, on worker crash
    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
    code = []
    for thread_id, stack in sys._current_frames().items():
        code.append(
            "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
        for fname, lineno, name, line in traceback.extract_stack(stack):
            code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
            if line:
                code.append(" %s" % (line.strip()))
    worker.log.debug("\n".join(code))


def worker_abort(worker):
    worker.log.info("[%-10s] worker received SIGABRT signal", worker.pid)


def worker_exit(server, worker):
    worker.log.info("[%-10s] worker exit", worker.pid)


def child_exit(server, worker):
    worker.log.info("[%-10s] worker child exit", worker.pid)


def pre_request(worker, req):
    worker.start_time = time.time()
    worker.log.debug(
        "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)


def post_request(worker, req, environ, resp):
    total_time = time.time() - worker.start_time
    # Gunicorn sometimes has problems with reading the status_code
    status_code = getattr(resp, 'status_code', '')
    worker.log.debug(
        "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.4fs",
        worker.nr, req.method, req.path, status_code, total_time)
    _check_memory_usage(worker)


def _filter_proxy(ip):
    """
    Passed in IP addresses in HEADERS can be in a special format of multiple
    IPs. Those comma separated IPs are passed from various proxies in the
    chain of request processing. The left-most one is the original client.
    We only care about that first IP, which came from the original client.

    :param ip: ip string from headers
    """
    if ',' in ip:
        _ips = ip.split(',')
        _first_ip = _ips[0].strip()
        return _first_ip
    return ip
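# e.g. behind two proxies (addresses are illustrative):
#   _filter_proxy('203.0.113.7, 10.0.0.1, 10.0.0.2') -> '203.0.113.7'
#   _filter_proxy('203.0.113.7')                     -> '203.0.113.7'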


def _filter_port(ip):
    """
    Removes a port from ip, there are 4 main cases to handle here.
    - ipv4 eg. 127.0.0.1
    - ipv6 eg. ::1
    - ipv4+port eg. 127.0.0.1:8080
    - ipv6+port eg. [::1]:8080

    :param ip:
    """
    def is_ipv6(ip_addr):
        if hasattr(socket, 'inet_pton'):
            try:
                socket.inet_pton(socket.AF_INET6, ip_addr)
            except socket.error:
                return False
        else:
            return False
        return True

    if ':' not in ip:  # must be ipv4 pure ip
        return ip

    if '[' in ip and ']' in ip:  # ipv6 with port
        return ip.split(']')[0][1:].lower()

    # must be ipv6 or ipv4 with port
    if is_ipv6(ip):
        return ip
    else:
        ip, _port = ip.split(':')[:2]  # means ipv4+port
        return ip
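# the four cases from the docstring, worked through:
#   _filter_port('127.0.0.1')      -> '127.0.0.1'   (no ':' at all)
#   _filter_port('::1')            -> '::1'         (parses as ipv6)
#   _filter_port('127.0.0.1:8080') -> '127.0.0.1'   (ipv4, port split off)
#   _filter_port('[::1]:8080')     -> '::1'         (bracketed ipv6 + port)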


def get_ip_addr(environ):
    proxy_key = 'HTTP_X_REAL_IP'
    proxy_key2 = 'HTTP_X_FORWARDED_FOR'
    def_key = 'REMOTE_ADDR'
    _filters = lambda x: _filter_port(_filter_proxy(x))

    ip = environ.get(proxy_key)
    if ip:
        return _filters(ip)

    ip = environ.get(proxy_key2)
    if ip:
        return _filters(ip)

    ip = environ.get(def_key, '0.0.0.0')
    return _filters(ip)
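# lookup order: X-Real-IP, then X-Forwarded-For, then the socket address, e.g.
#   get_ip_addr({'HTTP_X_FORWARDED_FOR': '203.0.113.7, 10.0.0.1',
#                'REMOTE_ADDR': '10.0.0.1'}) -> '203.0.113.7'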


class RhodeCodeLogger(Logger):
    """
    Custom Logger that allows some customization that gunicorn doesn't allow
    """

    datefmt = r"%Y-%m-%d %H:%M:%S"

    def __init__(self, cfg):
        Logger.__init__(self, cfg)

    def now(self):
        """ return date in RhodeCode Log format """
        now = time.time()
        msecs = int((now - int(now)) * 1000)
        return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
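    # e.g. now() -> '2021-03-05 14:07:01.123' (illustrative timestamp;
    # datefmt above plus milliseconds)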

    def atoms(self, resp, req, environ, request_time):
        """ Gets atoms for log formatting.
        """
        status = resp.status
        if isinstance(status, str):
            status = status.split(None, 1)[0]
        atoms = {
            'h': get_ip_addr(environ),
            'l': '-',
            'u': self._get_user(environ) or '-',
            't': self.now(),
            'r': "%s %s %s" % (environ['REQUEST_METHOD'],
                               environ['RAW_URI'],
                               environ["SERVER_PROTOCOL"]),
            's': status,
            'm': environ.get('REQUEST_METHOD'),
            'U': environ.get('PATH_INFO'),
            'q': environ.get('QUERY_STRING'),
            'H': environ.get('SERVER_PROTOCOL'),
            'b': getattr(resp, 'sent', None) is not None and str(resp.sent) or '-',
            'B': getattr(resp, 'sent', None),
            'f': environ.get('HTTP_REFERER', '-'),
            'a': environ.get('HTTP_USER_AGENT', '-'),
            'T': request_time.seconds,
            'D': (request_time.seconds * 1000000) + request_time.microseconds,
            'M': (request_time.seconds * 1000) + int(request_time.microseconds / 1000),
            'L': "%d.%06d" % (request_time.seconds, request_time.microseconds),
            'p': "<%s>" % os.getpid()
        }

        # add request headers
        if hasattr(req, 'headers'):
            req_headers = req.headers
        else:
            req_headers = req

        if hasattr(req_headers, "items"):
            req_headers = req_headers.items()

        atoms.update({"{%s}i" % k.lower(): v for k, v in req_headers})

        resp_headers = resp.headers
        if hasattr(resp_headers, "items"):
            resp_headers = resp_headers.items()

        # add response headers
        atoms.update({"{%s}o" % k.lower(): v for k, v in resp_headers})

        # add environ variables
        environ_variables = environ.items()
        atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables})

        return atoms

logger_class = RhodeCodeLogger
@@ -1,1281 +1,1281 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import collections
import logging
import os
import posixpath as vcspath
import re
import stat
import traceback
import urllib.request, urllib.parse, urllib.error
from functools import wraps

import more_itertools
import pygit2
from pygit2 import Repository as LibGit2Repo
from pygit2 import index as LibGit2Index
from dulwich import index, objects
from dulwich.client import HttpGitClient, LocalGitClient
from dulwich.errors import (
    NotGitRepository, ChecksumMismatch, WrongObjectException,
    MissingCommitError, ObjectMissing, HangupException,
    UnexpectedCommandError)
from dulwich.repo import Repo as DulwichRepo
from dulwich.server import update_server_info

from vcsserver import exceptions, settings, subprocessio
from vcsserver.utils import safe_str, safe_int, safe_unicode
from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
from vcsserver.hgcompat import (
    hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
from vcsserver.git_lfs.lib import LFSOidStore
from vcsserver.vcs_base import RemoteBase

DIR_STAT = stat.S_IFDIR
FILE_MODE = stat.S_IFMT
GIT_LINK = objects.S_IFGITLINK
PEELED_REF_MARKER = '^{}'


log = logging.getLogger(__name__)


def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    return value.decode(settings.WIRE_ENCODING)


def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
            exc = exceptions.LookupException(org_exc=e)
            raise exc(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            exc = exceptions.VcsException(org_exc=e)
            raise exc(safe_str(e))
        except Exception as e:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exception from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper


class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """
    def __del__(self):
        if hasattr(self, 'object_store'):
            self.close()


class Repository(LibGit2Repo):

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()


class GitFactory(RepoFactory):
    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        if use_libgit2:
            return Repository(wire['path'])
        else:
            repo_path = str_to_dulwich(wire['path'])
            return Repo(repo_path)

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        return self.repo(wire, use_libgit2=True)


class GitRemote(RemoteBase):

    def __init__(self, factory):
        self._factory = factory
        self._bulk_methods = {
            "date": self.date,
            "author": self.author,
            "branch": self.branch,
            "message": self.message,
            "parents": self.parents,
            "_commit": self.revision,
        }

    def _wire_to_config(self, wire):
        if 'config' in wire:
            return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
        return {}
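    # e.g. a wire entry ('vcs', 'ssl_dir', '/etc/ssl/certs') -- values are
    # illustrative -- flattens to {'vcs_ssl_dir': '/etc/ssl/certs'}, the key
    # shape _remote_conf() looks up below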

    def _remote_conf(self, config):
        params = [
            '-c', 'core.askpass=""',
        ]
        ssl_cert_dir = config.get('vcs_ssl_dir')
        if ssl_cert_dir:
            params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
        return params

    @reraise_safe_exceptions
    def discover_git_version(self):
        stdout, _ = self.run_git_command(
            {}, ['--version'], _bare=True, _safe=True)
        prefix = 'git version'
        if stdout.startswith(prefix):
            stdout = stdout[len(prefix):]
        return stdout.strip()

    @reraise_safe_exceptions
    def is_empty(self, wire):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            try:
                has_head = repo.head.name
                if has_head:
                    return False

                # NOTE(marcink): check again using more expensive method
                return repo.is_empty
            except Exception:
                pass

        return True

    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id):
            try:
                repo_init = self._factory.repo_libgit2(wire)
                with repo_init as repo:
                    pass
            except pygit2.GitError:
                path = wire.get('path')
                tb = traceback.format_exc()
                log.debug("Invalid Git path `%s`, tb: %s", path, tb)
                return False

            return True
        return _assert_correct_path(context_uid, repo_id)

    @reraise_safe_exceptions
    def bare(self, wire):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            return repo.is_bare

    @reraise_safe_exceptions
    def blob_as_pretty_string(self, wire, sha):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob_obj = repo[sha]
            blob = blob_obj.data
            return blob

    @reraise_safe_exceptions
    def blob_raw_length(self, wire, sha):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _blob_raw_length(_repo_id, _sha):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[sha]
                return blob.size

        return _blob_raw_length(repo_id, sha)

    def _parse_lfs_pointer(self, raw_content):

        spec_string = 'version https://git-lfs.github.com/spec'
        if raw_content and raw_content.startswith(spec_string):
            pattern = re.compile(r"""
                                 (?:\n)?
                                 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
                                 ^oid[ ]sha256:(?P<oid_hash>[0-9a-f]{64})\n
                                 ^size[ ](?P<oid_size>[0-9]+)\n
                                 (?:\n)?
                                 """, re.VERBOSE | re.MULTILINE)
            match = pattern.match(raw_content)
            if match:
                return match.groupdict()

        return {}
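    # a minimal LFS pointer of the shape the regex above expects
    # (hash and size are illustrative):
    #
    #   version https://git-lfs.github.com/spec/v1
    #   oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393
    #   size 12345
    #
    # would parse to:
    #   {'spec_ver': 'v1', 'oid_hash': '4d7a...2393', 'oid_size': '12345'}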
248 |
|
248 | |||
249 | @reraise_safe_exceptions |
|
249 | @reraise_safe_exceptions | |
250 | def is_large_file(self, wire, commit_id): |
|
250 | def is_large_file(self, wire, commit_id): | |
251 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
251 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
252 |
|
252 | |||
253 | region = self._region(wire) |
|
253 | region = self._region(wire) | |
254 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
254 | @region.conditional_cache_on_arguments(condition=cache_on) | |
255 | def _is_large_file(_repo_id, _sha): |
|
255 | def _is_large_file(_repo_id, _sha): | |
256 | repo_init = self._factory.repo_libgit2(wire) |
|
256 | repo_init = self._factory.repo_libgit2(wire) | |
257 | with repo_init as repo: |
|
257 | with repo_init as repo: | |
258 | blob = repo[commit_id] |
|
258 | blob = repo[commit_id] | |
259 | if blob.is_binary: |
|
259 | if blob.is_binary: | |
260 | return {} |
|
260 | return {} | |
261 |
|
261 | |||
262 | return self._parse_lfs_pointer(blob.data) |
|
262 | return self._parse_lfs_pointer(blob.data) | |
263 |
|
263 | |||
264 | return _is_large_file(repo_id, commit_id) |
|
264 | return _is_large_file(repo_id, commit_id) | |
265 |
|
265 | |||
266 | @reraise_safe_exceptions |
|
266 | @reraise_safe_exceptions | |
267 | def is_binary(self, wire, tree_id): |
|
267 | def is_binary(self, wire, tree_id): | |
268 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
268 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
269 |
|
269 | |||
270 | region = self._region(wire) |
|
270 | region = self._region(wire) | |
271 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
271 | @region.conditional_cache_on_arguments(condition=cache_on) | |
272 | def _is_binary(_repo_id, _tree_id): |
|
272 | def _is_binary(_repo_id, _tree_id): | |
273 | repo_init = self._factory.repo_libgit2(wire) |
|
273 | repo_init = self._factory.repo_libgit2(wire) | |
274 | with repo_init as repo: |
|
274 | with repo_init as repo: | |
275 | blob_obj = repo[tree_id] |
|
275 | blob_obj = repo[tree_id] | |
276 | return blob_obj.is_binary |
|
276 | return blob_obj.is_binary | |
277 |
|
277 | |||
278 | return _is_binary(repo_id, tree_id) |
|
278 | return _is_binary(repo_id, tree_id) | |
279 |
|
279 | |||
280 | @reraise_safe_exceptions |
|
280 | @reraise_safe_exceptions | |
281 | def in_largefiles_store(self, wire, oid): |
|
281 | def in_largefiles_store(self, wire, oid): | |
282 | conf = self._wire_to_config(wire) |
|
282 | conf = self._wire_to_config(wire) | |
283 | repo_init = self._factory.repo_libgit2(wire) |
|
283 | repo_init = self._factory.repo_libgit2(wire) | |
284 | with repo_init as repo: |
|
284 | with repo_init as repo: | |
285 | repo_name = repo.path |
|
285 | repo_name = repo.path | |
286 |
|
286 | |||
287 | store_location = conf.get('vcs_git_lfs_store_location') |
|
287 | store_location = conf.get('vcs_git_lfs_store_location') | |
288 | if store_location: |
|
288 | if store_location: | |
289 |
|
289 | |||
290 | store = LFSOidStore( |
|
290 | store = LFSOidStore( | |
291 | oid=oid, repo=repo_name, store_location=store_location) |
|
291 | oid=oid, repo=repo_name, store_location=store_location) | |
292 | return store.has_oid() |
|
292 | return store.has_oid() | |
293 |
|
293 | |||
294 | return False |
|
294 | return False | |
295 |
|
295 | |||
296 | @reraise_safe_exceptions |
|
296 | @reraise_safe_exceptions | |
297 | def store_path(self, wire, oid): |
|
297 | def store_path(self, wire, oid): | |
298 | conf = self._wire_to_config(wire) |
|
298 | conf = self._wire_to_config(wire) | |
299 | repo_init = self._factory.repo_libgit2(wire) |
|
299 | repo_init = self._factory.repo_libgit2(wire) | |
300 | with repo_init as repo: |
|
300 | with repo_init as repo: | |
301 | repo_name = repo.path |
|
301 | repo_name = repo.path | |
302 |
|
302 | |||
303 | store_location = conf.get('vcs_git_lfs_store_location') |
|
303 | store_location = conf.get('vcs_git_lfs_store_location') | |
304 | if store_location: |
|
304 | if store_location: | |
305 | store = LFSOidStore( |
|
305 | store = LFSOidStore( | |
306 | oid=oid, repo=repo_name, store_location=store_location) |
|
306 | oid=oid, repo=repo_name, store_location=store_location) | |
307 | return store.oid_path |
|
307 | return store.oid_path | |
308 | raise ValueError('Unable to fetch oid with path {}'.format(oid)) |
|
308 | raise ValueError('Unable to fetch oid with path {}'.format(oid)) | |
309 |
|
309 | |||
310 | @reraise_safe_exceptions |
|
310 | @reraise_safe_exceptions | |
311 | def bulk_request(self, wire, rev, pre_load): |
|
311 | def bulk_request(self, wire, rev, pre_load): | |
312 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
312 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
313 | region = self._region(wire) |
|
313 | region = self._region(wire) | |
314 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
314 | @region.conditional_cache_on_arguments(condition=cache_on) | |
315 | def _bulk_request(_repo_id, _rev, _pre_load): |
|
315 | def _bulk_request(_repo_id, _rev, _pre_load): | |
316 | result = {} |
|
316 | result = {} | |
317 | for attr in pre_load: |
|
317 | for attr in pre_load: | |
318 | try: |
|
318 | try: | |
319 | method = self._bulk_methods[attr] |
|
319 | method = self._bulk_methods[attr] | |
320 | args = [wire, rev] |
|
320 | args = [wire, rev] | |
321 | result[attr] = method(*args) |
|
321 | result[attr] = method(*args) | |
322 | except KeyError as e: |
|
322 | except KeyError as e: | |
323 | raise exceptions.VcsException(e)( |
|
323 | raise exceptions.VcsException(e)( | |
324 | "Unknown bulk attribute: %s" % attr) |
|
324 | "Unknown bulk attribute: %s" % attr) | |
325 | return result |
|
325 | return result | |
326 |
|
326 | |||
327 | return _bulk_request(repo_id, rev, sorted(pre_load)) |
|
327 | return _bulk_request(repo_id, rev, sorted(pre_load)) | |
328 |
|
328 | |||
329 | def _build_opener(self, url): |
|
329 | def _build_opener(self, url): | |
330 | handlers = [] |
|
330 | handlers = [] | |
331 | url_obj = url_parser(url) |
|
331 | url_obj = url_parser(url) | |
332 | _, authinfo = url_obj.authinfo() |
|
332 | _, authinfo = url_obj.authinfo() | |
333 |
|
333 | |||
334 | if authinfo: |
|
334 | if authinfo: | |
335 | # create a password manager |
|
335 | # create a password manager | |
336 | passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() |
|
336 | passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() | |
337 | passmgr.add_password(*authinfo) |
|
337 | passmgr.add_password(*authinfo) | |
338 |
|
338 | |||
339 | handlers.extend((httpbasicauthhandler(passmgr), |
|
339 | handlers.extend((httpbasicauthhandler(passmgr), | |
340 | httpdigestauthhandler(passmgr))) |
|
340 | httpdigestauthhandler(passmgr))) | |
341 |
|
341 | |||
342 | return urllib.request.build_opener(*handlers) |
|
342 | return urllib.request.build_opener(*handlers) | |
343 |
|
343 | |||
344 | def _type_id_to_name(self, type_id): |
|
344 | def _type_id_to_name(self, type_id): | |
345 | return { |
|
345 | return { | |
346 | 1: b'commit', |
|
346 | 1: b'commit', | |
347 | 2: b'tree', |
|
347 | 2: b'tree', | |
348 | 3: b'blob', |
|
348 | 3: b'blob', | |
349 | 4: b'tag' |
|
349 | 4: b'tag' | |
350 | }[type_id] |
|
350 | }[type_id] | |
351 |
|
351 | |||
352 | @reraise_safe_exceptions |
|
352 | @reraise_safe_exceptions | |
353 | def check_url(self, url, config): |
|
353 | def check_url(self, url, config): | |
354 | url_obj = url_parser(url) |
|
354 | url_obj = url_parser(url) | |
355 | test_uri, _ = url_obj.authinfo() |
|
355 | test_uri, _ = url_obj.authinfo() | |
356 | url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd |
|
356 | url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd | |
357 | url_obj.query = obfuscate_qs(url_obj.query) |
|
357 | url_obj.query = obfuscate_qs(url_obj.query) | |
358 | cleaned_uri = str(url_obj) |
|
358 | cleaned_uri = str(url_obj) | |
359 | log.info("Checking URL for remote cloning/import: %s", cleaned_uri) |
|
359 | log.info("Checking URL for remote cloning/import: %s", cleaned_uri) | |
360 |
|
360 | |||
361 | if not test_uri.endswith('info/refs'): |
|
361 | if not test_uri.endswith('info/refs'): | |
362 | test_uri = test_uri.rstrip('/') + '/info/refs' |
|
362 | test_uri = test_uri.rstrip('/') + '/info/refs' | |
363 |
|
363 | |||
364 | o = self._build_opener(url) |
|
364 | o = self._build_opener(url) | |
365 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git |
|
365 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git | |
366 |
|
366 | |||
367 | q = {"service": 'git-upload-pack'} |
|
367 | q = {"service": 'git-upload-pack'} | |
368 | qs = '?%s' % urllib.parse.urlencode(q) |
|
368 | qs = '?%s' % urllib.parse.urlencode(q) | |
369 | cu = "%s%s" % (test_uri, qs) |
|
369 | cu = "%s%s" % (test_uri, qs) | |
370 | req = urllib.request.Request(cu, None, {}) |
|
370 | req = urllib.request.Request(cu, None, {}) | |
371 |
|
371 | |||
372 | try: |
|
372 | try: | |
373 | log.debug("Trying to open URL %s", cleaned_uri) |
|
373 | log.debug("Trying to open URL %s", cleaned_uri) | |
374 | resp = o.open(req) |
|
374 | resp = o.open(req) | |
375 | if resp.code != 200: |
|
375 | if resp.code != 200: | |
376 | raise exceptions.URLError()('Return Code is not 200') |
|
376 | raise exceptions.URLError()('Return Code is not 200') | |
377 | except Exception as e: |
|
377 | except Exception as e: | |
378 | log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True) |
|
378 | log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True) | |
379 | # means it cannot be cloned |
|
379 | # means it cannot be cloned | |
380 | raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e)) |
|
380 | raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e)) | |
381 |
|
381 | |||
382 | # now detect if it's proper git repo |
|
382 | # now detect if it's proper git repo | |
383 | gitdata = resp.read() |
|
383 | gitdata = resp.read() | |
384 | if 'service=git-upload-pack' in gitdata: |
|
384 | if 'service=git-upload-pack' in gitdata: | |
385 | pass |
|
385 | pass | |
386 | elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata): |
|
386 | elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata): | |
387 | # old style git can return some other format ! |
|
387 | # old style git can return some other format ! | |
388 | pass |
|
388 | pass | |
389 | else: |
|
389 | else: | |
390 | raise exceptions.URLError()( |
|
390 | raise exceptions.URLError()( | |
391 | "url [%s] does not look like an git" % (cleaned_uri,)) |
|
391 | "url [%s] does not look like an git" % (cleaned_uri,)) | |
392 |
|
392 | |||
393 | return True |
|
393 | return True | |
394 |
|
394 | |||
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so-called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)

    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = list(filter(filter_with, list(self.get_refs(wire).items())))
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)

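    # Editor sketch (hypothetical SHAs): given get_refs() returning
    #   {'refs/heads/main': 'aaa1...', 'refs/tags/v1.0': 'aaa1...'}
    # _branch(ctx, repo, 'aaa1...') yields ['main']; only 'refs/heads/*'
    # entries pass the regex, and the prefix is stripped from the result.
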
    @reraise_safe_exceptions
    def commit_branches(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _commit_branches(_context_uid, _repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                branches = [x for x in repo.branches.with_commit(_commit_id)]
                return branches

        return _commit_branches(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def add_object(self, wire, content):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob = objects.Blob()
            blob.set_raw_string(content)
            repo.object_store.add_object(blob)
            return blob.id

    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        # Defines the root tree
        class _Root(object):
            def __repr__(self):
                return 'ROOT TREE'
        ROOT = _Root()

        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populate it with blobs

        if commit_tree and repo[commit_tree]:
            git_commit = repo[commit_data['parents'][0]]
            commit_tree = repo[git_commit.tree]  # root tree
        else:
            commit_tree = objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = list(map(safe_str, dirpath and dirpath.split('/') or []))
            parent = commit_tree
            ancestors = [('', parent)]

            # Try to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stop
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, update parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is the deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree  # start with top-level
            trees = [{'tree': tree, 'path': ROOT}]
            # Traverse deep into the forest...
            # resolve the final tree by iterating the path.
            # e.g. a/b/c.txt will get
            # - root as tree, then
            # - 'a' as tree,
            # - 'b' as tree,
            # - stop at 'c' as blob.
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append({'tree': obj, 'path': path})
                        tree = obj
                except KeyError:
                    break
            # PROBLEM: we're not editing the same reference tree object
            # Cut down the blob and all rotten trees on the way back...
            for path, tree_data in reversed(list(zip(paths, trees))):
                tree = tree_data['tree']
                tree.__delitem__(path)
                # This operation edits the tree, we need to mark a new commit back

                if len(tree) > 0:
                    # This tree still has elements - don't remove it or any
                    # of its parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.items():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id

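    # Editor sketch of the input shapes this method reads (inferred from the
    # code above, not a documented contract; the field values are hypothetical):
    #
    #   commit_data = {'parents': ['<sha>'], 'author': 'A <a@b>',
    #                  'committer': 'A <a@b>', 'message': 'add file', ...}
    #   updated = [{'path': 'docs/readme.md', 'node_path': 'readme.md',
    #               'content': 'hello', 'mode': 0o100644}]
    #   removed = ['old/file.txt']
    #
    # Directory entries are written with DIR_STAT, file entries with the
    # per-node 'mode'.
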
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # the refs filtered by the `determine_wants` function. We need to filter
        # the result as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

        if refs and not update_after:
            # mikhail: explicitly set the head to the last ref.
            repo["HEAD"] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs

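    # Editor sketch: with refs=['refs/heads/main'], the requested-wants filter
    # above narrows the fetch to the matching SHAs only:
    #
    #   determine_wants_requested({'refs/heads/main': 'aaa1', 'refs/tags/v1': 'bbb2'})
    #   # -> ['aaa1']
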
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs

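    # Editor sketch: each surviving `ls-remote` line such as
    #   'aaa1...\trefs/heads/main'
    # becomes the refspec 'refs/heads/main:refs/heads/main', and the refspecs
    # are handed to `git fetch` in chunks of 4096 to keep the command line
    # within OS argument-length limits.
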
    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

    @reraise_safe_exceptions
    def get_remote_refs(self, wire, url):
        repo = Repo(url)
        return repo.get_refs()

    @reraise_safe_exceptions
    def get_description(self, wire):
        repo = self._factory.repo(wire)
        return repo.get_description()

    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs

    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # if we passed in a reference and it parsed, we're not dealing
                # with a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): an empty error doesn't give us any meaningful
                        # information here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type_str

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)

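    # Editor sketch of the return shape (hypothetical SHA):
    #   {'id': 'aaa1...', 'type': 'commit', 'commit_id': 'aaa1...', 'idx': 0}
    # Tags are resolved to their target first, and the dangling-commit check
    # is skipped whenever the SHA was reached through a ref or
    # maybe_unreachable is set.
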
    @reraise_safe_exceptions
    def get_refs(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_refs(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                regex = re.compile('^refs/(heads|tags)/')
                return {x.name: x.target.hex for x in
                        [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}

        return _get_refs(context_uid, repo_id)

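    # Editor sketch of the return shape (hypothetical SHAs): only heads and
    # tags survive the regex, e.g.
    #   {'refs/heads/main': 'aaa1...', 'refs/tags/v1.0': 'bbb2...'}
    # remote-tracking and other reference namespaces are dropped.
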
    @reraise_safe_exceptions
    def get_branch_pointers(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_branch_pointers(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            regex = re.compile('^refs/heads')
            with repo_init as repo:
                branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
                return {x.target.hex: x.shorthand for x in branches}

        return _get_branch_pointers(context_uid, repo_id)

    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)

    @reraise_safe_exceptions
    def init(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)

    @reraise_safe_exceptions
    def init_bare(self, wire):
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)

    @reraise_safe_exceptions
    def revision(self, wire, rev):

        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects themselves don't have a tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)

    @reraise_safe_exceptions
    def date(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _date(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'commit_time'):
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
                else:
                    commit = commit.get_object()
                    commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset

                # TODO(marcink): check dulwich difference of offset vs timezone
                return [commit_time, commit_time_offset]
        return _date(repo_id, commit_id)

    @reraise_safe_exceptions
    def author(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _author(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                if hasattr(commit, 'author'):
                    author = commit.author
                else:
                    author = commit.get_object().author

                if author.email:
                    return "{} <{}>".format(author.name, author.email)

                try:
                    return "{}".format(author.name)
                except Exception:
                    return "{}".format(safe_unicode(author.raw_name))

        return _author(repo_id, commit_id)

    @reraise_safe_exceptions
    def message(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _message(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                return commit.message
        return _message(repo_id, commit_id)

    @reraise_safe_exceptions
    def parents(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _parents(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]
                if hasattr(commit, 'parent_ids'):
                    parent_ids = commit.parent_ids
                else:
                    parent_ids = commit.get_object().parent_ids

                return [x.hex for x in parent_ids]
        return _parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def children(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _children(_repo_id, _commit_id):
            output, __ = self.run_git_command(
                wire, ['rev-list', '--all', '--children'])

            child_ids = []
            pat = re.compile(r'^%s' % commit_id)
            for line in output.splitlines():
                if pat.match(line):
                    found_ids = line.split(' ')[1:]
                    child_ids.extend(found_ids)

            return child_ids
        return _children(repo_id, commit_id)

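    # Editor sketch: `git rev-list --all --children` emits lines of the form
    #   '<commit> <child-1> <child-2> ...'
    # so for the line 'aaa bbb ccc' (hypothetical SHAs) and commit_id 'aaa',
    # _children returns ['bbb', 'ccc'].
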
    @reraise_safe_exceptions
    def set_refs(self, wire, key, value):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo.references.create(key, value, force=True)

    @reraise_safe_exceptions
    def create_branch(self, wire, branch_name, commit_id, force=False):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            commit = repo[commit_id]

            if force:
                repo.branches.local.create(branch_name, commit, force=force)
            elif not repo.branches.get(branch_name):
                # create only if the branch doesn't exist yet
                repo.branches.local.create(branch_name, commit, force=force)

    @reraise_safe_exceptions
    def remove_ref(self, wire, key):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo.references.delete(key)

    @reraise_safe_exceptions
    def tag_remove(self, wire, tag_name):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            key = 'refs/tags/{}'.format(tag_name)
            repo.references.delete(key)

    @reraise_safe_exceptions
    def tree_changes(self, wire, source_id, target_id):
        # TODO(marcink): remove this, it seems to be used only by tests
        repo = self._factory.repo(wire)
        source = repo[source_id].tree if source_id else None
        target = repo[target_id].tree
        result = repo.object_store.tree_changes(source, target)
        return list(result)

    @reraise_safe_exceptions
    def tree_and_type_for_path(self, wire, commit_id, path):

        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]
                try:
                    tree = commit.tree[path]
                except KeyError:
                    return None, None, None

                return tree.id.hex, tree.type_str, tree.filemode
        return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)

    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_items(_repo_id, _tree_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                result = []
                for item in tree:
                    item_sha = item.hex
                    item_mode = item.filemode
                    item_type = item.type_str

                    if item_type == 'commit':
                        # NOTE(marcink): submodules are translated to 'link' for backward compat
                        item_type = 'link'

                    result.append((item.name, item_mode, item_sha, item_type))
                return result
        return _tree_items(repo_id, tree_id)

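    # Editor sketch of one result tuple (hypothetical SHA):
    #   ('setup.py', 33188, 'aaa1...', 'blob')
    # where 33188 == 0o100644, i.e. a regular non-executable file; submodule
    # entries come back with type 'link' as noted above.
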
    @reraise_safe_exceptions
    def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        """
        Old version that uses subprocess to call diff
        """

        flags = [
            '-U%s' % context, '--patch',
            '--binary',
            '--find-renames',
            '--no-indent-heuristic',
            # '--indent-heuristic',
            # '--full-index',
            # '--abbrev=40'
        ]

        if opt_ignorews:
            flags.append('--ignore-all-space')

        if commit_id_1 == self.EMPTY_COMMIT:
            cmd = ['show'] + flags + [commit_id_2]
        else:
            cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

        if file_filter:
            cmd.extend(['--', file_filter])

        diff, __ = self.run_git_command(wire, cmd)
        # If we used the 'show' command, strip the first few lines (until the
        # actual diff starts)
        if commit_id_1 == self.EMPTY_COMMIT:
            lines = diff.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append a newline just like the 'diff' command does
            diff = '\n'.join(lines[x:]) + '\n'
        return diff

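    # Editor sketch: for two ordinary commits this assembles roughly
    #   git diff -U<context> --patch --binary --find-renames \
    #       --no-indent-heuristic <commit_id_1> <commit_id_2> [-- <file_filter>]
    # while a diff against EMPTY_COMMIT degrades to `git show` on the single
    # commit, with the non-diff preamble stripped.
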
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            swap = True
            flags = 0
            flags |= pygit2.GIT_DIFF_SHOW_BINARY

            if opt_ignorews:
                flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

            if commit_id_1 == self.EMPTY_COMMIT:
                comm1 = repo[commit_id_2]
                diff_obj = comm1.tree.diff_to_tree(
                    flags=flags, context_lines=context, swap=swap)

            else:
                comm1 = repo[commit_id_2]
                comm2 = repo[commit_id_1]
                diff_obj = comm1.tree.diff_to_tree(
                    comm2.tree, flags=flags, context_lines=context, swap=swap)
            similar_flags = 0
            similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
            diff_obj.find_similar(flags=similar_flags)

            if file_filter:
                for p in diff_obj:
                    if p.delta.old_file.path == file_filter:
                        return p.patch or ''
                # no matching path == no diff
                return ''
            return diff_obj.patch or ''

    @reraise_safe_exceptions
    def node_history(self, wire, commit_id, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
            # optimize for n==1, rev-list is much faster for that use-case
            if limit == 1:
                cmd = ['rev-list', '-1', commit_id, '--', path]
            else:
                cmd = ['log']
                if limit:
                    cmd.extend(['-n', str(safe_int(limit, 0))])
                cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])

            output, __ = self.run_git_command(wire, cmd)
            commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)

            return [x for x in commit_ids]
        return _node_history(context_uid, repo_id, commit_id, path, limit)

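    # Editor sketch of the commands generated above:
    #   limit == 1:  git rev-list -1 <commit_id> -- <path>
    #   limit == 5:  git log -n 5 '--pretty=format: %H' -s <commit_id> -- <path>
    # and the 40-hex SHAs are then scraped out of either output with a regex.
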
    @reraise_safe_exceptions
    def node_annotate(self, wire, commit_id, path):

        cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
        # -l ==> outputs long shas (and we need all 40 characters)
        # --root ==> doesn't put '^' character for boundaries
        # -r commit_id ==> blames for the given commit
        output, __ = self.run_git_command(wire, cmd)

        result = []
        for i, blame_line in enumerate(output.split('\n')[:-1]):
            line_no = i + 1
            commit_id, line = re.split(r' ', blame_line, 1)
            result.append((line_no, commit_id, line))
        return result

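# The blame output consumed above is one `<40-char sha> <line text>` pair
# per line. A self-contained parse of a canned two-line sample (dummy shas):

import re

sample = 'f' * 40 + ' first line\n' + 'e' * 40 + ' second line\n'

result = []
for i, blame_line in enumerate(sample.split('\n')[:-1]):
    sha, line = re.split(r' ', blame_line, 1)
    result.append((i + 1, sha, line))

print(result)  # [(1, 'ffff...', 'first line'), (2, 'eeee...', 'second line')]
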
    @reraise_safe_exceptions
    def update_server_info(self, wire):
        repo = self._factory.repo(wire)
        update_server_info(repo)

    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire):

        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id):

            cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
            try:
                output, __ = self.run_git_command(wire, cmd)
                return output.splitlines()
            except Exception:
                # Can be raised for empty repositories
                return []
        return _get_all_commit_ids(context_uid, repo_id)

    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # GIT_DIR must be cleared, otherwise git would resolve the wrong repository
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
        _opts = {'env': gitenv, 'shell': False}

        proc = None
        try:
            _opts.update(opts)
            proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(proc), ''.join(proc.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was: %s\n"
                      "Call options: %s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)
        finally:
            if proc:
                proc.close()

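# Worth noting above: GIT_DIR is scrubbed and global config reads are
# disabled, so the spawned git only sees the repository given via cwd.
# A condensed standalone version of that preparation (placeholder path):

import os
import subprocess

def run_git(repo_dir, args):
    env = os.environ.copy()
    env.pop('GIT_DIR', None)  # a stray GIT_DIR would target the wrong repo
    env['GIT_CONFIG_NOGLOBAL'] = '1'
    env['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
    cmd = ['git', '-c', 'core.quotepath=false'] + list(args)
    return subprocess.run(cmd, cwd=repo_dir, env=env, capture_output=True,
                          text=True, check=True).stdout

# e.g. run_git('/tmp/repo', ['rev-parse', 'HEAD'])
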
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        from vcsserver.hook_utils import install_git_hooks
        bare = self.bare(wire)
        path = wire['path']
        return install_git_hooks(path, bare, force_create=force)

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        from vcsserver.hook_utils import (
            get_git_pre_hook_version, get_git_post_hook_version)
        bare = self.bare(wire)
        path = wire['path']
        return {
            'pre_version': get_git_pre_hook_version(path, bare),
            'post_version': get_git_post_hook_version(path, bare),
        }

    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        log.debug('Setting HEAD to `refs/heads/%s`', head_name)
        # arguments must not carry literal quotes: the command runs with
        # shell=False, so quote characters would become part of the ref name
        cmd = ['symbolic-ref', 'HEAD', 'refs/heads/%s' % head_name]
        output, __ = self.run_git_command(wire, cmd)
        return [head_name] + output.splitlines()

    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id):

        def file_walker(_commit_id, path):
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]

                if path in ['', '/']:
                    tree = commit.tree
                else:
                    tree = commit.tree[path.rstrip('/')]
                tree_id = tree.id.hex
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                index = LibGit2Index.Index()
                index.read_tree(tree)
                file_iter = index

                for fn in file_iter:
                    file_path = fn.path
                    mode = fn.mode
                    is_link = stat.S_ISLNK(mode)
                    if mode == pygit2.GIT_FILEMODE_COMMIT:
                        log.debug('Skipping path %s as a commit node', file_path)
                        continue
                    yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)

        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
                            archive_dir_name, commit_id)
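
# file_walker above flattens a commit's tree by loading it into an
# in-memory index. The same trick in isolation, assuming a repository
# at /tmp/repo with a valid HEAD:

import pygit2

repo = pygit2.Repository('/tmp/repo')
commit = repo[repo.head.target]
index = pygit2.Index()          # in-memory index, no working copy needed
index.read_tree(commit.tree)    # recursively lists every blob in the tree
for entry in index:
    print(oct(entry.mode), entry.path)
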
@@ -1,1047 +1,1046 b''
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-import functools
+
import io
import logging
-import os
import stat
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
import traceback

from hgext import largefiles, rebase, purge
-from hgext.strip import strip as hgext_strip
+
from mercurial import commands
from mercurial import unionrepo
from mercurial import verify
from mercurial import repair

import vcsserver
from vcsserver import exceptions
from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
from vcsserver.hgcompat import (
    archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
    hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
    makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
    patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
    RepoLookupError, InterventionRequired, RequirementError,
-    alwaysmatcher, patternmatcher, hgutil)
+    alwaysmatcher, patternmatcher, hgutil, hgext_strip)
from vcsserver.vcs_base import RemoteBase

log = logging.getLogger(__name__)


def make_ui_from_config(repo_config):

    class LoggingUI(ui.ui):
        def status(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            log.warning(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    baseui.setconfig('ui', 'paginate', 'never')
    # for better error reporting from Mercurial
    baseui.setconfig('ui', 'message-output', 'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from the repository's hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from a largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui


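# make_ui_from_config() consumes (section, option, value) triples straight
# from the wire config. A minimal sketch of driving it; the config values
# here are hypothetical:

repo_config = [
    ('ui', 'username', 'vcsserver'),
    ('phases', 'publish', 'false'),
]
baseui = make_ui_from_config(repo_config)
# the helper force-enables quiet mode regardless of the input triples
assert baseui.configbool('ui', 'quiet')
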
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e))
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e))
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e))
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e))

            raise
    return wrapper


class MercurialFactory(RepoFactory):
    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            new_config = []
            for section, option, value in config:
                if section == 'hooks' and option in hooks_to_clean:
                    continue
                new_config.append((section, option, value))
            config = new_config

        baseui = make_ui_from_config(config)
        return baseui

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return instance(baseui, wire["path"], create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)


def patch_ui_message_output(baseui):
    baseui.setconfig('ui', 'quiet', 'false')
    output = io.BytesIO()

    def write(data, **unused_kwargs):
        output.write(data)

    baseui.status = write
    baseui.write = write
    baseui.warn = write
    baseui.debug = write

    return baseui, output


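# patch_ui_message_output() redirects everything Mercurial would print
# into a BytesIO buffer so it can be shipped back to the client. A quick
# sketch of the round trip:

baseui, output = patch_ui_message_output(make_ui_from_config([]))
baseui.write(b'hello from hg\n')   # the patched ui collects bytes
print(output.getvalue())           # b'hello from hg\n'
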
class HgRemote(RemoteBase):

    def __init__(self, factory):
        self._factory = factory
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "obsolete": self.ctx_obsolete,
            "phase": self.ctx_phase,
            "hidden": self.ctx_hidden,
            "_file_paths": self.ctx_list,
        }

    def _get_ctx(self, repo, ref):
        return get_ctx(repo, ref)

    @reraise_safe_exceptions
    def discover_hg_version(self):
        from mercurial import util
        return util.version()

    @reraise_safe_exceptions
    def is_empty(self, wire):
        repo = self._factory.repo(wire)

        try:
            return len(repo) == 0
        except Exception:
            log.exception("failed to read object_store")
            return False

    @reraise_safe_exceptions
    def bookmarks(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return dict(repo._bookmarks)

        return _bookmarks(context_uid, repo_id)

    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[branch_name] = tip
                if closed and is_closed:
                    bt[branch_name] = tip

            return bt

        return _branches(context_uid, repo_id, normal, closed)

    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        'Unknown bulk attribute: "%s"' % attr)
            return result

        return _bulk_request(repo_id, commit_id, sorted(pre_load))

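# bulk_request() resolves several commit attributes in one cached round
# trip and fails fast on unknown keys. The dispatch pattern in miniature,
# with toy stand-ins for the real ctx_* methods:

class Bulk(object):
    def __init__(self):
        self._methods = {
            'author': lambda commit_id: 'alice',
            'branch': lambda commit_id: 'default',
        }

    def request(self, commit_id, pre_load):
        result = {}
        for attr in sorted(pre_load):
            try:
                result[attr] = self._methods[attr](commit_id)
            except KeyError:
                raise ValueError('Unknown bulk attribute: "%s"' % attr)
        return result

print(Bulk().request('deadbeef', ['branch', 'author']))
# {'author': 'alice', 'branch': 'default'}
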
    @reraise_safe_exceptions
    def ctx_branch(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_branch(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.branch()
        return _ctx_branch(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()
        return _ctx_date(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_description(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.description()

    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_list(self, path, revision):
        repo = self._factory.repo(path)
        ctx = self._get_ctx(repo, revision)
        return list(ctx)

    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # public=0, draft=1, secret=3
            return ctx.phase()
        return _ctx_phase(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()
        return _ctx_obsolete(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()
        return _ctx_hidden(context_uid, repo_id, commit_id)

    @reraise_safe_exceptions
    def ctx_substate(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.substate

    @reraise_safe_exceptions
    def ctx_status(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        status = repo[ctx.p1().node()].status(other=ctx.node())
        # the status object (an odd, custom named tuple in mercurial) is not
        # correctly serializable; we turn it into a plain list, as the
        # underlying API expects a list
        return list(status)

    @reraise_safe_exceptions
    def ctx_user(self, wire, revision):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        return ctx.user()

    @reraise_safe_exceptions
    def check_url(self, url, config):
        _proto = None
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib.request.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.parse.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib.request.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True

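# check_url() masks the password and query string before anything is
# logged. The same masking done with only the standard library (urlsplit
# in place of Mercurial's url parser; the example URL is made up):

from urllib.parse import urlsplit, urlunsplit

def mask_credentials(url):
    parts = urlsplit(url)
    netloc = parts.netloc
    if parts.password:
        netloc = netloc.replace(':%s@' % parts.password, ':*****@')
    return urlunsplit((parts.scheme, netloc, parts.path, parts.query,
                       parts.fragment))

print(mask_credentials('https://bob:secret@example.com/repo'))
# https://bob:*****@example.com/repo
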
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
        repo = self._factory.repo(wire)

        if file_filter:
            match_filter = match(file_filter[0], '', [file_filter[1]])
        else:
            match_filter = file_filter
        opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)

        try:
            return "".join(patch.diff(
                repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
        except RepoLookupError as e:
            raise exceptions.LookupException(e)()

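# diff() above delegates to Mercurial's patch.diff with a matcher plus
# diffopts. Roughly the same call from a plain script; the repository
# path is a placeholder, Mercurial's internal API expects bytes, and
# leaving node1 unset diffs against the parent of node2:

from mercurial import hg, mdiff, patch
from mercurial import ui as uimod

repo = hg.repository(uimod.ui.load(), b'/tmp/repo')
opts = mdiff.diffopts(git=True, context=3, showfunc=True)
text = b''.join(patch.diff(repo, node2=b'tip', opts=opts))
print(text.decode('utf8', 'replace'))
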
    @reraise_safe_exceptions
    def node_history(self, wire, revision, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
            repo = self._factory.repo(wire)

            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            def history_iter():
                limit_rev = fctx.rev()
                for obj in reversed(list(fctx.filelog())):
                    obj = fctx.filectx(obj)
                    ctx = obj.changectx()
                    if ctx.hidden() or ctx.obsolete():
                        continue

                    if limit_rev >= obj.rev():
                        yield obj

            history = []
            for cnt, obj in enumerate(history_iter()):
                if limit and cnt >= limit:
                    break
                history.append(hex(obj.node()))

            return [x for x in history]
        return _node_history(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def node_history_untill(self, wire, revision, path, limit):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
            # the inner function must accept all five arguments it is
            # invoked with below; the previous two-argument signature
            # raised a TypeError on every call
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            file_log = list(fctx.filelog())
            if limit:
                # Limit to the last n items
                file_log = file_log[-limit:]

            return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
        return _node_history_until(context_uid, repo_id, revision, path, limit)

    @reraise_safe_exceptions
    def fctx_annotate(self, wire, revision, path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(path)

        result = []
        for i, annotate_obj in enumerate(fctx.annotate(), 1):
            ln_no = i
            sha = hex(annotate_obj.fctx.node())
            content = annotate_obj.text
            result.append((ln_no, sha, content))
        return result

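# fctx.annotate() yields annotateline objects; the loop above flattens
# them into (line_no, sha, content) tuples. The wire format for a
# two-line file ends up looking like this (dummy 40-char shas):

annotated = [
    (1, 'a' * 40, b'first line\n'),
    (2, 'b' * 40, b'second line\n'),
]
for line_no, sha, content in annotated:
    print('%4d %s %s' % (line_no, sha[:12], content.decode().rstrip()))
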
    @reraise_safe_exceptions
    def fctx_node_data(self, wire, revision, path):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        fctx = ctx.filectx(path)
        return fctx.data()

    @reraise_safe_exceptions
    def fctx_flags(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_flags(_repo_id, _commit_id, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.flags()

        return _fctx_flags(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def fctx_size(self, wire, commit_id, path):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_size(_repo_id, _revision, _path):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.size()
        return _fctx_size(repo_id, commit_id, path)

    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire, name):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id, _name):
            repo = self._factory.repo(wire)
            repo = repo.filtered(name)
            revs = [hex(x[7]) for x in repo.changelog.index]
            return revs
        return _get_all_commit_ids(context_uid, repo_id, name)

573 | @reraise_safe_exceptions |
|
572 | @reraise_safe_exceptions | |
574 | def get_config_value(self, wire, section, name, untrusted=False): |
|
573 | def get_config_value(self, wire, section, name, untrusted=False): | |
575 | repo = self._factory.repo(wire) |
|
574 | repo = self._factory.repo(wire) | |
576 | return repo.ui.config(section, name, untrusted=untrusted) |
|
575 | return repo.ui.config(section, name, untrusted=untrusted) | |
577 |
|
576 | |||
578 | @reraise_safe_exceptions |
|
577 | @reraise_safe_exceptions | |
579 | def is_large_file(self, wire, commit_id, path): |
|
578 | def is_large_file(self, wire, commit_id, path): | |
580 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
579 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
581 | region = self._region(wire) |
|
580 | region = self._region(wire) | |
582 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
581 | @region.conditional_cache_on_arguments(condition=cache_on) | |
583 | def _is_large_file(_context_uid, _repo_id, _commit_id, _path): |
|
582 | def _is_large_file(_context_uid, _repo_id, _commit_id, _path): | |
584 | return largefiles.lfutil.isstandin(path) |
|
583 | return largefiles.lfutil.isstandin(path) | |
585 |
|
584 | |||
586 | return _is_large_file(context_uid, repo_id, commit_id, path) |
|
585 | return _is_large_file(context_uid, repo_id, commit_id, path) | |
587 |
|
586 | |||
588 | @reraise_safe_exceptions |
|
587 | @reraise_safe_exceptions | |
589 | def is_binary(self, wire, revision, path): |
|
588 | def is_binary(self, wire, revision, path): | |
590 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
589 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
591 |
|
590 | |||
592 | region = self._region(wire) |
|
591 | region = self._region(wire) | |
593 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
592 | @region.conditional_cache_on_arguments(condition=cache_on) | |
594 | def _is_binary(_repo_id, _sha, _path): |
|
593 | def _is_binary(_repo_id, _sha, _path): | |
595 | repo = self._factory.repo(wire) |
|
594 | repo = self._factory.repo(wire) | |
596 | ctx = self._get_ctx(repo, revision) |
|
595 | ctx = self._get_ctx(repo, revision) | |
597 | fctx = ctx.filectx(path) |
|
596 | fctx = ctx.filectx(path) | |
598 | return fctx.isbinary() |
|
597 | return fctx.isbinary() | |
599 |
|
598 | |||
600 | return _is_binary(repo_id, revision, path) |
|
599 | return _is_binary(repo_id, revision, path) | |
601 |
|
600 | |||
602 | @reraise_safe_exceptions |
|
601 | @reraise_safe_exceptions | |
603 | def in_largefiles_store(self, wire, sha): |
|
602 | def in_largefiles_store(self, wire, sha): | |
604 | repo = self._factory.repo(wire) |
|
603 | repo = self._factory.repo(wire) | |
605 | return largefiles.lfutil.instore(repo, sha) |
|
604 | return largefiles.lfutil.instore(repo, sha) | |
606 |
|
605 | |||
607 | @reraise_safe_exceptions |
|
606 | @reraise_safe_exceptions | |
608 | def in_user_cache(self, wire, sha): |
|
607 | def in_user_cache(self, wire, sha): | |
609 | repo = self._factory.repo(wire) |
|
608 | repo = self._factory.repo(wire) | |
610 | return largefiles.lfutil.inusercache(repo.ui, sha) |
|
609 | return largefiles.lfutil.inusercache(repo.ui, sha) | |
611 |
|
610 | |||
612 | @reraise_safe_exceptions |
|
611 | @reraise_safe_exceptions | |
613 | def store_path(self, wire, sha): |
|
612 | def store_path(self, wire, sha): | |
614 | repo = self._factory.repo(wire) |
|
613 | repo = self._factory.repo(wire) | |
615 | return largefiles.lfutil.storepath(repo, sha) |
|
614 | return largefiles.lfutil.storepath(repo, sha) | |
616 |
|
615 | |||
617 | @reraise_safe_exceptions |
|
616 | @reraise_safe_exceptions | |
618 | def link(self, wire, sha, path): |
|
617 | def link(self, wire, sha, path): | |
619 | repo = self._factory.repo(wire) |
|
618 | repo = self._factory.repo(wire) | |
620 | largefiles.lfutil.link( |
|
619 | largefiles.lfutil.link( | |
621 | largefiles.lfutil.usercachepath(repo.ui, sha), path) |
|
620 | largefiles.lfutil.usercachepath(repo.ui, sha), path) | |
622 |
|
621 | |||
623 | @reraise_safe_exceptions |
|
622 | @reraise_safe_exceptions | |
624 | def localrepository(self, wire, create=False): |
|
623 | def localrepository(self, wire, create=False): | |
625 | self._factory.repo(wire, create=create) |
|
624 | self._factory.repo(wire, create=create) | |
626 |
|
625 | |||
627 | @reraise_safe_exceptions |
|
626 | @reraise_safe_exceptions | |
628 | def lookup(self, wire, revision, both): |
|
627 | def lookup(self, wire, revision, both): | |
629 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
628 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
630 |
|
629 | |||
631 | region = self._region(wire) |
|
630 | region = self._region(wire) | |
632 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
631 | @region.conditional_cache_on_arguments(condition=cache_on) | |
633 | def _lookup(_context_uid, _repo_id, _revision, _both): |
|
632 | def _lookup(_context_uid, _repo_id, _revision, _both): | |
634 |
|
633 | |||
635 | repo = self._factory.repo(wire) |
|
634 | repo = self._factory.repo(wire) | |
636 | rev = _revision |
|
635 | rev = _revision | |
637 | if isinstance(rev, int): |
|
636 | if isinstance(rev, int): | |
638 | # NOTE(marcink): |
|
637 | # NOTE(marcink): | |
639 | # since Mercurial doesn't support negative indexes properly |
|
638 | # since Mercurial doesn't support negative indexes properly | |
640 | # we need to shift accordingly by one to get proper index, e.g |
|
639 | # we need to shift accordingly by one to get proper index, e.g | |
641 | # repo[-1] => repo[-2] |
|
640 | # repo[-1] => repo[-2] | |
642 | # repo[0] => repo[-1] |
|
641 | # repo[0] => repo[-1] | |
643 | if rev <= 0: |
|
642 | if rev <= 0: | |
644 | rev = rev + -1 |
|
643 | rev = rev + -1 | |
645 | try: |
|
644 | try: | |
646 | ctx = self._get_ctx(repo, rev) |
|
645 | ctx = self._get_ctx(repo, rev) | |
647 | except (TypeError, RepoLookupError) as e: |
|
646 | except (TypeError, RepoLookupError) as e: | |
648 | e._org_exc_tb = traceback.format_exc() |
|
647 | e._org_exc_tb = traceback.format_exc() | |
649 | raise exceptions.LookupException(e)(rev) |
|
648 | raise exceptions.LookupException(e)(rev) | |
650 | except LookupError as e: |
|
649 | except LookupError as e: | |
651 | e._org_exc_tb = traceback.format_exc() |
|
650 | e._org_exc_tb = traceback.format_exc() | |
652 | raise exceptions.LookupException(e)(e.name) |
|
651 | raise exceptions.LookupException(e)(e.name) | |
653 |
|
652 | |||
654 | if not both: |
|
653 | if not both: | |
655 | return ctx.hex() |
|
654 | return ctx.hex() | |
656 |
|
655 | |||
657 | ctx = repo[ctx.hex()] |
|
656 | ctx = repo[ctx.hex()] | |
658 | return ctx.hex(), ctx.rev() |
|
657 | return ctx.hex(), ctx.rev() | |
659 |
|
658 | |||
660 | return _lookup(context_uid, repo_id, revision, both) |
|
659 | return _lookup(context_uid, repo_id, revision, both) | |
661 |
|
660 | |||
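The index shift in _lookup is easy to misread, so here is a tiny self-contained illustration of the arithmetic, with a plain Python list standing in for a changelog (the list and names are made up for the example):

# illustrative sketch -- not part of the changeset
changelog = ['r0', 'r1', 'r2', 'r3', 'tip']   # pretend revisions

def shift(rev):
    # the same arithmetic as in _lookup above
    if rev <= 0:
        rev = rev + -1
    return rev

assert changelog[shift(0)] == 'tip'    # repo[0]  => repo[-1]
assert changelog[shift(-1)] == 'r3'    # repo[-1] => repo[-2]
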
    @reraise_safe_exceptions
    def sync_push(self, wire, url):
        if not self.check_url(url, wire['config']):
            return

        repo = self._factory.repo(wire)

        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        bookmarks = list(dict(repo._bookmarks).keys())
        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        return exchange.push(
            repo, remote, newbranch=True, bookmarks=bookmarks).cgresult

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, rev)
        return ctx.rev()

    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        cache_on, context_uid, repo_id = self._cache_on(wire)

        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            repo = self._factory.repo(wire)
            revisions = [rev for rev in revrange(repo, commit_filter)]
            return revisions

        return _rev_range(context_uid, repo_id, sorted(commit_filter))

    @reraise_safe_exceptions
    def rev_range_hash(self, wire, node):
        repo = self._factory.repo(wire)

        def get_revs(repo, rev_opt):
            if rev_opt:
                revs = revrange(repo, rev_opt)
                if len(revs) == 0:
                    return (nullrev, nullrev)
                return max(revs), min(revs)
            else:
                return len(repo) - 1, 0

        stop, start = get_revs(repo, [node + ':'])
        revs = [hex(repo[r].node()) for r in range(start, stop + 1)]
        return revs

    @reraise_safe_exceptions
    def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
        other_path = kwargs.pop('other_path', None)

        # case when we want to compare two independent repositories
        if other_path and other_path != wire["path"]:
            baseui = self._factory._create_config(wire["config"])
            repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
        else:
            repo = self._factory.repo(wire)
        return list(repo.revs(rev_spec, *args))

    @reraise_safe_exceptions
    def verify(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])

        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()

    @reraise_safe_exceptions
    def hg_update_cache(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        with repo.wlock(), repo.lock():
            repo.updatecaches(full=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def hg_rebuild_fn_cache(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui

        repair.rebuildfncache(baseui, repo)

        return output.getvalue()

    @reraise_safe_exceptions
    def tags(self, wire):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)
        @region.conditional_cache_on_arguments(condition=cache_on)
        def _tags(_context_uid, _repo_id):
            repo = self._factory.repo(wire)
            return repo.tags()

        return _tags(context_uid, repo_id)

    @reraise_safe_exceptions
    def update(self, wire, node=None, clean=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.update(baseui, repo, node=node, clean=clean)

    @reraise_safe_exceptions
    def identify(self, wire):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()
        baseui.write = output.write
        # This is required to get a full node id
        baseui.debugflag = True
        commands.identify(baseui, repo, id=True)

        return output.getvalue()

    @reraise_safe_exceptions
    def heads(self, wire, branch=None):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui.write = write
        if branch:
            args = [branch]
        else:
            args = []
        commands.heads(baseui, repo, template='{node} ', *args)

        return output.getvalue()

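identify and heads capture command output by pointing baseui.write at an in-memory buffer before invoking the command, instead of letting Mercurial print to stdout. The same capture idiom in miniature (the FakeUI class is a made-up stand-in for illustration, not a Mercurial API):

# illustrative sketch -- not part of the changeset
import io

class FakeUI:
    # made-up stand-in: only the .write attribute matters for the idiom
    def write(self, data, **opts):
        raise AssertionError('would have printed to stdout')

ui = FakeUI()
output = io.BytesIO()
ui.write = output.write        # redirect everything the "command" emits

ui.write(b'1a2b3c4d ')         # what commands.heads() would have printed
assert output.getvalue() == b'1a2b3c4d '
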
    @reraise_safe_exceptions
    def ancestor(self, wire, revision1, revision2):
        repo = self._factory.repo(wire)
        changelog = repo.changelog
        lookup = repo.lookup
        a = changelog.ancestor(lookup(revision1), lookup(revision2))
        return hex(a)

    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, source, dest, noupdate=not update_after_clone)

    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        publishing = baseui.configbool('phases', 'publish')
        if publishing:
            new_commit = 'public'
        else:
            new_commit = 'draft'

        def _filectxfn(_repo, ctx, path):
            """
            Marks the given path as added/changed/removed in _repo. This is
            used by Mercurial's internal commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark the node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)

            raise exceptions.AbortException()(
                "Given path hasn't been marked as added, "
                "changed or removed (%s)" % path)

        with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):

            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=message,
                files=files,
                filectxfn=_filectxfn,
                user=user,
                date=(commit_time, commit_timezone),
                extra=extra)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id

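commitctx builds the commit through Mercurial's in-memory memctx: filectxfn is called back once per path in `files`, and its return value decides the node's fate. A toy model of that contract, with no Mercurial involved (apply_changes is a made-up stand-in for repo.commitctx, and plain strings stand in for memfilectx objects):

# illustrative sketch -- not part of the changeset
def apply_changes(tree, files, filectxfn):
    # consult the callback once per path, as memctx/commitctx does
    for path in files:
        result = filectxfn(None, None, path)
        if result is None:
            tree.pop(path, None)   # None marks the path for removal
        else:
            tree[path] = result    # returned content (re)writes the path
    return tree

removed = {'b.txt'}
updated = [{'path': 'a.txt', 'content': 'new contents'}]

def filectxfn(_repo, _ctx, path):
    if path in removed:
        return None
    for node in updated:
        if node['path'] == path:
            return node['content']
    raise LookupError(path)

tree = {'a.txt': 'old', 'b.txt': 'gone soon'}
apply_changes(tree, ['a.txt', 'b.txt'], filectxfn)
assert tree == {'a.txt': 'new contents'}
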
    @reraise_safe_exceptions
    def pull(self, wire, url, commit_ids=None):
        repo = self._factory.repo(wire)
        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        if commit_ids:
            commit_ids = [bin(commit_id) for commit_id in commit_ids]

        return exchange.pull(
            repo, remote, heads=commit_ids, force=None).cgresult

    @reraise_safe_exceptions
    def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)

        # Mercurial internally has a lot of logic that checks ONLY whether
        # an option is defined, so we only pass the options that are set
        opts = {}
        if bookmark:
            opts['bookmark'] = bookmark
        if branch:
            opts['branch'] = branch
        if revision:
            opts['rev'] = revision

        commands.pull(baseui, repo, source, **opts)

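The defined-only options idiom in pull_cmd generalizes: build a kwargs dict only from values that were actually provided, so the callee's own "is this option set?" checks behave. A standalone sketch (build_opts is a hypothetical helper, not part of the changeset):

# illustrative sketch -- not part of the changeset
def build_opts(**candidates):
    # keep only options that were actually provided (truthy)
    return {name: value for name, value in candidates.items() if value}

opts = build_opts(bookmark=None, branch='default', rev=None)
assert opts == {'branch': 'default'}
# commands.pull(baseui, repo, source, **opts)  # only 'branch' reaches Mercurial
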
    @reraise_safe_exceptions
    def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)
        commands.push(baseui, repo, dest=dest_path, rev=revisions,
                      new_branch=push_branches)

    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        hgext_strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)

    @reraise_safe_exceptions
    def get_unresolved_files(self, wire):
        repo = self._factory.repo(wire)

        log.debug('Calculating unresolved files for repo: %s', repo)
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui = self._factory._create_config(wire['config'])
        baseui.write = write

        commands.resolve(baseui, repo, list=True)
        unresolved = output.getvalue().splitlines(0)
        return unresolved

    @reraise_safe_exceptions
    def merge(self, wire, revision):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'merge', 'internal:dump')

        # When sub repositories are used, mercurial prompts the user in case
        # of merge conflicts or differing sub repository sources. By setting
        # the interactive flag to `False` mercurial doesn't prompt the user
        # but instead uses a default value.
        repo.ui.setconfig('ui', 'interactive', False)
        commands.merge(baseui, repo, rev=revision)

    @reraise_safe_exceptions
    def merge_state(self, wire):
        repo = self._factory.repo(wire)
        repo.ui.setconfig('ui', 'merge', 'internal:dump')

        # When sub repositories are used, mercurial prompts the user in case
        # of merge conflicts or differing sub repository sources. By setting
        # the interactive flag to `False` mercurial doesn't prompt the user
        # but instead uses a default value.
        repo.ui.setconfig('ui', 'interactive', False)
        ms = hg_merge.mergestate(repo)
        return [x for x in ms.unresolved()]

    @reraise_safe_exceptions
    def commit(self, wire, message, username, close_branch=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'username', username)
        commands.commit(baseui, repo, message=message, close_branch=close_branch)

    @reraise_safe_exceptions
    def rebase(self, wire, source=None, dest=None, abort=False):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'merge', 'internal:dump')
        # When sub repositories are used, mercurial prompts the user in case
        # of merge conflicts or differing sub repository sources. By setting
        # the interactive flag to `False` mercurial doesn't prompt the user
        # but instead uses a default value.
        repo.ui.setconfig('ui', 'interactive', False)
        rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)

    @reraise_safe_exceptions
    def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
        repo = self._factory.repo(wire)
        ctx = self._get_ctx(repo, revision)
        node = ctx.node()

        date = (tag_time, tag_timezone)
        try:
            hg_tag.tag(repo, name, node, message, local, user, date)
        except Abort as e:
            log.exception("Tag operation aborted")
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

    @reraise_safe_exceptions
    def bookmark(self, wire, bookmark, revision=None):
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        # we don't need any special hooks for Mercurial
        pass

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        return {
            'pre_version': vcsserver.__version__,
            'post_version': vcsserver.__version__,
        }

    @reraise_safe_exceptions
    def set_head_ref(self, wire, head_name):
        pass

    @reraise_safe_exceptions
    def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
                     archive_dir_name, commit_id):

        def file_walker(_commit_id, path):
            repo = self._factory.repo(wire)
            ctx = repo[_commit_id]
            is_root = path in ['', '/']
            if is_root:
                matcher = alwaysmatcher(badfn=None)
            else:
                matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
            file_iter = ctx.manifest().walk(matcher)

            for fn in file_iter:
                file_path = fn
                flags = ctx.flags(fn)
                mode = b'x' in flags and 0o755 or 0o644
                is_link = b'l' in flags

                yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)

        return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
                            archive_dir_name, commit_id)

@@ -1,79 +1,84 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""
Mercurial libs compatibility
"""

import mercurial
from mercurial import demandimport
+
# patch demandimport, due to a bug in mercurial that always triggers
# demandimport.enable()
demandimport.enable = lambda *args, **kwargs: 1

from mercurial import ui
from mercurial import patch
from mercurial import config
from mercurial import extensions
from mercurial import scmutil
from mercurial import archival
from mercurial import discovery
from mercurial import unionrepo
from mercurial import localrepo
from mercurial import merge as hg_merge
from mercurial import subrepo
from mercurial import subrepoutil
from mercurial import tags as hg_tag
from mercurial import util as hgutil
-from mercurial.commands import clone, nullid, pull
+from mercurial.commands import clone, pull
+from mercurial.node import nullid
from mercurial.context import memctx, memfilectx
from mercurial.error import (
    LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
    RequirementError, ProgrammingError)
from mercurial.hgweb import hgweb_mod
from mercurial.localrepo import instance
from mercurial.match import match, alwaysmatcher, patternmatcher
from mercurial.mdiff import diffopts
from mercurial.node import bin, hex
from mercurial.encoding import tolocal
from mercurial.discovery import findcommonoutgoing
from mercurial.hg import peer
from mercurial.httppeer import makepeer
-from mercurial.util import url as hg_url
+from mercurial.utils.urlutil import url as hg_url
from mercurial.scmutil import revrange, revsymbol
from mercurial.node import nullrev
from mercurial import exchange
from hgext import largefiles

# these auth handlers are patched for a python 2.6.5 bug causing
# infinite looping when given invalid resources
from mercurial.url import httpbasicauthhandler, httpdigestauthhandler

+# hg strip is in core now
+from mercurial import strip as hgext_strip
+

def get_ctx(repo, ref):
    try:
        ctx = repo[ref]
    except (ProgrammingError, TypeError):
        # we're unable to find the rev using a regular lookup, we fallback
        # to slower, but backward compat revsymbol usage
        ctx = revsymbol(repo, ref)
    except (LookupError, RepoLookupError):
        # Similar case as above but only for refs that are not numeric
        if isinstance(ref, (int, long)):
            raise
        ctx = revsymbol(repo, ref)
    return ctx
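
get_ctx tries the fast repo[ref] lookup first and only falls back to the slower revsymbol resolution when that fails, re-raising for numeric refs where a symbolic retry cannot help. The shape of that fallback, as a self-contained sketch (mapping and aliases are made-up stand-ins for the repo and revsymbol):

# illustrative sketch -- not part of the changeset
def lookup_with_fallback(mapping, ref, slow_resolve):
    # fast path first; use the expensive resolver only on failure
    try:
        return mapping[ref]
    except KeyError:
        if isinstance(ref, int):
            raise  # numeric refs will not resolve symbolically either
        return slow_resolve(ref)

aliases = {'tip': 'rev-42'}
assert lookup_with_fallback({'rev-42': 'ctx'}, 'rev-42', aliases.get) == 'ctx'
assert lookup_with_fallback({'rev-42': 'ctx'}, 'tip', aliases.get) == 'rev-42'
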
@@ -1,263 +1,261 @@
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import os
import time
import logging
import functools

from dogpile.cache import CacheRegion
-from dogpile.cache.util import compat

from vcsserver.utils import safe_str, sha1
-
from vcsserver.lib.rc_cache import region_meta

log = logging.getLogger(__name__)


class RhodeCodeCacheRegion(CacheRegion):

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
-            to_str=compat.string_type,
+            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator that will not touch any dogpile internals
        if the condition isn't met. This works a bit differently from
        should_cache_fn, and it's faster in cases where we never want to
        compute cached values.
        """
-        expiration_time_is_callable = compat.callable(expiration_time)
+        expiration_time_is_callable = callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        # workaround for py2 and cython problems, this block should be removed
        # once we've migrated to py3
        if 'cython' == 'cython':
            def decorator(fn):
-                if to_str is compat.string_type:
+                if to_str is str:
                    # backwards compatible
                    key_generator = function_key_generator(namespace, fn)
                else:
                    key_generator = function_key_generator(namespace, fn, to_str=to_str)

                @functools.wraps(fn)
                def decorate(*arg, **kw):
                    key = key_generator(*arg, **kw)

                    @functools.wraps(fn)
                    def creator():
                        return fn(*arg, **kw)

                    if not condition:
                        return creator()

                    timeout = expiration_time() if expiration_time_is_callable \
                        else expiration_time

                    return self.get_or_create(key, creator, timeout, should_cache_fn)

                def invalidate(*arg, **kw):
                    key = key_generator(*arg, **kw)
                    self.delete(key)

                def set_(value, *arg, **kw):
                    key = key_generator(*arg, **kw)
                    self.set(key, value)

                def get(*arg, **kw):
                    key = key_generator(*arg, **kw)
                    return self.get(key)

                def refresh(*arg, **kw):
                    key = key_generator(*arg, **kw)
                    value = fn(*arg, **kw)
                    self.set(key, value)
                    return value

                decorate.set = set_
                decorate.invalidate = invalidate
                decorate.refresh = refresh
                decorate.get = get
                decorate.original = fn
                decorate.key_generator = key_generator
                decorate.__wrapped__ = fn

                return decorate
            return decorator

        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):

            if not condition:
                log.debug('Calling un-cached method:%s', user_func.func_name)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.func_name, total)
                return result

            key = key_generator(*arg, **kw)

            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.func_name)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
-            if to_str is compat.string_type:
+            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                key = key_generator(*arg, **kw)
                return self.get(key)

            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator

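How the class above is typically consumed, as a hedged sketch: the real regions are built from the .ini-driven configuration elsewhere in vcsserver, so the in-code configure() call and the memory backend below are assumptions for illustration only (make_region is the helper defined just below):

# illustrative sketch -- regions are normally built from .ini settings
region = make_region()
region.configure('dogpile.cache.memory')

@region.conditional_cache_on_arguments(condition=True)
def _tags(_context_uid, _repo_id):
    return {'v1.0': 'deadbeef'}      # stand-in for repo.tags()

_tags('ctx-1', 'repo-1')             # computed, then stored in the region
_tags('ctx-1', 'repo-1')             # second call is served from the cache
_tags.invalidate('ctx-1', 'repo-1')  # helper attached by the decorator
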
167 | def make_region(*arg, **kw): |
|
165 | def make_region(*arg, **kw): | |
168 | return RhodeCodeCacheRegion(*arg, **kw) |
|
166 | return RhodeCodeCacheRegion(*arg, **kw) | |
169 |
|
167 | |||
170 |
|
168 | |||
171 | def get_default_cache_settings(settings, prefixes=None): |
|
169 | def get_default_cache_settings(settings, prefixes=None): | |
172 | prefixes = prefixes or [] |
|
170 | prefixes = prefixes or [] | |
173 | cache_settings = {} |
|
171 | cache_settings = {} | |
174 | for key in settings.keys(): |
|
172 | for key in settings.keys(): | |
175 | for prefix in prefixes: |
|
173 | for prefix in prefixes: | |
176 | if key.startswith(prefix): |
|
174 | if key.startswith(prefix): | |
177 | name = key.split(prefix)[1].strip() |
|
175 | name = key.split(prefix)[1].strip() | |
178 | val = settings[key] |
|
176 | val = settings[key] | |
179 | if isinstance(val, str): |
|
177 | if isinstance(val, str): | |
180 | val = val.strip() |
|
178 | val = val.strip() | |
181 | cache_settings[name] = val |
|
179 | cache_settings[name] = val | |
182 | return cache_settings |
|
180 | return cache_settings | |
183 |
|
181 | |||
184 |
|
182 | |||
185 | def compute_key_from_params(*args): |
|
183 | def compute_key_from_params(*args): | |
186 | """ |
|
184 | """ | |
187 | Helper to compute key from given params to be used in cache manager |
|
185 | Helper to compute key from given params to be used in cache manager | |
188 | """ |
|
186 | """ | |
189 | return sha1("_".join(map(safe_str, args))) |
|
187 | return sha1("_".join(map(safe_str, args))) | |
190 |
|
188 | |||
191 |
|
189 | |||
192 | def backend_key_generator(backend): |
|
190 | def backend_key_generator(backend): | |
193 | """ |
|
191 | """ | |
194 | Special wrapper that also sends over the backend to the key generator |
|
192 | Special wrapper that also sends over the backend to the key generator | |
195 | """ |
|
193 | """ | |
196 | def wrapper(namespace, fn): |
|
194 | def wrapper(namespace, fn): | |
197 | return key_generator(backend, namespace, fn) |
|
195 | return key_generator(backend, namespace, fn) | |
198 | return wrapper |
|
196 | return wrapper | |
199 |
|
197 | |||
200 |
|
198 | |||
201 | def key_generator(backend, namespace, fn): |
|
199 | def key_generator(backend, namespace, fn): | |
202 | fname = fn.__name__ |
|
200 | fname = fn.__name__ | |
203 |
|
201 | |||
204 | def generate_key(*args): |
|
202 | def generate_key(*args): | |
205 | backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix' |
|
203 | backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix' | |
206 | namespace_pref = namespace or 'default_namespace' |
|
204 | namespace_pref = namespace or 'default_namespace' | |
207 | arg_key = compute_key_from_params(*args) |
|
205 | arg_key = compute_key_from_params(*args) | |
208 | final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key) |
|
206 | final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key) | |
209 |
|
207 | |||
210 | return final_key |
|
208 | return final_key | |
211 |
|
209 | |||
212 | return generate_key |
|
210 | return generate_key | |
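For illustration, a minimal sketch of the key shape `generate_key` produces, assuming it runs inside this module (the dummy backend and function below are hypothetical):

    class _DummyBackend(object):
        key_prefix = 'rc'                     # hypothetical backend prefix

    def _load(repo_id, path):                 # hypothetical cached function
        pass

    gen = key_generator(_DummyBackend(), 'repo_cache', _load)
    gen('repo_1', '/README.rst')
    # -> 'rc:repo_cache:_load_<sha1 of "repo_1_/README.rst">'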
213 |
|
211 | |||
214 |
|
212 | |||
215 | def get_or_create_region(region_name, region_namespace=None): |
|
213 | def get_or_create_region(region_name, region_namespace=None): | |
216 | from vcsserver.lib.rc_cache.backends import FileNamespaceBackend |
|
214 | from vcsserver.lib.rc_cache.backends import FileNamespaceBackend | |
217 | region_obj = region_meta.dogpile_cache_regions.get(region_name) |
|
215 | region_obj = region_meta.dogpile_cache_regions.get(region_name) | |
218 | if not region_obj: |
|
216 | if not region_obj: | |
219 | raise EnvironmentError( |
|
217 | raise EnvironmentError( | |
220 | 'Region `{}` not in configured regions: {}.'.format( |
|
218 | 'Region `{}` not in configured regions: {}.'.format( | |
221 | region_name, region_meta.dogpile_cache_regions.keys())) |
|
219 | region_name, region_meta.dogpile_cache_regions.keys())) | |
222 |
|
220 | |||
223 | region_uid_name = '{}:{}'.format(region_name, region_namespace) |
|
221 | region_uid_name = '{}:{}'.format(region_name, region_namespace) | |
224 | if isinstance(region_obj.actual_backend, FileNamespaceBackend): |
|
222 | if isinstance(region_obj.actual_backend, FileNamespaceBackend): | |
225 | region_exist = region_meta.dogpile_cache_regions.get(region_namespace) |
|
223 | region_exist = region_meta.dogpile_cache_regions.get(region_namespace) | |
226 | if region_exist: |
|
224 | if region_exist: | |
227 | log.debug('Using already configured region: %s', region_namespace) |
|
225 | log.debug('Using already configured region: %s', region_namespace) | |
228 | return region_exist |
|
226 | return region_exist | |
229 | cache_dir = region_meta.dogpile_config_defaults['cache_dir'] |
|
227 | cache_dir = region_meta.dogpile_config_defaults['cache_dir'] | |
230 | expiration_time = region_obj.expiration_time |
|
228 | expiration_time = region_obj.expiration_time | |
231 |
|
229 | |||
232 | if not os.path.isdir(cache_dir): |
|
230 | if not os.path.isdir(cache_dir): | |
233 | os.makedirs(cache_dir) |
|
231 | os.makedirs(cache_dir) | |
234 | new_region = make_region( |
|
232 | new_region = make_region( | |
235 | name=region_uid_name, |
|
233 | name=region_uid_name, | |
236 | function_key_generator=backend_key_generator(region_obj.actual_backend) |
|
234 | function_key_generator=backend_key_generator(region_obj.actual_backend) | |
237 | ) |
|
235 | ) | |
238 | namespace_filename = os.path.join( |
|
236 | namespace_filename = os.path.join( | |
239 | cache_dir, "{}.cache.dbm".format(region_namespace)) |
|
237 | cache_dir, "{}.cache.dbm".format(region_namespace)) | |
240 | # special type that allows 1db per namespace |
|
238 | # special type that allows 1db per namespace | |
241 | new_region.configure( |
|
239 | new_region.configure( | |
242 | backend='dogpile.cache.rc.file_namespace', |
|
240 | backend='dogpile.cache.rc.file_namespace', | |
243 | expiration_time=expiration_time, |
|
241 | expiration_time=expiration_time, | |
244 | arguments={"filename": namespace_filename} |
|
242 | arguments={"filename": namespace_filename} | |
245 | ) |
|
243 | ) | |
246 |
|
244 | |||
247 | # create and save in region caches |
|
245 | # create and save in region caches | |
248 | log.debug('configuring new region: %s', region_uid_name) |
|
246 | log.debug('configuring new region: %s', region_uid_name) | |
249 | region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region |
|
247 | region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region | |
250 |
|
248 | |||
251 | return region_obj |
|
249 | return region_obj | |
252 |
|
250 | |||
253 |
|
251 | |||
254 | def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False): |
|
252 | def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False): | |
255 | region = get_or_create_region(cache_region, cache_namespace_uid) |
|
253 | region = get_or_create_region(cache_region, cache_namespace_uid) | |
256 | cache_keys = region.backend.list_keys(prefix=cache_namespace_uid) |
|
254 | cache_keys = region.backend.list_keys(prefix=cache_namespace_uid) | |
257 | num_delete_keys = len(cache_keys) |
|
255 | num_delete_keys = len(cache_keys) | |
258 | if invalidate: |
|
256 | if invalidate: | |
259 | region.invalidate(hard=False) |
|
257 | region.invalidate(hard=False) | |
260 | else: |
|
258 | else: | |
261 | if num_delete_keys: |
|
259 | if num_delete_keys: | |
262 | region.delete_multi(cache_keys) |
|
260 | region.delete_multi(cache_keys) | |
263 | return num_delete_keys |
|
261 | return num_delete_keys |
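Taken together, the helpers above give every decorated function a small cache-management API. A hedged usage sketch (the region name, namespace, and decorated function are hypothetical; real values come from the .ini configuration):

    region = get_or_create_region('cache_repo', region_namespace='repo_1')

    @region.conditional_cache_on_arguments(condition=True)
    def heavy_lookup(repo_id, commit_id):
        ...                                        # expensive computation

    heavy_lookup('repo_1', 'abc')                  # computed, then cached
    heavy_lookup('repo_1', 'abc')                  # served from the cache
    heavy_lookup.invalidate('repo_1', 'abc')       # drop the cached value
    heavy_lookup.refresh('repo_1', 'abc')          # recompute and store
    clear_cache_namespace('cache_repo', 'repo_1')  # returns count of deleted keys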
@@ -1,867 +1,865 @@
1 | # RhodeCode VCSServer provides access to different vcs backends via network. |
|
1 | # RhodeCode VCSServer provides access to different vcs backends via network. | |
2 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
2 | # Copyright (C) 2014-2020 RhodeCode GmbH | |
3 | # |
|
3 | # | |
4 | # This program is free software; you can redistribute it and/or modify |
|
4 | # This program is free software; you can redistribute it and/or modify | |
5 | # it under the terms of the GNU General Public License as published by |
|
5 | # it under the terms of the GNU General Public License as published by | |
6 | # the Free Software Foundation; either version 3 of the License, or |
|
6 | # the Free Software Foundation; either version 3 of the License, or | |
7 | # (at your option) any later version. |
|
7 | # (at your option) any later version. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU General Public License |
|
14 | # You should have received a copy of the GNU General Public License | |
15 | # along with this program; if not, write to the Free Software Foundation, |
|
15 | # along with this program; if not, write to the Free Software Foundation, | |
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
17 |
|
17 | |||
18 |
|
18 | |||
19 |
|
||||
20 | import os |
|
19 | import os | |
21 | import subprocess |
|
20 | import subprocess | |
22 | import time |
|
|||
23 | from urllib.error import URLError |
|
21 | from urllib.error import URLError | |
24 | import urllib.parse |
|
22 | import urllib.parse | |
25 | import logging |
|
23 | import logging | |
26 | import posixpath as vcspath |
|
24 | import posixpath as vcspath | |
27 | import io |
|
25 | import io | |
28 | import urllib.request, urllib.parse, urllib.error |
|
26 | import urllib.request, urllib.parse, urllib.error | |
29 | import traceback |
|
27 | import traceback | |
30 |
|
28 | |||
31 | import svn.client |
|
29 | import svn.client | |
32 | import svn.core |
|
30 | import svn.core | |
33 | import svn.delta |
|
31 | import svn.delta | |
34 | import svn.diff |
|
32 | import svn.diff | |
35 | import svn.fs |
|
33 | import svn.fs | |
36 | import svn.repos |
|
34 | import svn.repos | |
37 |
|
35 | |||
38 | from vcsserver import svn_diff, exceptions, subprocessio, settings |
|
36 | from vcsserver import svn_diff, exceptions, subprocessio, settings | |
39 | from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo |
|
37 | from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo | |
40 | from vcsserver.exceptions import NoContentException |
|
38 | from vcsserver.exceptions import NoContentException | |
41 | from vcsserver.utils import safe_str |
|
39 | from vcsserver.utils import safe_str | |
42 | from vcsserver.vcs_base import RemoteBase |
|
40 | from vcsserver.vcs_base import RemoteBase | |
43 |
|
41 | |||
44 | log = logging.getLogger(__name__) |
|
42 | log = logging.getLogger(__name__) | |
45 |
|
43 | |||
46 |
|
44 | |||
47 | svn_compatible_versions_map = { |
|
45 | svn_compatible_versions_map = { | |
48 | 'pre-1.4-compatible': '1.3', |
|
46 | 'pre-1.4-compatible': '1.3', | |
49 | 'pre-1.5-compatible': '1.4', |
|
47 | 'pre-1.5-compatible': '1.4', | |
50 | 'pre-1.6-compatible': '1.5', |
|
48 | 'pre-1.6-compatible': '1.5', | |
51 | 'pre-1.8-compatible': '1.7', |
|
49 | 'pre-1.8-compatible': '1.7', | |
52 | 'pre-1.9-compatible': '1.8', |
|
50 | 'pre-1.9-compatible': '1.8', | |
53 | } |
|
51 | } | |
54 |
|
52 | |||
55 | current_compatible_version = '1.12' |
|
53 | current_compatible_version = '1.12' | |
56 |
|
54 | |||
57 |
|
55 | |||
58 | def reraise_safe_exceptions(func): |
|
56 | def reraise_safe_exceptions(func): | |
59 | """Decorator for converting svn exceptions to something neutral.""" |
|
57 | """Decorator for converting svn exceptions to something neutral.""" | |
60 | def wrapper(*args, **kwargs): |
|
58 | def wrapper(*args, **kwargs): | |
61 | try: |
|
59 | try: | |
62 | return func(*args, **kwargs) |
|
60 | return func(*args, **kwargs) | |
63 | except Exception as e: |
|
61 | except Exception as e: | |
64 | if not hasattr(e, '_vcs_kind'): |
|
62 | if not hasattr(e, '_vcs_kind'): | |
65 | log.exception("Unhandled exception in svn remote call") |
|
63 | log.exception("Unhandled exception in svn remote call") | |
66 | raise_from_original(exceptions.UnhandledException(e)) |
|
64 | raise_from_original(exceptions.UnhandledException(e)) | |
67 | raise |
|
65 | raise | |
68 | return wrapper |
|
66 | return wrapper | |
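As a usage sketch (the method below is hypothetical): any error raised inside a decorated remote call that does not already carry a `_vcs_kind` marker is logged and re-raised as `exceptions.UnhandledException`, while marked vcs errors propagate untouched:

    @reraise_safe_exceptions
    def remote_call(self, wire):
        # an exception without `_vcs_kind` ends up wrapped via
        # raise_from_original(exceptions.UnhandledException(e))
        raise RuntimeError('boom')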
69 |
|
67 | |||
70 |
|
68 | |||
71 | class SubversionFactory(RepoFactory): |
|
69 | class SubversionFactory(RepoFactory): | |
72 | repo_type = 'svn' |
|
70 | repo_type = 'svn' | |
73 |
|
71 | |||
74 | def _create_repo(self, wire, create, compatible_version): |
|
72 | def _create_repo(self, wire, create, compatible_version): | |
75 | path = svn.core.svn_path_canonicalize(wire['path']) |
|
73 | path = svn.core.svn_path_canonicalize(wire['path']) | |
76 | if create: |
|
74 | if create: | |
77 | fs_config = {'compatible-version': current_compatible_version} |
|
75 | fs_config = {'compatible-version': current_compatible_version} | |
78 | if compatible_version: |
|
76 | if compatible_version: | |
79 |
|
77 | |||
80 | compatible_version_string = \ |
|
78 | compatible_version_string = \ | |
81 | svn_compatible_versions_map.get(compatible_version) \ |
|
79 | svn_compatible_versions_map.get(compatible_version) \ | |
82 | or compatible_version |
|
80 | or compatible_version | |
83 | fs_config['compatible-version'] = compatible_version_string |
|
81 | fs_config['compatible-version'] = compatible_version_string | |
84 |
|
82 | |||
85 | log.debug('Create SVN repo with config "%s"', fs_config) |
|
83 | log.debug('Create SVN repo with config "%s"', fs_config) | |
86 | repo = svn.repos.create(path, "", "", None, fs_config) |
|
84 | repo = svn.repos.create(path, "", "", None, fs_config) | |
87 | else: |
|
85 | else: | |
88 | repo = svn.repos.open(path) |
|
86 | repo = svn.repos.open(path) | |
89 |
|
87 | |||
90 | log.debug('Got SVN object: %s', repo) |
|
88 | log.debug('Got SVN object: %s', repo) | |
91 | return repo |
|
89 | return repo | |
92 |
|
90 | |||
93 | def repo(self, wire, create=False, compatible_version=None): |
|
91 | def repo(self, wire, create=False, compatible_version=None): | |
94 | """ |
|
92 | """ | |
95 | Get a repository instance for the given path. |
|
93 | Get a repository instance for the given path. | |
96 | """ |
|
94 | """ | |
97 | return self._create_repo(wire, create, compatible_version) |
|
95 | return self._create_repo(wire, create, compatible_version) | |
98 |
|
96 | |||
99 |
|
97 | |||
100 | NODE_TYPE_MAPPING = { |
|
98 | NODE_TYPE_MAPPING = { | |
101 | svn.core.svn_node_file: 'file', |
|
99 | svn.core.svn_node_file: 'file', | |
102 | svn.core.svn_node_dir: 'dir', |
|
100 | svn.core.svn_node_dir: 'dir', | |
103 | } |
|
101 | } | |
104 |
|
102 | |||
105 |
|
103 | |||
106 | class SvnRemote(RemoteBase): |
|
104 | class SvnRemote(RemoteBase): | |
107 |
|
105 | |||
108 | def __init__(self, factory, hg_factory=None): |
|
106 | def __init__(self, factory, hg_factory=None): | |
109 | self._factory = factory |
|
107 | self._factory = factory | |
110 | # TODO: Remove once we do not use internal Mercurial objects anymore |
|
108 | # TODO: Remove once we do not use internal Mercurial objects anymore | |
111 | # for subversion |
|
109 | # for subversion | |
112 | self._hg_factory = hg_factory |
|
110 | self._hg_factory = hg_factory | |
113 |
|
111 | |||
114 | @reraise_safe_exceptions |
|
112 | @reraise_safe_exceptions | |
115 | def discover_svn_version(self): |
|
113 | def discover_svn_version(self): | |
116 | try: |
|
114 | try: | |
117 | import svn.core |
|
115 | import svn.core | |
118 | svn_ver = svn.core.SVN_VERSION |
|
116 | svn_ver = svn.core.SVN_VERSION | |
119 | except ImportError: |
|
117 | except ImportError: | |
120 | svn_ver = None |
|
118 | svn_ver = None | |
121 | return svn_ver |
|
119 | return svn_ver | |
122 |
|
120 | |||
123 | @reraise_safe_exceptions |
|
121 | @reraise_safe_exceptions | |
124 | def is_empty(self, wire): |
|
122 | def is_empty(self, wire): | |
125 |
|
123 | |||
126 | try: |
|
124 | try: | |
127 | return self.lookup(wire, -1) == 0 |
|
125 | return self.lookup(wire, -1) == 0 | |
128 | except Exception: |
|
126 | except Exception: | |
129 | log.exception("failed to read object_store") |
|
127 | log.exception("failed to read object_store") | |
130 | return False |
|
128 | return False | |
131 |
|
129 | |||
132 | def check_url(self, url, config_items): |
|
130 | def check_url(self, url, config_items): | |
133 | # this can throw exception if not installed, but we detect this |
|
131 | # this can throw exception if not installed, but we detect this | |
134 | from hgsubversion import svnrepo |
|
132 | from hgsubversion import svnrepo | |
135 |
|
133 | |||
136 | baseui = self._hg_factory._create_config(config_items) |
|
134 | baseui = self._hg_factory._create_config(config_items) | |
137 | # uuid function gets only valid UUID from proper repo, else |
|
135 | # uuid function gets only valid UUID from proper repo, else | |
138 | # throws exception |
|
136 | # throws exception | |
139 | try: |
|
137 | try: | |
140 | svnrepo.svnremoterepo(baseui, url).svn.uuid |
|
138 | svnrepo.svnremoterepo(baseui, url).svn.uuid | |
141 | except Exception: |
|
139 | except Exception: | |
142 | tb = traceback.format_exc() |
|
140 | tb = traceback.format_exc() | |
143 | log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb) |
|
141 | log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb) | |
144 | raise URLError( |
|
142 | raise URLError( | |
145 | '"%s" is not a valid Subversion source url.' % (url, )) |
|
143 | '"%s" is not a valid Subversion source url.' % (url, )) | |
146 | return True |
|
144 | return True | |
147 |
|
145 | |||
148 | def is_path_valid_repository(self, wire, path): |
|
146 | def is_path_valid_repository(self, wire, path): | |
149 |
|
147 | |||
150 | # NOTE(marcink): short circuit the check for SVN repo |
|
148 | # NOTE(marcink): short circuit the check for SVN repo | |
151 | # the repos.open might be expensive to check, but we have one cheap |
|
149 | # the repos.open might be expensive to check, but we have one cheap | |
152 | # precondition that we can use, to check for 'format' file |
|
150 | # precondition that we can use, to check for 'format' file | |
153 |
|
151 | |||
154 | if not os.path.isfile(os.path.join(path, 'format')): |
|
152 | if not os.path.isfile(os.path.join(path, 'format')): | |
155 | return False |
|
153 | return False | |
156 |
|
154 | |||
157 | try: |
|
155 | try: | |
158 | svn.repos.open(path) |
|
156 | svn.repos.open(path) | |
159 | except svn.core.SubversionException: |
|
157 | except svn.core.SubversionException: | |
160 | tb = traceback.format_exc() |
|
158 | tb = traceback.format_exc() | |
161 | log.debug("Invalid Subversion path `%s`, tb: %s", path, tb) |
|
159 | log.debug("Invalid Subversion path `%s`, tb: %s", path, tb) | |
162 | return False |
|
160 | return False | |
163 | return True |
|
161 | return True | |
164 |
|
162 | |||
165 | @reraise_safe_exceptions |
|
163 | @reraise_safe_exceptions | |
166 | def verify(self, wire): |
|
164 | def verify(self, wire): | |
167 | repo_path = wire['path'] |
|
165 | repo_path = wire['path'] | |
168 | if not self.is_path_valid_repository(wire, repo_path): |
|
166 | if not self.is_path_valid_repository(wire, repo_path): | |
169 | raise Exception( |
|
167 | raise Exception( | |
170 | "Path %s is not a valid Subversion repository." % repo_path) |
|
168 | "Path %s is not a valid Subversion repository." % repo_path) | |
171 |
|
169 | |||
172 | cmd = ['svnadmin', 'info', repo_path] |
|
170 | cmd = ['svnadmin', 'info', repo_path] | |
173 | stdout, stderr = subprocessio.run_command(cmd) |
|
171 | stdout, stderr = subprocessio.run_command(cmd) | |
174 | return stdout |
|
172 | return stdout | |
175 |
|
173 | |||
176 | def lookup(self, wire, revision): |
|
174 | def lookup(self, wire, revision): | |
177 | if revision not in [-1, None, 'HEAD']: |
|
175 | if revision not in [-1, None, 'HEAD']: | |
178 | raise NotImplementedError |
|
176 | raise NotImplementedError | |
179 | repo = self._factory.repo(wire) |
|
177 | repo = self._factory.repo(wire) | |
180 | fs_ptr = svn.repos.fs(repo) |
|
178 | fs_ptr = svn.repos.fs(repo) | |
181 | head = svn.fs.youngest_rev(fs_ptr) |
|
179 | head = svn.fs.youngest_rev(fs_ptr) | |
182 | return head |
|
180 | return head | |
183 |
|
181 | |||
184 | def lookup_interval(self, wire, start_ts, end_ts): |
|
182 | def lookup_interval(self, wire, start_ts, end_ts): | |
185 | repo = self._factory.repo(wire) |
|
183 | repo = self._factory.repo(wire) | |
186 | fsobj = svn.repos.fs(repo) |
|
184 | fsobj = svn.repos.fs(repo) | |
187 | start_rev = None |
|
185 | start_rev = None | |
188 | end_rev = None |
|
186 | end_rev = None | |
189 | if start_ts: |
|
187 | if start_ts: | |
190 | start_ts_svn = apr_time_t(start_ts) |
|
188 | start_ts_svn = apr_time_t(start_ts) | |
191 | start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1 |
|
189 | start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1 | |
192 | else: |
|
190 | else: | |
193 | start_rev = 1 |
|
191 | start_rev = 1 | |
194 | if end_ts: |
|
192 | if end_ts: | |
195 | end_ts_svn = apr_time_t(end_ts) |
|
193 | end_ts_svn = apr_time_t(end_ts) | |
196 | end_rev = svn.repos.dated_revision(repo, end_ts_svn) |
|
194 | end_rev = svn.repos.dated_revision(repo, end_ts_svn) | |
197 | else: |
|
195 | else: | |
198 | end_rev = svn.fs.youngest_rev(fsobj) |
|
196 | end_rev = svn.fs.youngest_rev(fsobj) | |
199 | return start_rev, end_rev |
|
197 | return start_rev, end_rev | |
200 |
|
198 | |||
201 | def revision_properties(self, wire, revision): |
|
199 | def revision_properties(self, wire, revision): | |
202 |
|
200 | |||
203 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
201 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
204 | region = self._region(wire) |
|
202 | region = self._region(wire) | |
205 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
203 | @region.conditional_cache_on_arguments(condition=cache_on) | |
206 | def _revision_properties(_repo_id, _revision): |
|
204 | def _revision_properties(_repo_id, _revision): | |
207 | repo = self._factory.repo(wire) |
|
205 | repo = self._factory.repo(wire) | |
208 | fs_ptr = svn.repos.fs(repo) |
|
206 | fs_ptr = svn.repos.fs(repo) | |
209 | return svn.fs.revision_proplist(fs_ptr, revision) |
|
207 | return svn.fs.revision_proplist(fs_ptr, revision) | |
210 | return _revision_properties(repo_id, revision) |
|
208 | return _revision_properties(repo_id, revision) | |
211 |
|
209 | |||
212 | def revision_changes(self, wire, revision): |
|
210 | def revision_changes(self, wire, revision): | |
213 |
|
211 | |||
214 | repo = self._factory.repo(wire) |
|
212 | repo = self._factory.repo(wire) | |
215 | fsobj = svn.repos.fs(repo) |
|
213 | fsobj = svn.repos.fs(repo) | |
216 | rev_root = svn.fs.revision_root(fsobj, revision) |
|
214 | rev_root = svn.fs.revision_root(fsobj, revision) | |
217 |
|
215 | |||
218 | editor = svn.repos.ChangeCollector(fsobj, rev_root) |
|
216 | editor = svn.repos.ChangeCollector(fsobj, rev_root) | |
219 | editor_ptr, editor_baton = svn.delta.make_editor(editor) |
|
217 | editor_ptr, editor_baton = svn.delta.make_editor(editor) | |
220 | base_dir = "" |
|
218 | base_dir = "" | |
221 | send_deltas = False |
|
219 | send_deltas = False | |
222 | svn.repos.replay2( |
|
220 | svn.repos.replay2( | |
223 | rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas, |
|
221 | rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas, | |
224 | editor_ptr, editor_baton, None) |
|
222 | editor_ptr, editor_baton, None) | |
225 |
|
223 | |||
226 | added = [] |
|
224 | added = [] | |
227 | changed = [] |
|
225 | changed = [] | |
228 | removed = [] |
|
226 | removed = [] | |
229 |
|
227 | |||
230 | # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs |
|
228 | # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs | |
231 | for path, change in editor.changes.items(): |
|
229 | for path, change in editor.changes.items(): | |
232 | # TODO: Decide what to do with directory nodes. Subversion can add |
|
230 | # TODO: Decide what to do with directory nodes. Subversion can add | |
233 | # empty directories. |
|
231 | # empty directories. | |
234 |
|
232 | |||
235 | if change.item_kind == svn.core.svn_node_dir: |
|
233 | if change.item_kind == svn.core.svn_node_dir: | |
236 | continue |
|
234 | continue | |
237 | if change.action in [svn.repos.CHANGE_ACTION_ADD]: |
|
235 | if change.action in [svn.repos.CHANGE_ACTION_ADD]: | |
238 | added.append(path) |
|
236 | added.append(path) | |
239 | elif change.action in [svn.repos.CHANGE_ACTION_MODIFY, |
|
237 | elif change.action in [svn.repos.CHANGE_ACTION_MODIFY, | |
240 | svn.repos.CHANGE_ACTION_REPLACE]: |
|
238 | svn.repos.CHANGE_ACTION_REPLACE]: | |
241 | changed.append(path) |
|
239 | changed.append(path) | |
242 | elif change.action in [svn.repos.CHANGE_ACTION_DELETE]: |
|
240 | elif change.action in [svn.repos.CHANGE_ACTION_DELETE]: | |
243 | removed.append(path) |
|
241 | removed.append(path) | |
244 | else: |
|
242 | else: | |
245 | raise NotImplementedError( |
|
243 | raise NotImplementedError( | |
246 | "Action %s not supported on path %s" % ( |
|
244 | "Action %s not supported on path %s" % ( | |
247 | change.action, path)) |
|
245 | change.action, path)) | |
248 |
|
246 | |||
249 | changes = { |
|
247 | changes = { | |
250 | 'added': added, |
|
248 | 'added': added, | |
251 | 'changed': changed, |
|
249 | 'changed': changed, | |
252 | 'removed': removed, |
|
250 | 'removed': removed, | |
253 | } |
|
251 | } | |
254 | return changes |
|
252 | return changes | |
255 |
|
253 | |||
256 | @reraise_safe_exceptions |
|
254 | @reraise_safe_exceptions | |
257 | def node_history(self, wire, path, revision, limit): |
|
255 | def node_history(self, wire, path, revision, limit): | |
258 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
256 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
259 | region = self._region(wire) |
|
257 | region = self._region(wire) | |
260 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
258 | @region.conditional_cache_on_arguments(condition=cache_on) | |
261 | def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit): |
|
259 | def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit): | |
262 | cross_copies = False |
|
260 | cross_copies = False | |
263 | repo = self._factory.repo(wire) |
|
261 | repo = self._factory.repo(wire) | |
264 | fsobj = svn.repos.fs(repo) |
|
262 | fsobj = svn.repos.fs(repo) | |
265 | rev_root = svn.fs.revision_root(fsobj, revision) |
|
263 | rev_root = svn.fs.revision_root(fsobj, revision) | |
266 |
|
264 | |||
267 | history_revisions = [] |
|
265 | history_revisions = [] | |
268 | history = svn.fs.node_history(rev_root, path) |
|
266 | history = svn.fs.node_history(rev_root, path) | |
269 | history = svn.fs.history_prev(history, cross_copies) |
|
267 | history = svn.fs.history_prev(history, cross_copies) | |
270 | while history: |
|
268 | while history: | |
271 | __, node_revision = svn.fs.history_location(history) |
|
269 | __, node_revision = svn.fs.history_location(history) | |
272 | history_revisions.append(node_revision) |
|
270 | history_revisions.append(node_revision) | |
273 | if limit and len(history_revisions) >= limit: |
|
271 | if limit and len(history_revisions) >= limit: | |
274 | break |
|
272 | break | |
275 | history = svn.fs.history_prev(history, cross_copies) |
|
273 | history = svn.fs.history_prev(history, cross_copies) | |
276 | return history_revisions |
|
274 | return history_revisions | |
277 | return _assert_correct_path(context_uid, repo_id, path, revision, limit) |
|
275 | return _assert_correct_path(context_uid, repo_id, path, revision, limit) | |
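The method above shows the caching pattern that most read-only calls in this class repeat: derive a cache condition from the wire, then wrap the expensive svn fs work in `region.conditional_cache_on_arguments`. A stripped-down sketch of the recurring shape (the method name and body are hypothetical):

    def node_kind(self, wire, path, revision):
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _node_kind(_repo_id, _path, _revision):
            # the underscore arguments only feed the cache key; the
            # closure reuses the outer `wire` to open the repository
            repo = self._factory.repo(wire)
            root = svn.fs.revision_root(svn.repos.fs(repo), _revision)
            return svn.fs.check_path(root, _path)

        return _node_kind(repo_id, path, revision)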
278 |
|
276 | |||
279 | def node_properties(self, wire, path, revision): |
|
277 | def node_properties(self, wire, path, revision): | |
280 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
278 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
281 | region = self._region(wire) |
|
279 | region = self._region(wire) | |
282 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
280 | @region.conditional_cache_on_arguments(condition=cache_on) | |
283 | def _node_properties(_repo_id, _path, _revision): |
|
281 | def _node_properties(_repo_id, _path, _revision): | |
284 | repo = self._factory.repo(wire) |
|
282 | repo = self._factory.repo(wire) | |
285 | fsobj = svn.repos.fs(repo) |
|
283 | fsobj = svn.repos.fs(repo) | |
286 | rev_root = svn.fs.revision_root(fsobj, revision) |
|
284 | rev_root = svn.fs.revision_root(fsobj, revision) | |
287 | return svn.fs.node_proplist(rev_root, path) |
|
285 | return svn.fs.node_proplist(rev_root, path) | |
288 | return _node_properties(repo_id, path, revision) |
|
286 | return _node_properties(repo_id, path, revision) | |
289 |
|
287 | |||
290 | def file_annotate(self, wire, path, revision): |
|
288 | def file_annotate(self, wire, path, revision): | |
291 | abs_path = 'file://' + urllib.request.pathname2url( |
|
289 | abs_path = 'file://' + urllib.request.pathname2url( | |
292 | vcspath.join(wire['path'], path)) |
|
290 | vcspath.join(wire['path'], path)) | |
293 | file_uri = svn.core.svn_path_canonicalize(abs_path) |
|
291 | file_uri = svn.core.svn_path_canonicalize(abs_path) | |
294 |
|
292 | |||
295 | start_rev = svn_opt_revision_value_t(0) |
|
293 | start_rev = svn_opt_revision_value_t(0) | |
296 | peg_rev = svn_opt_revision_value_t(revision) |
|
294 | peg_rev = svn_opt_revision_value_t(revision) | |
297 | end_rev = peg_rev |
|
295 | end_rev = peg_rev | |
298 |
|
296 | |||
299 | annotations = [] |
|
297 | annotations = [] | |
300 |
|
298 | |||
301 | def receiver(line_no, revision, author, date, line, pool): |
|
299 | def receiver(line_no, revision, author, date, line, pool): | |
302 | annotations.append((line_no, revision, line)) |
|
300 | annotations.append((line_no, revision, line)) | |
303 |
|
301 | |||
304 | # TODO: Cannot use blame5, missing typemap function in the swig code |
|
302 | # TODO: Cannot use blame5, missing typemap function in the swig code | |
305 | try: |
|
303 | try: | |
306 | svn.client.blame2( |
|
304 | svn.client.blame2( | |
307 | file_uri, peg_rev, start_rev, end_rev, |
|
305 | file_uri, peg_rev, start_rev, end_rev, | |
308 | receiver, svn.client.create_context()) |
|
306 | receiver, svn.client.create_context()) | |
309 | except svn.core.SubversionException as exc: |
|
307 | except svn.core.SubversionException as exc: | |
310 | log.exception("Error during blame operation.") |
|
308 | log.exception("Error during blame operation.") | |
311 | raise Exception( |
|
309 | raise Exception( | |
312 | "Blame not supported or file does not exist at path %s. " |
|
310 | "Blame not supported or file does not exist at path %s. " | |
313 | "Error %s." % (path, exc)) |
|
311 | "Error %s." % (path, exc)) | |
314 |
|
312 | |||
315 | return annotations |
|
313 | return annotations | |
316 |
|
314 | |||
317 | def get_node_type(self, wire, path, revision=None): |
|
315 | def get_node_type(self, wire, path, revision=None): | |
318 |
|
316 | |||
319 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
317 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
320 | region = self._region(wire) |
|
318 | region = self._region(wire) | |
321 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
319 | @region.conditional_cache_on_arguments(condition=cache_on) | |
322 | def _get_node_type(_repo_id, _path, _revision): |
|
320 | def _get_node_type(_repo_id, _path, _revision): | |
323 | repo = self._factory.repo(wire) |
|
321 | repo = self._factory.repo(wire) | |
324 | fs_ptr = svn.repos.fs(repo) |
|
322 | fs_ptr = svn.repos.fs(repo) | |
325 | if _revision is None: |
|
323 | if _revision is None: | |
326 | _revision = svn.fs.youngest_rev(fs_ptr) |
|
324 | _revision = svn.fs.youngest_rev(fs_ptr) | |
327 | root = svn.fs.revision_root(fs_ptr, _revision) |
|
325 | root = svn.fs.revision_root(fs_ptr, _revision) | |
328 | node = svn.fs.check_path(root, path) |
|
326 | node = svn.fs.check_path(root, path) | |
329 | return NODE_TYPE_MAPPING.get(node, None) |
|
327 | return NODE_TYPE_MAPPING.get(node, None) | |
330 | return _get_node_type(repo_id, path, revision) |
|
328 | return _get_node_type(repo_id, path, revision) | |
331 |
|
329 | |||
332 | def get_nodes(self, wire, path, revision=None): |
|
330 | def get_nodes(self, wire, path, revision=None): | |
333 |
|
331 | |||
334 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
332 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
335 | region = self._region(wire) |
|
333 | region = self._region(wire) | |
336 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
334 | @region.conditional_cache_on_arguments(condition=cache_on) | |
337 | def _get_nodes(_repo_id, _path, _revision): |
|
335 | def _get_nodes(_repo_id, _path, _revision): | |
338 | repo = self._factory.repo(wire) |
|
336 | repo = self._factory.repo(wire) | |
339 | fsobj = svn.repos.fs(repo) |
|
337 | fsobj = svn.repos.fs(repo) | |
340 | if _revision is None: |
|
338 | if _revision is None: | |
341 | _revision = svn.fs.youngest_rev(fsobj) |
|
339 | _revision = svn.fs.youngest_rev(fsobj) | |
342 | root = svn.fs.revision_root(fsobj, _revision) |
|
340 | root = svn.fs.revision_root(fsobj, _revision) | |
343 | entries = svn.fs.dir_entries(root, path) |
|
341 | entries = svn.fs.dir_entries(root, path) | |
344 | result = [] |
|
342 | result = [] | |
345 | for entry_path, entry_info in entries.items(): |
|
343 | for entry_path, entry_info in entries.items(): | |
346 | result.append( |
|
344 | result.append( | |
347 | (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None))) |
|
345 | (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None))) | |
348 | return result |
|
346 | return result | |
349 | return _get_nodes(repo_id, path, revision) |
|
347 | return _get_nodes(repo_id, path, revision) | |
350 |
|
348 | |||
351 | def get_file_content(self, wire, path, rev=None): |
|
349 | def get_file_content(self, wire, path, rev=None): | |
352 | repo = self._factory.repo(wire) |
|
350 | repo = self._factory.repo(wire) | |
353 | fsobj = svn.repos.fs(repo) |
|
351 | fsobj = svn.repos.fs(repo) | |
354 | if rev is None: |
|
352 | if rev is None: | |
355 | rev = svn.fs.youngest_revision(fsobj) |
|
353 | rev = svn.fs.youngest_revision(fsobj) | |
356 | root = svn.fs.revision_root(fsobj, rev) |
|
354 | root = svn.fs.revision_root(fsobj, rev) | |
357 | content = svn.core.Stream(svn.fs.file_contents(root, path)) |
|
355 | content = svn.core.Stream(svn.fs.file_contents(root, path)) | |
358 | return content.read() |
|
356 | return content.read() | |
359 |
|
357 | |||
360 | def get_file_size(self, wire, path, revision=None): |
|
358 | def get_file_size(self, wire, path, revision=None): | |
361 |
|
359 | |||
362 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
360 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
363 | region = self._region(wire) |
|
361 | region = self._region(wire) | |
364 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
362 | @region.conditional_cache_on_arguments(condition=cache_on) | |
365 | def _get_file_size(_repo_id, _path, _revision): |
|
363 | def _get_file_size(_repo_id, _path, _revision): | |
366 | repo = self._factory.repo(wire) |
|
364 | repo = self._factory.repo(wire) | |
367 | fsobj = svn.repos.fs(repo) |
|
365 | fsobj = svn.repos.fs(repo) | |
368 | if _revision is None: |
|
366 | if _revision is None: | |
369 | _revision = svn.fs.youngest_revision(fsobj) |
|
367 | _revision = svn.fs.youngest_revision(fsobj) | |
370 | root = svn.fs.revision_root(fsobj, _revision) |
|
368 | root = svn.fs.revision_root(fsobj, _revision) | |
371 | size = svn.fs.file_length(root, path) |
|
369 | size = svn.fs.file_length(root, path) | |
372 | return size |
|
370 | return size | |
373 | return _get_file_size(repo_id, path, revision) |
|
371 | return _get_file_size(repo_id, path, revision) | |
374 |
|
372 | |||
375 | def create_repository(self, wire, compatible_version=None): |
|
373 | def create_repository(self, wire, compatible_version=None): | |
376 | log.info('Creating Subversion repository in path "%s"', wire['path']) |
|
374 | log.info('Creating Subversion repository in path "%s"', wire['path']) | |
377 | self._factory.repo(wire, create=True, |
|
375 | self._factory.repo(wire, create=True, | |
378 | compatible_version=compatible_version) |
|
376 | compatible_version=compatible_version) | |
379 |
|
377 | |||
380 | def get_url_and_credentials(self, src_url): |
|
378 | def get_url_and_credentials(self, src_url): | |
381 | obj = urllib.parse.urlparse(src_url) |
|
379 | obj = urllib.parse.urlparse(src_url) | |
382 | username = obj.username or None |
|
380 | username = obj.username or None | |
383 | password = obj.password or None |
|
381 | password = obj.password or None | |
384 | return username, password, src_url |
|
382 | return username, password, src_url | |
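A quick illustrative call (the URL is made up). Note that the credentials are extracted but the URL itself is returned unchanged, so it can still be handed to svnrdump alongside the explicit --username/--password flags below:

    username, password, url = self.get_url_and_credentials(
        'https://alice:s3cret@svn.example.com/repo')
    # -> ('alice', 's3cret', 'https://alice:s3cret@svn.example.com/repo')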
385 |
|
383 | |||
386 | def import_remote_repository(self, wire, src_url): |
|
384 | def import_remote_repository(self, wire, src_url): | |
387 | repo_path = wire['path'] |
|
385 | repo_path = wire['path'] | |
388 | if not self.is_path_valid_repository(wire, repo_path): |
|
386 | if not self.is_path_valid_repository(wire, repo_path): | |
389 | raise Exception( |
|
387 | raise Exception( | |
390 | "Path %s is not a valid Subversion repository." % repo_path) |
|
388 | "Path %s is not a valid Subversion repository." % repo_path) | |
391 |
|
389 | |||
392 | username, password, src_url = self.get_url_and_credentials(src_url) |
|
390 | username, password, src_url = self.get_url_and_credentials(src_url) | |
393 | rdump_cmd = ['svnrdump', 'dump', '--non-interactive', |
|
391 | rdump_cmd = ['svnrdump', 'dump', '--non-interactive', | |
394 | '--trust-server-cert-failures=unknown-ca'] |
|
392 | '--trust-server-cert-failures=unknown-ca'] | |
395 | if username and password: |
|
393 | if username and password: | |
396 | rdump_cmd += ['--username', username, '--password', password] |
|
394 | rdump_cmd += ['--username', username, '--password', password] | |
397 | rdump_cmd += [src_url] |
|
395 | rdump_cmd += [src_url] | |
398 |
|
396 | |||
399 | rdump = subprocess.Popen( |
|
397 | rdump = subprocess.Popen( | |
400 | rdump_cmd, |
|
398 | rdump_cmd, | |
401 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
|
399 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) | |
402 | load = subprocess.Popen( |
|
400 | load = subprocess.Popen( | |
403 | ['svnadmin', 'load', repo_path], stdin=rdump.stdout) |
|
401 | ['svnadmin', 'load', repo_path], stdin=rdump.stdout) | |
404 |
|
402 | |||
405 | # TODO: johbo: This can be a very long operation, might be better |
|
403 | # TODO: johbo: This can be a very long operation, might be better | |
406 | # to track some kind of status and provide an api to check if the |
|
404 | # to track some kind of status and provide an api to check if the | |
407 | # import is done. |
|
405 | # import is done. | |
408 | rdump.wait() |
|
406 | rdump.wait() | |
409 | load.wait() |
|
407 | load.wait() | |
410 |
|
408 | |||
411 | log.debug('svnrdump process ended with code: %s', rdump.returncode) |
|
409 | log.debug('svnrdump process ended with code: %s', rdump.returncode) | |
412 | if rdump.returncode != 0: |
|
410 | if rdump.returncode != 0: | |
413 | errors = rdump.stderr.read() |
|
411 | errors = rdump.stderr.read() | |
414 | log.error('svnrdump dump failed: statuscode %s: message: %s', |
|
412 | log.error('svnrdump dump failed: statuscode %s: message: %s', | |
415 | rdump.returncode, errors) |
|
413 | rdump.returncode, errors) | |
416 | reason = 'UNKNOWN' |
|
414 | reason = 'UNKNOWN' | |
417 | if 'svnrdump: E230001:' in errors: |
|
415 | if 'svnrdump: E230001:' in errors: | |
418 | reason = 'INVALID_CERTIFICATE' |
|
416 | reason = 'INVALID_CERTIFICATE' | |
419 |
|
417 | |||
420 | if reason == 'UNKNOWN': |
|
418 | if reason == 'UNKNOWN': | |
421 | reason = 'UNKNOWN:{}'.format(errors) |
|
419 | reason = 'UNKNOWN:{}'.format(errors) | |
422 | raise Exception( |
|
420 | raise Exception( | |
423 | 'Failed to dump the remote repository from %s. Reason: %s' % ( |
|
421 | 'Failed to dump the remote repository from %s. Reason: %s' % ( | |
424 | src_url, reason)) |
|
422 | src_url, reason)) | |
425 | if load.returncode != 0: |
|
423 | if load.returncode != 0: | |
426 | raise Exception( |
|
424 | raise Exception( | |
427 | 'Failed to load the dump of remote repository from %s.' % |
|
425 | 'Failed to load the dump of remote repository from %s.' % | |
428 | (src_url, )) |
|
426 | (src_url, )) | |
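Conceptually, the import above drives the shell pipeline `svnrdump dump <url> | svnadmin load <repo>` from Python. A minimal standalone sketch of the same piping (the URL and path are hypothetical):

    import subprocess

    rdump = subprocess.Popen(
        ['svnrdump', 'dump', '--non-interactive',
         'https://svn.example.com/repo'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    load = subprocess.Popen(
        ['svnadmin', 'load', '/srv/repos/mirror'], stdin=rdump.stdout)
    rdump.stdout.close()  # let svnrdump receive SIGPIPE if load exits early
    load.communicate()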
429 |
|
427 | |||
430 | def commit(self, wire, message, author, timestamp, updated, removed): |
|
428 | def commit(self, wire, message, author, timestamp, updated, removed): | |
431 | assert isinstance(message, str) |
|
429 | assert isinstance(message, str) | |
432 | assert isinstance(author, str) |
|
430 | assert isinstance(author, str) | |
433 |
|
431 | |||
434 | repo = self._factory.repo(wire) |
|
432 | repo = self._factory.repo(wire) | |
435 | fsobj = svn.repos.fs(repo) |
|
433 | fsobj = svn.repos.fs(repo) | |
436 |
|
434 | |||
437 | rev = svn.fs.youngest_rev(fsobj) |
|
435 | rev = svn.fs.youngest_rev(fsobj) | |
438 | txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message) |
|
436 | txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message) | |
439 | txn_root = svn.fs.txn_root(txn) |
|
437 | txn_root = svn.fs.txn_root(txn) | |
440 |
|
438 | |||
441 | for node in updated: |
|
439 | for node in updated: | |
442 | TxnNodeProcessor(node, txn_root).update() |
|
440 | TxnNodeProcessor(node, txn_root).update() | |
443 | for node in removed: |
|
441 | for node in removed: | |
444 | TxnNodeProcessor(node, txn_root).remove() |
|
442 | TxnNodeProcessor(node, txn_root).remove() | |
445 |
|
443 | |||
446 | commit_id = svn.repos.fs_commit_txn(repo, txn) |
|
444 | commit_id = svn.repos.fs_commit_txn(repo, txn) | |
447 |
|
445 | |||
448 | if timestamp: |
|
446 | if timestamp: | |
449 | apr_time = apr_time_t(timestamp) |
|
447 | apr_time = apr_time_t(timestamp) | |
450 | ts_formatted = svn.core.svn_time_to_cstring(apr_time) |
|
448 | ts_formatted = svn.core.svn_time_to_cstring(apr_time) | |
451 | svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted) |
|
449 | svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted) | |
452 |
|
450 | |||
453 | log.debug('Committed revision "%s" to "%s".', commit_id, wire['path']) |
|
451 | log.debug('Committed revision "%s" to "%s".', commit_id, wire['path']) | |
454 | return commit_id |
|
452 | return commit_id | |
455 |
|
453 | |||
456 | def diff(self, wire, rev1, rev2, path1=None, path2=None, |
|
454 | def diff(self, wire, rev1, rev2, path1=None, path2=None, | |
457 | ignore_whitespace=False, context=3): |
|
455 | ignore_whitespace=False, context=3): | |
458 |
|
456 | |||
459 | wire.update(cache=False) |
|
457 | wire.update(cache=False) | |
460 | repo = self._factory.repo(wire) |
|
458 | repo = self._factory.repo(wire) | |
461 | diff_creator = SvnDiffer( |
|
459 | diff_creator = SvnDiffer( | |
462 | repo, rev1, path1, rev2, path2, ignore_whitespace, context) |
|
460 | repo, rev1, path1, rev2, path2, ignore_whitespace, context) | |
463 | try: |
|
461 | try: | |
464 | return diff_creator.generate_diff() |
|
462 | return diff_creator.generate_diff() | |
465 | except svn.core.SubversionException as e: |
|
463 | except svn.core.SubversionException as e: | |
466 | log.exception( |
|
464 | log.exception( | |
467 | "Error during diff operation operation. " |
|
465 | "Error during diff operation operation. " | |
468 | "Path might not exist %s, %s" % (path1, path2)) |
|
466 | "Path might not exist %s, %s" % (path1, path2)) | |
469 | return "" |
|
467 | return "" | |
470 |
|
468 | |||
471 | @reraise_safe_exceptions |
|
469 | @reraise_safe_exceptions | |
472 | def is_large_file(self, wire, path): |
|
470 | def is_large_file(self, wire, path): | |
473 | return False |
|
471 | return False | |
474 |
|
472 | |||
475 | @reraise_safe_exceptions |
|
473 | @reraise_safe_exceptions | |
476 | def is_binary(self, wire, rev, path): |
|
474 | def is_binary(self, wire, rev, path): | |
477 | cache_on, context_uid, repo_id = self._cache_on(wire) |
|
475 | cache_on, context_uid, repo_id = self._cache_on(wire) | |
478 |
|
476 | |||
479 | region = self._region(wire) |
|
477 | region = self._region(wire) | |
480 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
478 | @region.conditional_cache_on_arguments(condition=cache_on) | |
481 | def _is_binary(_repo_id, _rev, _path): |
|
479 | def _is_binary(_repo_id, _rev, _path): | |
482 | raw_bytes = self.get_file_content(wire, path, rev) |
|
480 | raw_bytes = self.get_file_content(wire, path, rev) | |
483 | return raw_bytes and b'\0' in raw_bytes |
|
481 | return raw_bytes and b'\0' in raw_bytes | |
484 |
|
482 | |||
485 | return _is_binary(repo_id, rev, path) |
|
483 | return _is_binary(repo_id, rev, path) | |
486 |
|
484 | |||
487 | @reraise_safe_exceptions |
|
485 | @reraise_safe_exceptions | |
488 | def run_svn_command(self, wire, cmd, **opts): |
|
486 | def run_svn_command(self, wire, cmd, **opts): | |
489 | path = wire.get('path', None) |
|
487 | path = wire.get('path', None) | |
490 |
|
488 | |||
491 | if path and os.path.isdir(path): |
|
489 | if path and os.path.isdir(path): | |
492 | opts['cwd'] = path |
|
490 | opts['cwd'] = path | |
493 |
|
491 | |||
494 | safe_call = opts.pop('_safe', False) |
|
492 | safe_call = opts.pop('_safe', False) | |
495 |
|
493 | |||
496 | svnenv = os.environ.copy() |
|
494 | svnenv = os.environ.copy() | |
497 | svnenv.update(opts.pop('extra_env', {})) |
|
495 | svnenv.update(opts.pop('extra_env', {})) | |
498 |
|
496 | |||
499 | _opts = {'env': svnenv, 'shell': False} |
|
497 | _opts = {'env': svnenv, 'shell': False} | |
500 |
|
498 | |||
501 | try: |
|
499 | try: | |
502 | _opts.update(opts) |
|
500 | _opts.update(opts) | |
503 | p = subprocessio.SubprocessIOChunker(cmd, **_opts) |
|
501 | p = subprocessio.SubprocessIOChunker(cmd, **_opts) | |
504 |
|
502 | |||
505 | return ''.join(p), ''.join(p.error) |
|
503 | return ''.join(p), ''.join(p.error) | |
506 | except (EnvironmentError, OSError) as err: |
|
504 | except (EnvironmentError, OSError) as err: | |
507 | if safe_call: |
|
505 | if safe_call: | |
508 | return '', safe_str(err).strip() |
|
506 | return '', safe_str(err).strip() | |
509 | else: |
|
507 | else: | |
510 | cmd = ' '.join(cmd) # human friendly CMD |
|
508 | cmd = ' '.join(cmd) # human friendly CMD | |
511 | tb_err = ("Couldn't run svn command (%s).\n" |
|
509 | tb_err = ("Couldn't run svn command (%s).\n" | |
512 | "Original error was:%s\n" |
|
510 | "Original error was:%s\n" | |
513 | "Call options:%s\n" |
|
511 | "Call options:%s\n" | |
514 | % (cmd, err, _opts)) |
|
512 | % (cmd, err, _opts)) | |
515 | log.exception(tb_err) |
|
513 | log.exception(tb_err) | |
516 | raise exceptions.VcsException()(tb_err) |
|
514 | raise exceptions.VcsException()(tb_err) | |
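For example (a hypothetical invocation), a caller can opt into the non-raising variant via the `_safe` flag and pass extra environment variables through `extra_env`:

    stdout, stderr = remote.run_svn_command(
        wire, ['svn', 'info', wire['path']],
        _safe=True, extra_env={'LC_ALL': 'C'})
    if stderr:
        log.warning('svn command failed: %s', stderr)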
517 |
|
515 | |||
518 | @reraise_safe_exceptions |
|
516 | @reraise_safe_exceptions | |
519 | def install_hooks(self, wire, force=False): |
|
517 | def install_hooks(self, wire, force=False): | |
520 | from vcsserver.hook_utils import install_svn_hooks |
|
518 | from vcsserver.hook_utils import install_svn_hooks | |
521 | repo_path = wire['path'] |
|
519 | repo_path = wire['path'] | |
522 | binary_dir = settings.BINARY_DIR |
|
520 | binary_dir = settings.BINARY_DIR | |
523 | executable = None |
|
521 | executable = None | |
524 | if binary_dir: |
|
522 | if binary_dir: | |
525 | executable = os.path.join(binary_dir, 'python') |
|
523 | executable = os.path.join(binary_dir, 'python') | |
526 | return install_svn_hooks( |
|
524 | return install_svn_hooks( | |
527 | repo_path, executable=executable, force_create=force) |
|
525 | repo_path, executable=executable, force_create=force) | |
528 |
|
526 | |||
529 | @reraise_safe_exceptions |
|
527 | @reraise_safe_exceptions | |
530 | def get_hooks_info(self, wire): |
|
528 | def get_hooks_info(self, wire): | |
531 | from vcsserver.hook_utils import ( |
|
529 | from vcsserver.hook_utils import ( | |
532 | get_svn_pre_hook_version, get_svn_post_hook_version) |
|
530 | get_svn_pre_hook_version, get_svn_post_hook_version) | |
533 | repo_path = wire['path'] |
|
531 | repo_path = wire['path'] | |
534 | return { |
|
532 | return { | |
535 | 'pre_version': get_svn_pre_hook_version(repo_path), |
|
533 | 'pre_version': get_svn_pre_hook_version(repo_path), | |
536 | 'post_version': get_svn_post_hook_version(repo_path), |
|
534 | 'post_version': get_svn_post_hook_version(repo_path), | |
537 | } |
|
535 | } | |
538 |
|
536 | |||
539 | @reraise_safe_exceptions |
|
537 | @reraise_safe_exceptions | |
540 | def set_head_ref(self, wire, head_name): |
|
538 | def set_head_ref(self, wire, head_name): | |
541 | pass |
|
539 | pass | |
542 |
|
540 | |||
543 | @reraise_safe_exceptions |
|
541 | @reraise_safe_exceptions | |
544 | def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path, |
|
542 | def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path, | |
545 | archive_dir_name, commit_id): |
|
543 | archive_dir_name, commit_id): | |
546 |
|
544 | |||
547 | def walk_tree(root, root_dir, _commit_id): |
|
545 | def walk_tree(root, root_dir, _commit_id): | |
548 | """ |
|
546 | """ | |
549 | Special recursive svn repo walker |
|
547 | Special recursive svn repo walker | |
550 | """ |
|
548 | """ | |
551 |
|
549 | |||
552 | filemode_default = 0o100644 |
|
550 | filemode_default = 0o100644 | |
553 | filemode_executable = 0o100755 |
|
551 | filemode_executable = 0o100755 | |
554 |
|
552 | |||
555 | file_iter = svn.fs.dir_entries(root, root_dir) |
|
553 | file_iter = svn.fs.dir_entries(root, root_dir) | |
556 | for f_name in file_iter: |
|
554 | for f_name in file_iter: | |
557 | f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None) |
|
555 | f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None) | |
558 |
|
556 | |||
559 | if f_type == 'dir': |
|
557 | if f_type == 'dir': | |
560 | # return only DIR, and then all entries in that dir |
|
558 | # return only DIR, and then all entries in that dir | |
561 | yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type |
|
559 | yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type | |
562 | new_root = os.path.join(root_dir, f_name) |
|
560 | new_root = os.path.join(root_dir, f_name) | |
563 | for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id): |
|
561 | for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id): | |
564 | yield _f_name, _f_data, _f_type |
|
562 | yield _f_name, _f_data, _f_type | |
565 | else: |
|
563 | else: | |
566 | f_path = os.path.join(root_dir, f_name).rstrip('/') |
|
564 | f_path = os.path.join(root_dir, f_name).rstrip('/') | |
567 | prop_list = svn.fs.node_proplist(root, f_path) |
|
565 | prop_list = svn.fs.node_proplist(root, f_path) | |
568 |
|
566 | |||
569 | f_mode = filemode_default |
|
567 | f_mode = filemode_default | |
570 | if prop_list.get('svn:executable'): |
|
568 | if prop_list.get('svn:executable'): | |
571 | f_mode = filemode_executable |
|
569 | f_mode = filemode_executable | |
572 |
|
570 | |||
573 | f_is_link = False |
|
571 | f_is_link = False | |
574 | if prop_list.get('svn:special'): |
|
572 | if prop_list.get('svn:special'): | |
575 | f_is_link = True |
|
573 | f_is_link = True | |
576 |
|
574 | |||
577 | data = { |
|
575 | data = { | |
578 | 'is_link': f_is_link, |
|
576 | 'is_link': f_is_link, | |
579 | 'mode': f_mode, |
|
577 | 'mode': f_mode, | |
580 | 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read |
|
578 | 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read | |
581 | } |
|
579 | } | |
582 |
|
580 | |||
583 | yield f_path, data, f_type |
|
581 | yield f_path, data, f_type | |
584 |
|
582 | |||
585 | def file_walker(_commit_id, path): |
|
583 | def file_walker(_commit_id, path): | |
586 | repo = self._factory.repo(wire) |
|
584 | repo = self._factory.repo(wire) | |
587 | root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id)) |
|
585 | root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id)) | |
588 |
|
586 | |||
589 | def no_content(): |
|
587 | def no_content(): | |
590 | raise NoContentException() |
|
588 | raise NoContentException() | |
591 |
|
589 | |||
592 | for f_name, f_data, f_type in walk_tree(root, path, _commit_id): |
|
590 | for f_name, f_data, f_type in walk_tree(root, path, _commit_id): | |
593 | file_path = f_name |
|
591 | file_path = f_name | |
594 |
|
592 | |||
595 | if f_type == 'dir': |
|
593 | if f_type == 'dir': | |
596 | mode = f_data['mode'] |
|
594 | mode = f_data['mode'] | |
597 | yield ArchiveNode(file_path, mode, False, no_content) |
|
595 | yield ArchiveNode(file_path, mode, False, no_content) | |
598 | else: |
|
596 | else: | |
599 | mode = f_data['mode'] |
|
597 | mode = f_data['mode'] | |
600 | is_link = f_data['is_link'] |
|
598 | is_link = f_data['is_link'] | |
601 | data_stream = f_data['content_stream'] |
|
599 | data_stream = f_data['content_stream'] | |
602 | yield ArchiveNode(file_path, mode, is_link, data_stream) |
|
600 | yield ArchiveNode(file_path, mode, is_link, data_stream) | |
603 |
|
601 | |||
604 | return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path, |
|
602 | return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path, | |
605 | archive_dir_name, commit_id) |
|
603 | archive_dir_name, commit_id) | |
606 |
|
604 | |||
607 |
|
605 | |||
608 | class SvnDiffer(object): |
|
606 | class SvnDiffer(object): | |
609 | """ |
|
607 | """ | |
610 | Utility to create diffs based on difflib and the Subversion api |
|
608 | Utility to create diffs based on difflib and the Subversion api | |
611 | """ |
|
609 | """ | |
612 |
|
610 | |||
613 | binary_content = False |
|
611 | binary_content = False | |
614 |
|
612 | |||
615 | def __init__( |
|
613 | def __init__( | |
616 | self, repo, src_rev, src_path, tgt_rev, tgt_path, |
|
614 | self, repo, src_rev, src_path, tgt_rev, tgt_path, | |
617 | ignore_whitespace, context): |
|
615 | ignore_whitespace, context): | |
618 | self.repo = repo |
|
616 | self.repo = repo | |
619 | self.ignore_whitespace = ignore_whitespace |
|
617 | self.ignore_whitespace = ignore_whitespace | |
620 | self.context = context |
|
618 | self.context = context | |
621 |
|
619 | |||
622 | fsobj = svn.repos.fs(repo) |
|
620 | fsobj = svn.repos.fs(repo) | |
623 |
|
621 | |||
624 | self.tgt_rev = tgt_rev |
|
622 | self.tgt_rev = tgt_rev | |
625 | self.tgt_path = tgt_path or '' |
|
623 | self.tgt_path = tgt_path or '' | |
626 | self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev) |
|
624 | self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev) | |
627 | self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path) |
|
625 | self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path) | |
628 |
|
626 | |||
629 | self.src_rev = src_rev |
|
627 | self.src_rev = src_rev | |
630 | self.src_path = src_path or self.tgt_path |
|
628 | self.src_path = src_path or self.tgt_path | |
631 | self.src_root = svn.fs.revision_root(fsobj, src_rev) |
|
629 | self.src_root = svn.fs.revision_root(fsobj, src_rev) | |
632 | self.src_kind = svn.fs.check_path(self.src_root, self.src_path) |
|
630 | self.src_kind = svn.fs.check_path(self.src_root, self.src_path) | |
633 |
|
631 | |||
634 | self._validate() |
|
632 | self._validate() | |
635 |
|
633 | |||
636 | def _validate(self): |
|
634 | def _validate(self): | |
637 | if (self.tgt_kind != svn.core.svn_node_none and |
|
635 | if (self.tgt_kind != svn.core.svn_node_none and | |
638 | self.src_kind != svn.core.svn_node_none and |
|
636 | self.src_kind != svn.core.svn_node_none and | |
639 | self.src_kind != self.tgt_kind): |
|
637 | self.src_kind != self.tgt_kind): | |
640 | # TODO: johbo: proper error handling |
|
638 | # TODO: johbo: proper error handling | |
641 | raise Exception( |
|
639 | raise Exception( | |
642 | "Source and target are not compatible for diff generation. " |
|
640 | "Source and target are not compatible for diff generation. " | |
643 | "Source type: %s, target type: %s" % |
|
641 | "Source type: %s, target type: %s" % | |
644 | (self.src_kind, self.tgt_kind)) |
|
642 | (self.src_kind, self.tgt_kind)) | |
645 |
|
643 | |||
646 | def generate_diff(self): |
|
644 | def generate_diff(self): | |
647 | buf = io.StringIO() |
|
645 | buf = io.StringIO() | |
648 | if self.tgt_kind == svn.core.svn_node_dir: |
|
646 | if self.tgt_kind == svn.core.svn_node_dir: | |
649 | self._generate_dir_diff(buf) |
|
647 | self._generate_dir_diff(buf) | |
650 | else: |
|
648 | else: | |
651 | self._generate_file_diff(buf) |
|
649 | self._generate_file_diff(buf) | |
652 | return buf.getvalue() |
|
650 | return buf.getvalue() | |
653 |
|
651 | |||
    def _generate_dir_diff(self, buf):
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

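For orientation, a quick note on the path splitting above: `vcspath` is assumed here to behave like `posixpath`, so `split` cuts off the final path component (illustrative values):

# Illustrative only, assuming posixpath-like semantics for vcspath:
vcspath.split('docs/index.rst')  # -> ('docs', 'index.rst')
vcspath.split('README')          # -> ('', 'README')
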
    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # keep behaviour consistent with git/hg: return an empty diff
            # when comparing the same revision
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            # TODO(marcink): introduce binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            # TODO(marcink): introduce binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

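As a sanity check of the header writing above, the output for an added one-line text file would look roughly like this (path, revision numbers and the hunk itself are illustrative, not taken from the source):

Index: docs/readme.txt
===================================================================
diff --git a/docs/readme.txt b/docs/readme.txt
new file mode 10644
--- /dev/null	(revision 0)
+++ b/docs/readme.txt	(revision 42)
@@ -0,0 +1 @@
+hello
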
    def _get_mime_type(self, path):
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)

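A minimal usage sketch for the class as a whole, not taken from the source: the repository path, revisions and file path are hypothetical, and `svn.repos.open` is assumed as the way to obtain the repo handle the constructor expects:

repo = svn.repos.open('/srv/svn/my-repo')  # hypothetical repository path
differ = SvnDiffer(
    repo, src_rev=41, src_path='docs/readme.txt',
    tgt_rev=42, tgt_path='docs/readme.txt',
    ignore_whitespace=False, context=3)
print(differ.generate_diff())
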
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))

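Once `svn.repos.dir_delta2` has driven this editor (as `_generate_dir_diff` above does), `editor.changes` holds flat `(path, kind, change)` tuples; the sample values below are illustrative only:

editor = DiffChangeEditor()
editor_ptr, editor_baton = svn.delta.make_editor(editor)
# ... svn.repos.dir_delta2(..., editor_ptr, editor_baton, ...) runs here ...
# editor.changes might then contain, for example:
#   [('docs/index.rst', 'file', 'add'),
#    ('old.txt', None, 'delete'),
#    ('setup.py', 'file', 'change')]
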
def authorization_callback_allow_all(root, path, pool):
    return True


class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)


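A minimal sketch of driving this class, not from the source: it assumes a repository handle `repo` obtained elsewhere, uses the stock `svn.fs` transaction API, and the node path/content are placeholders:

fsobj = svn.repos.fs(repo)
txn = svn.fs.begin_txn(fsobj, svn.fs.youngest_rev(fsobj))
txn_root = svn.fs.txn_root(txn)

node = {'path': 'docs/readme.txt', 'content': 'hello\n'}  # placeholder node
TxnNodeProcessor(node, txn_root).update()  # create or modify the file
svn.fs.commit_txn(txn)  # commit the transaction to produce a new revision
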
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    return timestamp * 1E6

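For reference, `apr_time_t` counts microseconds since the Unix epoch; note that the multiplication above yields a Python float, so a caller needing the integral C value would wrap it in `int()` (an observation, not something the source does):

import time

usecs = apr_time_t(time.time())  # seconds -> microseconds (a float here)
usecs_int = int(usecs)           # integral apr_time_t value, if required
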

def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
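A quick check of what this helper builds; note that despite its name it returns a complete `svn_opt_revision_t` pinned to revision `num`, suitable for `svn.client` calls:

rev = svn_opt_revision_value_t(42)
assert rev.kind == svn.core.svn_opt_revision_number
assert rev.value.number == 42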