##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r702:778db25c merge stable
parent child Browse files
Show More
@@ -0,0 +1,12 b''
1 # contains not directly required libraries we want to pin the version.
2
3 atomicwrites==1.2.1
4 attrs==18.2.0
5 hupper==1.6.1
6 pathlib2==2.3.4
7 pygments==2.4.2
8 psutil==5.5.1
9 pluggy==0.11.0
10 scandir==1.10.0
11 setproctitle==1.1.10
12 venusian==1.2.0
@@ -1,6 +1,6 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.16.2
2 current_version = 4.17.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
6
6
@@ -1,16 +1,14 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.16.2
13 version = 4.17.0
16
14
@@ -1,152 +1,154 b''
1 """
1 """
2 gunicorn config extension and hooks. Sets additional configuration that is
2 gunicorn config extension and hooks. Sets additional configuration that is
3 available post the .ini config.
3 available post the .ini config.
4
4
5 - workers = ${cpu_number}
5 - workers = ${cpu_number}
6 - threads = 1
6 - threads = 1
7 - proc_name = ${gunicorn_proc_name}
7 - proc_name = ${gunicorn_proc_name}
8 - worker_class = sync
8 - worker_class = sync
9 - worker_connections = 10
9 - worker_connections = 10
10 - max_requests = 1000
10 - max_requests = 1000
11 - max_requests_jitter = 30
11 - max_requests_jitter = 30
12 - timeout = 21600
12 - timeout = 21600
13
13
14 """
14 """
15
15
16 import multiprocessing
16 import multiprocessing
17 import sys
17 import sys
18 import time
18 import time
19 import datetime
19 import datetime
20 import threading
20 import threading
21 import traceback
21 import traceback
22 from gunicorn.glogging import Logger
22 from gunicorn.glogging import Logger
23
23
24
24
25 # GLOBAL
25 # GLOBAL
26 errorlog = '-'
26 errorlog = '-'
27 accesslog = '-'
27 accesslog = '-'
28 loglevel = 'debug'
28 loglevel = 'debug'
29
29
30 # SECURITY
30 # SECURITY
31
31
32 # The maximum size of HTTP request line in bytes.
32 # The maximum size of HTTP request line in bytes.
33 limit_request_line = 4094
33 # 0 for unlimited
34 limit_request_line = 0
34
35
35 # Limit the number of HTTP headers fields in a request.
36 # Limit the number of HTTP headers fields in a request.
36 limit_request_fields = 1024
37 # By default this value is 100 and can’t be larger than 32768.
38 limit_request_fields = 10240
37
39
38 # Limit the allowed size of an HTTP request header field.
40 # Limit the allowed size of an HTTP request header field.
39 # Value is a positive number or 0.
41 # Value is a positive number or 0.
40 # Setting it to 0 will allow unlimited header field sizes.
42 # Setting it to 0 will allow unlimited header field sizes.
41 limit_request_field_size = 0
43 limit_request_field_size = 0
42
44
43
45
44 # Timeout for graceful workers restart.
46 # Timeout for graceful workers restart.
45 # After receiving a restart signal, workers have this much time to finish
47 # After receiving a restart signal, workers have this much time to finish
46 # serving requests. Workers still alive after the timeout (starting from the
48 # serving requests. Workers still alive after the timeout (starting from the
47 # receipt of the restart signal) are force killed.
49 # receipt of the restart signal) are force killed.
48 graceful_timeout = 30
50 graceful_timeout = 30
49
51
50
52
51 # The number of seconds to wait for requests on a Keep-Alive connection.
53 # The number of seconds to wait for requests on a Keep-Alive connection.
52 # Generally set in the 1-5 seconds range.
54 # Generally set in the 1-5 seconds range.
53 keepalive = 2
55 keepalive = 2
54
56
55
57
56 # SERVER MECHANICS
58 # SERVER MECHANICS
57 # None == system temp dir
59 # None == system temp dir
58 # worker_tmp_dir is recommended to be set to some tmpfs
60 # worker_tmp_dir is recommended to be set to some tmpfs
59 worker_tmp_dir = None
61 worker_tmp_dir = None
60 tmp_upload_dir = None
62 tmp_upload_dir = None
61
63
62 # Custom log format
64 # Custom log format
63 access_log_format = (
65 access_log_format = (
64 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
66 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
65
67
66 # self adjust workers based on CPU count
68 # self adjust workers based on CPU count
67 # workers = multiprocessing.cpu_count() * 2 + 1
69 # workers = multiprocessing.cpu_count() * 2 + 1
68
70
69
71
70 def post_fork(server, worker):
72 def post_fork(server, worker):
71 server.log.info("[<%-10s>] WORKER spawned", worker.pid)
73 server.log.info("[<%-10s>] WORKER spawned", worker.pid)
72
74
73
75
74 def pre_fork(server, worker):
76 def pre_fork(server, worker):
75 pass
77 pass
76
78
77
79
78 def pre_exec(server):
80 def pre_exec(server):
79 server.log.info("Forked child, re-executing.")
81 server.log.info("Forked child, re-executing.")
80
82
81
83
82 def on_starting(server):
84 def on_starting(server):
83 server.log.info("Server is starting.")
85 server.log.info("Server is starting.")
84
86
85
87
86 def when_ready(server):
88 def when_ready(server):
87 server.log.info("Server is ready. Spawning workers")
89 server.log.info("Server is ready. Spawning workers")
88
90
89
91
90 def on_reload(server):
92 def on_reload(server):
91 pass
93 pass
92
94
93
95
94 def worker_int(worker):
96 def worker_int(worker):
95 worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)
97 worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)
96
98
97 # get traceback info, on worker crash
99 # get traceback info, on worker crash
98 id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
100 id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
99 code = []
101 code = []
100 for thread_id, stack in sys._current_frames().items():
102 for thread_id, stack in sys._current_frames().items():
101 code.append(
103 code.append(
102 "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
104 "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
103 for fname, lineno, name, line in traceback.extract_stack(stack):
105 for fname, lineno, name, line in traceback.extract_stack(stack):
104 code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
106 code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
105 if line:
107 if line:
106 code.append(" %s" % (line.strip()))
108 code.append(" %s" % (line.strip()))
107 worker.log.debug("\n".join(code))
109 worker.log.debug("\n".join(code))
108
110
109
111
110 def worker_abort(worker):
112 def worker_abort(worker):
111 worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid)
113 worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid)
112
114
113
115
114 def worker_exit(server, worker):
116 def worker_exit(server, worker):
115 worker.log.info("[<%-10s>] worker exit", worker.pid)
117 worker.log.info("[<%-10s>] worker exit", worker.pid)
116
118
117
119
118 def child_exit(server, worker):
120 def child_exit(server, worker):
119 worker.log.info("[<%-10s>] worker child exit", worker.pid)
121 worker.log.info("[<%-10s>] worker child exit", worker.pid)
120
122
121
123
122 def pre_request(worker, req):
124 def pre_request(worker, req):
123 worker.start_time = time.time()
125 worker.start_time = time.time()
124 worker.log.debug(
126 worker.log.debug(
125 "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
127 "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
126
128
127
129
128 def post_request(worker, req, environ, resp):
130 def post_request(worker, req, environ, resp):
129 total_time = time.time() - worker.start_time
131 total_time = time.time() - worker.start_time
130 worker.log.debug(
132 worker.log.debug(
131 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
133 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
132 worker.nr, req.method, req.path, resp.status_code, total_time)
134 worker.nr, req.method, req.path, resp.status_code, total_time)
133
135
134
136
135 class RhodeCodeLogger(Logger):
137 class RhodeCodeLogger(Logger):
136 """
138 """
137 Custom Logger that allows some customization that gunicorn doesn't allow
139 Custom Logger that allows some customization that gunicorn doesn't allow
138 """
140 """
139
141
140 datefmt = r"%Y-%m-%d %H:%M:%S"
142 datefmt = r"%Y-%m-%d %H:%M:%S"
141
143
142 def __init__(self, cfg):
144 def __init__(self, cfg):
143 Logger.__init__(self, cfg)
145 Logger.__init__(self, cfg)
144
146
145 def now(self):
147 def now(self):
146 """ return date in RhodeCode Log format """
148 """ return date in RhodeCode Log format """
147 now = time.time()
149 now = time.time()
148 msecs = int((now - long(now)) * 1000)
150 msecs = int((now - long(now)) * 1000)
149 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
151 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
150
152
151
153
152 logger_class = RhodeCodeLogger
154 logger_class = RhodeCodeLogger
@@ -1,3 +1,3 b''
1 [pip2nix]
1 [pip2nix]
2 requirements = ., -r ./requirements.txt
2 requirements = ., -r ./requirements.txt, -r ./requirements_pinned.txt
3 output = ./pkgs/python-packages.nix
3 output = ./pkgs/python-packages.nix
@@ -1,955 +1,948 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "atomicwrites" = super.buildPythonPackage {
7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.2.1";
8 name = "atomicwrites-1.2.1";
9 doCheck = false;
9 doCheck = false;
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
11 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
12 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
12 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.mit ];
15 license = [ pkgs.lib.licenses.mit ];
16 };
16 };
17 };
17 };
18 "attrs" = super.buildPythonPackage {
18 "attrs" = super.buildPythonPackage {
19 name = "attrs-18.2.0";
19 name = "attrs-18.2.0";
20 doCheck = false;
20 doCheck = false;
21 src = fetchurl {
21 src = fetchurl {
22 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
22 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
23 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
23 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
24 };
24 };
25 meta = {
25 meta = {
26 license = [ pkgs.lib.licenses.mit ];
26 license = [ pkgs.lib.licenses.mit ];
27 };
27 };
28 };
28 };
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 name = "backports.shutil-get-terminal-size-1.0.0";
30 name = "backports.shutil-get-terminal-size-1.0.0";
31 doCheck = false;
31 doCheck = false;
32 src = fetchurl {
32 src = fetchurl {
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 };
35 };
36 meta = {
36 meta = {
37 license = [ pkgs.lib.licenses.mit ];
37 license = [ pkgs.lib.licenses.mit ];
38 };
38 };
39 };
39 };
40 "beautifulsoup4" = super.buildPythonPackage {
40 "beautifulsoup4" = super.buildPythonPackage {
41 name = "beautifulsoup4-4.6.3";
41 name = "beautifulsoup4-4.6.3";
42 doCheck = false;
42 doCheck = false;
43 src = fetchurl {
43 src = fetchurl {
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 };
46 };
47 meta = {
47 meta = {
48 license = [ pkgs.lib.licenses.mit ];
48 license = [ pkgs.lib.licenses.mit ];
49 };
49 };
50 };
50 };
51 "configobj" = super.buildPythonPackage {
51 "configobj" = super.buildPythonPackage {
52 name = "configobj-5.0.6";
52 name = "configobj-5.0.6";
53 doCheck = false;
53 doCheck = false;
54 propagatedBuildInputs = [
54 propagatedBuildInputs = [
55 self."six"
55 self."six"
56 ];
56 ];
57 src = fetchurl {
57 src = fetchurl {
58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
58 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
59 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
60 };
60 };
61 meta = {
61 meta = {
62 license = [ pkgs.lib.licenses.bsdOriginal ];
62 license = [ pkgs.lib.licenses.bsdOriginal ];
63 };
63 };
64 };
64 };
65 "cov-core" = super.buildPythonPackage {
65 "cov-core" = super.buildPythonPackage {
66 name = "cov-core-1.15.0";
66 name = "cov-core-1.15.0";
67 doCheck = false;
67 doCheck = false;
68 propagatedBuildInputs = [
68 propagatedBuildInputs = [
69 self."coverage"
69 self."coverage"
70 ];
70 ];
71 src = fetchurl {
71 src = fetchurl {
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
74 };
74 };
75 meta = {
75 meta = {
76 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
77 };
77 };
78 };
78 };
79 "coverage" = super.buildPythonPackage {
79 "coverage" = super.buildPythonPackage {
80 name = "coverage-4.5.1";
80 name = "coverage-4.5.3";
81 doCheck = false;
81 doCheck = false;
82 src = fetchurl {
82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
83 url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
84 sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
84 sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
85 };
85 };
86 meta = {
86 meta = {
87 license = [ pkgs.lib.licenses.asl20 ];
87 license = [ pkgs.lib.licenses.asl20 ];
88 };
88 };
89 };
89 };
90 "decorator" = super.buildPythonPackage {
90 "decorator" = super.buildPythonPackage {
91 name = "decorator-4.1.2";
91 name = "decorator-4.1.2";
92 doCheck = false;
92 doCheck = false;
93 src = fetchurl {
93 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
96 };
96 };
97 meta = {
97 meta = {
98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
99 };
99 };
100 };
100 };
101 "dogpile.cache" = super.buildPythonPackage {
101 "dogpile.cache" = super.buildPythonPackage {
102 name = "dogpile.cache-0.7.1";
102 name = "dogpile.cache-0.7.1";
103 doCheck = false;
103 doCheck = false;
104 propagatedBuildInputs = [
104 propagatedBuildInputs = [
105 self."decorator"
105 self."decorator"
106 ];
106 ];
107 src = fetchurl {
107 src = fetchurl {
108 url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
108 url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
109 sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
109 sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
110 };
110 };
111 meta = {
111 meta = {
112 license = [ pkgs.lib.licenses.bsdOriginal ];
112 license = [ pkgs.lib.licenses.bsdOriginal ];
113 };
113 };
114 };
114 };
115 "dogpile.core" = super.buildPythonPackage {
115 "dogpile.core" = super.buildPythonPackage {
116 name = "dogpile.core-0.4.1";
116 name = "dogpile.core-0.4.1";
117 doCheck = false;
117 doCheck = false;
118 src = fetchurl {
118 src = fetchurl {
119 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
119 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
120 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
120 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
121 };
121 };
122 meta = {
122 meta = {
123 license = [ pkgs.lib.licenses.bsdOriginal ];
123 license = [ pkgs.lib.licenses.bsdOriginal ];
124 };
124 };
125 };
125 };
126 "dulwich" = super.buildPythonPackage {
126 "dulwich" = super.buildPythonPackage {
127 name = "dulwich-0.13.0";
127 name = "dulwich-0.13.0";
128 doCheck = false;
128 doCheck = false;
129 src = fetchurl {
129 src = fetchurl {
130 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
130 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
131 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
131 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
132 };
132 };
133 meta = {
133 meta = {
134 license = [ pkgs.lib.licenses.gpl2Plus ];
134 license = [ pkgs.lib.licenses.gpl2Plus ];
135 };
135 };
136 };
136 };
137 "enum34" = super.buildPythonPackage {
137 "enum34" = super.buildPythonPackage {
138 name = "enum34-1.1.6";
138 name = "enum34-1.1.6";
139 doCheck = false;
139 doCheck = false;
140 src = fetchurl {
140 src = fetchurl {
141 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
141 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
142 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
142 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
143 };
143 };
144 meta = {
144 meta = {
145 license = [ pkgs.lib.licenses.bsdOriginal ];
145 license = [ pkgs.lib.licenses.bsdOriginal ];
146 };
146 };
147 };
147 };
148 "funcsigs" = super.buildPythonPackage {
148 "funcsigs" = super.buildPythonPackage {
149 name = "funcsigs-1.0.2";
149 name = "funcsigs-1.0.2";
150 doCheck = false;
150 doCheck = false;
151 src = fetchurl {
151 src = fetchurl {
152 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
152 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
153 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
153 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
154 };
154 };
155 meta = {
155 meta = {
156 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
156 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
157 };
157 };
158 };
158 };
159 "gevent" = super.buildPythonPackage {
159 "gevent" = super.buildPythonPackage {
160 name = "gevent-1.4.0";
160 name = "gevent-1.4.0";
161 doCheck = false;
161 doCheck = false;
162 propagatedBuildInputs = [
162 propagatedBuildInputs = [
163 self."greenlet"
163 self."greenlet"
164 ];
164 ];
165 src = fetchurl {
165 src = fetchurl {
166 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
166 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
167 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
167 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
168 };
168 };
169 meta = {
169 meta = {
170 license = [ pkgs.lib.licenses.mit ];
170 license = [ pkgs.lib.licenses.mit ];
171 };
171 };
172 };
172 };
173 "gprof2dot" = super.buildPythonPackage {
173 "gprof2dot" = super.buildPythonPackage {
174 name = "gprof2dot-2017.9.19";
174 name = "gprof2dot-2017.9.19";
175 doCheck = false;
175 doCheck = false;
176 src = fetchurl {
176 src = fetchurl {
177 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
177 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
178 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
178 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
179 };
179 };
180 meta = {
180 meta = {
181 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
181 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
182 };
182 };
183 };
183 };
184 "greenlet" = super.buildPythonPackage {
184 "greenlet" = super.buildPythonPackage {
185 name = "greenlet-0.4.15";
185 name = "greenlet-0.4.15";
186 doCheck = false;
186 doCheck = false;
187 src = fetchurl {
187 src = fetchurl {
188 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
188 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
189 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
189 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
190 };
190 };
191 meta = {
191 meta = {
192 license = [ pkgs.lib.licenses.mit ];
192 license = [ pkgs.lib.licenses.mit ];
193 };
193 };
194 };
194 };
195 "gunicorn" = super.buildPythonPackage {
195 "gunicorn" = super.buildPythonPackage {
196 name = "gunicorn-19.9.0";
196 name = "gunicorn-19.9.0";
197 doCheck = false;
197 doCheck = false;
198 src = fetchurl {
198 src = fetchurl {
199 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
199 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
200 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
200 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
201 };
201 };
202 meta = {
202 meta = {
203 license = [ pkgs.lib.licenses.mit ];
203 license = [ pkgs.lib.licenses.mit ];
204 };
204 };
205 };
205 };
206 "hg-evolve" = super.buildPythonPackage {
206 "hg-evolve" = super.buildPythonPackage {
207 name = "hg-evolve-8.0.1";
207 name = "hg-evolve-8.5.1";
208 doCheck = false;
208 doCheck = false;
209 src = fetchurl {
209 src = fetchurl {
210 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
210 url = "https://files.pythonhosted.org/packages/e3/ce/6594aa403e3464831d4daf20e45fd2e3ef553d968ac13d2c7fa791d4eedd/hg-evolve-8.5.1.tar.gz";
211 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
211 sha256 = "09avqn7c1biz97vb1zw91q6nfzydpcqv43mgpfrj7ywp0fscfgf3";
212 };
212 };
213 meta = {
213 meta = {
214 license = [ { fullName = "GPLv2+"; } ];
214 license = [ { fullName = "GPLv2+"; } ];
215 };
215 };
216 };
216 };
217 "hgsubversion" = super.buildPythonPackage {
217 "hgsubversion" = super.buildPythonPackage {
218 name = "hgsubversion-1.9.3";
218 name = "hgsubversion-1.9.3";
219 doCheck = false;
219 doCheck = false;
220 propagatedBuildInputs = [
220 propagatedBuildInputs = [
221 self."mercurial"
221 self."mercurial"
222 self."subvertpy"
222 self."subvertpy"
223 ];
223 ];
224 src = fetchurl {
224 src = fetchurl {
225 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
225 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
226 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
226 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
227 };
227 };
228 meta = {
228 meta = {
229 license = [ pkgs.lib.licenses.gpl1 ];
229 license = [ pkgs.lib.licenses.gpl1 ];
230 };
230 };
231 };
231 };
232 "hupper" = super.buildPythonPackage {
232 "hupper" = super.buildPythonPackage {
233 name = "hupper-1.4.2";
233 name = "hupper-1.6.1";
234 doCheck = false;
234 doCheck = false;
235 src = fetchurl {
235 src = fetchurl {
236 url = "https://files.pythonhosted.org/packages/f1/75/1915dc7650b4867fa3049256e24ca8eddb5989998fcec788cf52b9812dfc/hupper-1.4.2.tar.gz";
236 url = "https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz";
237 sha256 = "16vb9fkiaakdpcp6pn56h3w0dwvm67bxq2k2dv4i382qhqwphdzb";
237 sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy";
238 };
238 };
239 meta = {
239 meta = {
240 license = [ pkgs.lib.licenses.mit ];
240 license = [ pkgs.lib.licenses.mit ];
241 };
241 };
242 };
242 };
243 "ipdb" = super.buildPythonPackage {
243 "ipdb" = super.buildPythonPackage {
244 name = "ipdb-0.11";
244 name = "ipdb-0.12";
245 doCheck = false;
245 doCheck = false;
246 propagatedBuildInputs = [
246 propagatedBuildInputs = [
247 self."setuptools"
247 self."setuptools"
248 self."ipython"
248 self."ipython"
249 ];
249 ];
250 src = fetchurl {
250 src = fetchurl {
251 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
251 url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
252 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
252 sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
253 };
253 };
254 meta = {
254 meta = {
255 license = [ pkgs.lib.licenses.bsdOriginal ];
255 license = [ pkgs.lib.licenses.bsdOriginal ];
256 };
256 };
257 };
257 };
258 "ipython" = super.buildPythonPackage {
258 "ipython" = super.buildPythonPackage {
259 name = "ipython-5.1.0";
259 name = "ipython-5.1.0";
260 doCheck = false;
260 doCheck = false;
261 propagatedBuildInputs = [
261 propagatedBuildInputs = [
262 self."setuptools"
262 self."setuptools"
263 self."decorator"
263 self."decorator"
264 self."pickleshare"
264 self."pickleshare"
265 self."simplegeneric"
265 self."simplegeneric"
266 self."traitlets"
266 self."traitlets"
267 self."prompt-toolkit"
267 self."prompt-toolkit"
268 self."pygments"
268 self."pygments"
269 self."pexpect"
269 self."pexpect"
270 self."backports.shutil-get-terminal-size"
270 self."backports.shutil-get-terminal-size"
271 self."pathlib2"
271 self."pathlib2"
272 self."pexpect"
272 self."pexpect"
273 ];
273 ];
274 src = fetchurl {
274 src = fetchurl {
275 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
275 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
276 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
276 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
277 };
277 };
278 meta = {
278 meta = {
279 license = [ pkgs.lib.licenses.bsdOriginal ];
279 license = [ pkgs.lib.licenses.bsdOriginal ];
280 };
280 };
281 };
281 };
282 "ipython-genutils" = super.buildPythonPackage {
282 "ipython-genutils" = super.buildPythonPackage {
283 name = "ipython-genutils-0.2.0";
283 name = "ipython-genutils-0.2.0";
284 doCheck = false;
284 doCheck = false;
285 src = fetchurl {
285 src = fetchurl {
286 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
286 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
287 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
287 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
288 };
288 };
289 meta = {
289 meta = {
290 license = [ pkgs.lib.licenses.bsdOriginal ];
290 license = [ pkgs.lib.licenses.bsdOriginal ];
291 };
291 };
292 };
292 };
293 "mako" = super.buildPythonPackage {
293 "mako" = super.buildPythonPackage {
294 name = "mako-1.0.7";
294 name = "mako-1.0.7";
295 doCheck = false;
295 doCheck = false;
296 propagatedBuildInputs = [
296 propagatedBuildInputs = [
297 self."markupsafe"
297 self."markupsafe"
298 ];
298 ];
299 src = fetchurl {
299 src = fetchurl {
300 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
300 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
301 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
301 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
302 };
302 };
303 meta = {
303 meta = {
304 license = [ pkgs.lib.licenses.mit ];
304 license = [ pkgs.lib.licenses.mit ];
305 };
305 };
306 };
306 };
307 "markupsafe" = super.buildPythonPackage {
307 "markupsafe" = super.buildPythonPackage {
308 name = "markupsafe-1.1.0";
308 name = "markupsafe-1.1.0";
309 doCheck = false;
309 doCheck = false;
310 src = fetchurl {
310 src = fetchurl {
311 url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
311 url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
312 sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
312 sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
313 };
313 };
314 meta = {
314 meta = {
315 license = [ pkgs.lib.licenses.bsdOriginal ];
315 license = [ pkgs.lib.licenses.bsdOriginal ];
316 };
316 };
317 };
317 };
318 "mercurial" = super.buildPythonPackage {
318 "mercurial" = super.buildPythonPackage {
319 name = "mercurial-4.6.2";
319 name = "mercurial-4.9.1";
320 doCheck = false;
320 doCheck = false;
321 src = fetchurl {
321 src = fetchurl {
322 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
322 url = "https://files.pythonhosted.org/packages/60/58/a1c52d5f5c0b755e231faf7c4f507dc51fe26d979d36346bc9d28f4f8a75/mercurial-4.9.1.tar.gz";
323 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
323 sha256 = "0iybbkd9add066729zg01kwz5hhc1s6lhp9rrnsmzq6ihyxj3p8v";
324 };
324 };
325 meta = {
325 meta = {
326 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
326 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
327 };
327 };
328 };
328 };
329 "mock" = super.buildPythonPackage {
329 "mock" = super.buildPythonPackage {
330 name = "mock-1.0.1";
330 name = "mock-1.0.1";
331 doCheck = false;
331 doCheck = false;
332 src = fetchurl {
332 src = fetchurl {
333 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
333 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
334 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
334 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
335 };
335 };
336 meta = {
336 meta = {
337 license = [ pkgs.lib.licenses.bsdOriginal ];
337 license = [ pkgs.lib.licenses.bsdOriginal ];
338 };
338 };
339 };
339 };
340 "more-itertools" = super.buildPythonPackage {
340 "more-itertools" = super.buildPythonPackage {
341 name = "more-itertools-5.0.0";
341 name = "more-itertools-5.0.0";
342 doCheck = false;
342 doCheck = false;
343 propagatedBuildInputs = [
343 propagatedBuildInputs = [
344 self."six"
344 self."six"
345 ];
345 ];
346 src = fetchurl {
346 src = fetchurl {
347 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
347 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
348 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
348 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
349 };
349 };
350 meta = {
350 meta = {
351 license = [ pkgs.lib.licenses.mit ];
351 license = [ pkgs.lib.licenses.mit ];
352 };
352 };
353 };
353 };
354 "msgpack-python" = super.buildPythonPackage {
354 "msgpack-python" = super.buildPythonPackage {
355 name = "msgpack-python-0.5.6";
355 name = "msgpack-python-0.5.6";
356 doCheck = false;
356 doCheck = false;
357 src = fetchurl {
357 src = fetchurl {
358 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
358 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
359 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
359 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
360 };
360 };
361 meta = {
361 meta = {
362 license = [ pkgs.lib.licenses.asl20 ];
362 license = [ pkgs.lib.licenses.asl20 ];
363 };
363 };
364 };
364 };
365 "pastedeploy" = super.buildPythonPackage {
365 "pastedeploy" = super.buildPythonPackage {
366 name = "pastedeploy-2.0.1";
366 name = "pastedeploy-2.0.1";
367 doCheck = false;
367 doCheck = false;
368 src = fetchurl {
368 src = fetchurl {
369 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
369 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
370 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
370 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
371 };
371 };
372 meta = {
372 meta = {
373 license = [ pkgs.lib.licenses.mit ];
373 license = [ pkgs.lib.licenses.mit ];
374 };
374 };
375 };
375 };
376 "pathlib2" = super.buildPythonPackage {
376 "pathlib2" = super.buildPythonPackage {
377 name = "pathlib2-2.3.3";
377 name = "pathlib2-2.3.4";
378 doCheck = false;
378 doCheck = false;
379 propagatedBuildInputs = [
379 propagatedBuildInputs = [
380 self."six"
380 self."six"
381 self."scandir"
381 self."scandir"
382 ];
382 ];
383 src = fetchurl {
383 src = fetchurl {
384 url = "https://files.pythonhosted.org/packages/bf/d7/a2568f4596b75d2c6e2b4094a7e64f620decc7887f69a1f2811931ea15b9/pathlib2-2.3.3.tar.gz";
384 url = "https://files.pythonhosted.org/packages/b5/f4/9c7cc726ece2498b6c8b62d3262aa43f59039b953fe23c9964ac5e18d40b/pathlib2-2.3.4.tar.gz";
385 sha256 = "0hpp92vqqgcd8h92msm9slv161b1q160igjwnkf2ag6cx0c96695";
385 sha256 = "1y0f9rkm1924zrc5dn4bwxlhgdkbml82lkcc28l5rgmr7d918q24";
386 };
386 };
387 meta = {
387 meta = {
388 license = [ pkgs.lib.licenses.mit ];
388 license = [ pkgs.lib.licenses.mit ];
389 };
389 };
390 };
390 };
391 "pexpect" = super.buildPythonPackage {
391 "pexpect" = super.buildPythonPackage {
392 name = "pexpect-4.6.0";
392 name = "pexpect-4.7.0";
393 doCheck = false;
393 doCheck = false;
394 propagatedBuildInputs = [
394 propagatedBuildInputs = [
395 self."ptyprocess"
395 self."ptyprocess"
396 ];
396 ];
397 src = fetchurl {
397 src = fetchurl {
398 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
398 url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
399 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
399 sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
400 };
400 };
401 meta = {
401 meta = {
402 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
402 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
403 };
403 };
404 };
404 };
405 "pickleshare" = super.buildPythonPackage {
405 "pickleshare" = super.buildPythonPackage {
406 name = "pickleshare-0.7.5";
406 name = "pickleshare-0.7.5";
407 doCheck = false;
407 doCheck = false;
408 propagatedBuildInputs = [
408 propagatedBuildInputs = [
409 self."pathlib2"
409 self."pathlib2"
410 ];
410 ];
411 src = fetchurl {
411 src = fetchurl {
412 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
412 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
413 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
413 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
414 };
414 };
415 meta = {
415 meta = {
416 license = [ pkgs.lib.licenses.mit ];
416 license = [ pkgs.lib.licenses.mit ];
417 };
417 };
418 };
418 };
419 "plaster" = super.buildPythonPackage {
419 "plaster" = super.buildPythonPackage {
420 name = "plaster-1.0";
420 name = "plaster-1.0";
421 doCheck = false;
421 doCheck = false;
422 propagatedBuildInputs = [
422 propagatedBuildInputs = [
423 self."setuptools"
423 self."setuptools"
424 ];
424 ];
425 src = fetchurl {
425 src = fetchurl {
426 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
426 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
427 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
427 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
428 };
428 };
429 meta = {
429 meta = {
430 license = [ pkgs.lib.licenses.mit ];
430 license = [ pkgs.lib.licenses.mit ];
431 };
431 };
432 };
432 };
433 "plaster-pastedeploy" = super.buildPythonPackage {
433 "plaster-pastedeploy" = super.buildPythonPackage {
434 name = "plaster-pastedeploy-0.6";
434 name = "plaster-pastedeploy-0.7";
435 doCheck = false;
435 doCheck = false;
436 propagatedBuildInputs = [
436 propagatedBuildInputs = [
437 self."pastedeploy"
437 self."pastedeploy"
438 self."plaster"
438 self."plaster"
439 ];
439 ];
440 src = fetchurl {
440 src = fetchurl {
441 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
441 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
442 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
442 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
443 };
443 };
444 meta = {
444 meta = {
445 license = [ pkgs.lib.licenses.mit ];
445 license = [ pkgs.lib.licenses.mit ];
446 };
446 };
447 };
447 };
448 "pluggy" = super.buildPythonPackage {
448 "pluggy" = super.buildPythonPackage {
449 name = "pluggy-0.8.1";
449 name = "pluggy-0.11.0";
450 doCheck = false;
450 doCheck = false;
451 src = fetchurl {
451 src = fetchurl {
452 url = "https://files.pythonhosted.org/packages/38/e1/83b10c17688af7b2998fa5342fec58ecbd2a5a7499f31e606ae6640b71ac/pluggy-0.8.1.tar.gz";
452 url = "https://files.pythonhosted.org/packages/0d/a1/862ab336e8128fde20981d2c1aa8506693412daf5083b1911d539412676b/pluggy-0.11.0.tar.gz";
453 sha256 = "05l6g42p9ilmabw0hlbiyxy6gyzjri41m5l11a8dzgvi77q35p4d";
453 sha256 = "10511a54dvafw1jrk75mrhml53c7b7w4yaw7241696lc2hfvr895";
454 };
454 };
455 meta = {
455 meta = {
456 license = [ pkgs.lib.licenses.mit ];
456 license = [ pkgs.lib.licenses.mit ];
457 };
457 };
458 };
458 };
459 "prompt-toolkit" = super.buildPythonPackage {
459 "prompt-toolkit" = super.buildPythonPackage {
460 name = "prompt-toolkit-1.0.15";
460 name = "prompt-toolkit-1.0.16";
461 doCheck = false;
461 doCheck = false;
462 propagatedBuildInputs = [
462 propagatedBuildInputs = [
463 self."six"
463 self."six"
464 self."wcwidth"
464 self."wcwidth"
465 ];
465 ];
466 src = fetchurl {
466 src = fetchurl {
467 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
467 url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
468 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
468 sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
469 };
469 };
470 meta = {
470 meta = {
471 license = [ pkgs.lib.licenses.bsdOriginal ];
471 license = [ pkgs.lib.licenses.bsdOriginal ];
472 };
472 };
473 };
473 };
474 "psutil" = super.buildPythonPackage {
474 "psutil" = super.buildPythonPackage {
475 name = "psutil-5.4.8";
475 name = "psutil-5.5.1";
476 doCheck = false;
476 doCheck = false;
477 src = fetchurl {
477 src = fetchurl {
478 url = "https://files.pythonhosted.org/packages/e3/58/0eae6e4466e5abf779d7e2b71fac7fba5f59e00ea36ddb3ed690419ccb0f/psutil-5.4.8.tar.gz";
478 url = "https://files.pythonhosted.org/packages/c7/01/7c30b247cdc5ba29623faa5c8cf1f1bbf7e041783c340414b0ed7e067c64/psutil-5.5.1.tar.gz";
479 sha256 = "1hyna338sml2cl1mfb2gs89np18z27mvyhmq4ifh22x07n7mq9kf";
479 sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj";
480 };
480 };
481 meta = {
481 meta = {
482 license = [ pkgs.lib.licenses.bsdOriginal ];
482 license = [ pkgs.lib.licenses.bsdOriginal ];
483 };
483 };
484 };
484 };
485 "ptyprocess" = super.buildPythonPackage {
485 "ptyprocess" = super.buildPythonPackage {
486 name = "ptyprocess-0.6.0";
486 name = "ptyprocess-0.6.0";
487 doCheck = false;
487 doCheck = false;
488 src = fetchurl {
488 src = fetchurl {
489 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
489 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
490 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
490 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
491 };
491 };
492 meta = {
492 meta = {
493 license = [ ];
493 license = [ ];
494 };
494 };
495 };
495 };
496 "py" = super.buildPythonPackage {
496 "py" = super.buildPythonPackage {
497 name = "py-1.6.0";
497 name = "py-1.6.0";
498 doCheck = false;
498 doCheck = false;
499 src = fetchurl {
499 src = fetchurl {
500 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
500 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
501 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
501 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
502 };
502 };
503 meta = {
503 meta = {
504 license = [ pkgs.lib.licenses.mit ];
504 license = [ pkgs.lib.licenses.mit ];
505 };
505 };
506 };
506 };
507 "pygments" = super.buildPythonPackage {
507 "pygments" = super.buildPythonPackage {
508 name = "pygments-2.3.1";
508 name = "pygments-2.4.2";
509 doCheck = false;
509 doCheck = false;
510 src = fetchurl {
510 src = fetchurl {
511 url = "https://files.pythonhosted.org/packages/64/69/413708eaf3a64a6abb8972644e0f20891a55e621c6759e2c3f3891e05d63/Pygments-2.3.1.tar.gz";
511 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
512 sha256 = "0ji87g09jph8jqcvclgb02qvxasdnr9pzvk90rl66d90yqcxmyjz";
512 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
513 };
513 };
514 meta = {
514 meta = {
515 license = [ pkgs.lib.licenses.bsdOriginal ];
515 license = [ pkgs.lib.licenses.bsdOriginal ];
516 };
516 };
517 };
517 };
518 "pyramid" = super.buildPythonPackage {
518 "pyramid" = super.buildPythonPackage {
519 name = "pyramid-1.10.1";
519 name = "pyramid-1.10.4";
520 doCheck = false;
520 doCheck = false;
521 propagatedBuildInputs = [
521 propagatedBuildInputs = [
522 self."hupper"
522 self."hupper"
523 self."plaster"
523 self."plaster"
524 self."plaster-pastedeploy"
524 self."plaster-pastedeploy"
525 self."setuptools"
525 self."setuptools"
526 self."translationstring"
526 self."translationstring"
527 self."venusian"
527 self."venusian"
528 self."webob"
528 self."webob"
529 self."zope.deprecation"
529 self."zope.deprecation"
530 self."zope.interface"
530 self."zope.interface"
531 self."repoze.lru"
531 self."repoze.lru"
532 ];
532 ];
533 src = fetchurl {
533 src = fetchurl {
534 url = "https://files.pythonhosted.org/packages/0a/3e/22e3ac9be1b70a01139adba8906ee4b8f628bb469fea3c52f6c97b73063c/pyramid-1.10.1.tar.gz";
534 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
535 sha256 = "1h5105nfh6rsrfjiyw20aavyibj36la3hajy6vh1fa77xb4y3hrp";
535 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
536 };
536 };
537 meta = {
537 meta = {
538 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
538 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
539 };
539 };
540 };
540 };
541 "pyramid-mako" = super.buildPythonPackage {
541 "pyramid-mako" = super.buildPythonPackage {
542 name = "pyramid-mako-1.0.2";
542 name = "pyramid-mako-1.0.2";
543 doCheck = false;
543 doCheck = false;
544 propagatedBuildInputs = [
544 propagatedBuildInputs = [
545 self."pyramid"
545 self."pyramid"
546 self."mako"
546 self."mako"
547 ];
547 ];
548 src = fetchurl {
548 src = fetchurl {
549 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
549 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
550 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
550 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
551 };
551 };
552 meta = {
552 meta = {
553 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
553 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
554 };
554 };
555 };
555 };
556 "pytest" = super.buildPythonPackage {
556 "pytest" = super.buildPythonPackage {
557 name = "pytest-3.8.2";
557 name = "pytest-3.8.2";
558 doCheck = false;
558 doCheck = false;
559 propagatedBuildInputs = [
559 propagatedBuildInputs = [
560 self."py"
560 self."py"
561 self."six"
561 self."six"
562 self."setuptools"
562 self."setuptools"
563 self."attrs"
563 self."attrs"
564 self."more-itertools"
564 self."more-itertools"
565 self."atomicwrites"
565 self."atomicwrites"
566 self."pluggy"
566 self."pluggy"
567 self."funcsigs"
567 self."funcsigs"
568 self."pathlib2"
568 self."pathlib2"
569 ];
569 ];
570 src = fetchurl {
570 src = fetchurl {
571 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
571 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
572 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
572 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
573 };
573 };
574 meta = {
574 meta = {
575 license = [ pkgs.lib.licenses.mit ];
575 license = [ pkgs.lib.licenses.mit ];
576 };
576 };
577 };
577 };
578 "pytest-cov" = super.buildPythonPackage {
578 "pytest-cov" = super.buildPythonPackage {
579 name = "pytest-cov-2.6.0";
579 name = "pytest-cov-2.6.0";
580 doCheck = false;
580 doCheck = false;
581 propagatedBuildInputs = [
581 propagatedBuildInputs = [
582 self."pytest"
582 self."pytest"
583 self."coverage"
583 self."coverage"
584 ];
584 ];
585 src = fetchurl {
585 src = fetchurl {
586 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
586 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
587 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
587 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
588 };
588 };
589 meta = {
589 meta = {
590 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
590 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
591 };
591 };
592 };
592 };
593 "pytest-profiling" = super.buildPythonPackage {
593 "pytest-profiling" = super.buildPythonPackage {
594 name = "pytest-profiling-1.3.0";
594 name = "pytest-profiling-1.3.0";
595 doCheck = false;
595 doCheck = false;
596 propagatedBuildInputs = [
596 propagatedBuildInputs = [
597 self."six"
597 self."six"
598 self."pytest"
598 self."pytest"
599 self."gprof2dot"
599 self."gprof2dot"
600 ];
600 ];
601 src = fetchurl {
601 src = fetchurl {
602 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
602 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
603 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
603 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
604 };
604 };
605 meta = {
605 meta = {
606 license = [ pkgs.lib.licenses.mit ];
606 license = [ pkgs.lib.licenses.mit ];
607 };
607 };
608 };
608 };
609 "pytest-runner" = super.buildPythonPackage {
609 "pytest-runner" = super.buildPythonPackage {
610 name = "pytest-runner-4.2";
610 name = "pytest-runner-4.2";
611 doCheck = false;
611 doCheck = false;
612 src = fetchurl {
612 src = fetchurl {
613 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
613 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
614 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
614 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
615 };
615 };
616 meta = {
616 meta = {
617 license = [ pkgs.lib.licenses.mit ];
617 license = [ pkgs.lib.licenses.mit ];
618 };
618 };
619 };
619 };
620 "pytest-sugar" = super.buildPythonPackage {
620 "pytest-sugar" = super.buildPythonPackage {
621 name = "pytest-sugar-0.9.1";
621 name = "pytest-sugar-0.9.1";
622 doCheck = false;
622 doCheck = false;
623 propagatedBuildInputs = [
623 propagatedBuildInputs = [
624 self."pytest"
624 self."pytest"
625 self."termcolor"
625 self."termcolor"
626 ];
626 ];
627 src = fetchurl {
627 src = fetchurl {
628 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
628 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
629 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
629 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
630 };
630 };
631 meta = {
631 meta = {
632 license = [ pkgs.lib.licenses.bsdOriginal ];
632 license = [ pkgs.lib.licenses.bsdOriginal ];
633 };
633 };
634 };
634 };
635 "pytest-timeout" = super.buildPythonPackage {
635 "pytest-timeout" = super.buildPythonPackage {
636 name = "pytest-timeout-1.3.2";
636 name = "pytest-timeout-1.3.2";
637 doCheck = false;
637 doCheck = false;
638 propagatedBuildInputs = [
638 propagatedBuildInputs = [
639 self."pytest"
639 self."pytest"
640 ];
640 ];
641 src = fetchurl {
641 src = fetchurl {
642 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
642 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
643 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
643 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
644 };
644 };
645 meta = {
645 meta = {
646 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
646 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
647 };
647 };
648 };
648 };
649 "repoze.lru" = super.buildPythonPackage {
649 "repoze.lru" = super.buildPythonPackage {
650 name = "repoze.lru-0.7";
650 name = "repoze.lru-0.7";
651 doCheck = false;
651 doCheck = false;
652 src = fetchurl {
652 src = fetchurl {
653 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
653 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
654 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
654 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
655 };
655 };
656 meta = {
656 meta = {
657 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
657 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
658 };
658 };
659 };
659 };
660 "rhodecode-vcsserver" = super.buildPythonPackage {
660 "rhodecode-vcsserver" = super.buildPythonPackage {
661 name = "rhodecode-vcsserver-4.16.2";
661 name = "rhodecode-vcsserver-4.17.0";
662 buildInputs = [
662 buildInputs = [
663 self."pytest"
663 self."pytest"
664 self."py"
664 self."py"
665 self."pytest-cov"
665 self."pytest-cov"
666 self."pytest-sugar"
666 self."pytest-sugar"
667 self."pytest-runner"
667 self."pytest-runner"
668 self."pytest-profiling"
668 self."pytest-profiling"
669 self."pytest-timeout"
669 self."pytest-timeout"
670 self."gprof2dot"
670 self."gprof2dot"
671 self."mock"
671 self."mock"
672 self."cov-core"
672 self."cov-core"
673 self."coverage"
673 self."coverage"
674 self."webtest"
674 self."webtest"
675 self."beautifulsoup4"
675 self."beautifulsoup4"
676 self."configobj"
676 self."configobj"
677 ];
677 ];
678 doCheck = true;
678 doCheck = true;
679 propagatedBuildInputs = [
679 propagatedBuildInputs = [
680 self."configobj"
680 self."configobj"
681 self."atomicwrites"
682 self."attrs"
683 self."dogpile.cache"
681 self."dogpile.cache"
684 self."dogpile.core"
682 self."dogpile.core"
685 self."decorator"
683 self."decorator"
686 self."dulwich"
684 self."dulwich"
687 self."hgsubversion"
685 self."hgsubversion"
688 self."hg-evolve"
686 self."hg-evolve"
689 self."mako"
687 self."mako"
690 self."markupsafe"
688 self."markupsafe"
691 self."mercurial"
689 self."mercurial"
692 self."msgpack-python"
690 self."msgpack-python"
693 self."pastedeploy"
691 self."pastedeploy"
694 self."psutil"
695 self."pyramid"
692 self."pyramid"
696 self."pyramid-mako"
693 self."pyramid-mako"
697 self."pygments"
698 self."pathlib2"
699 self."repoze.lru"
694 self."repoze.lru"
700 self."simplejson"
695 self."simplejson"
701 self."subprocess32"
696 self."subprocess32"
702 self."subvertpy"
697 self."subvertpy"
703 self."six"
698 self."six"
704 self."translationstring"
699 self."translationstring"
705 self."webob"
700 self."webob"
706 self."zope.deprecation"
701 self."zope.deprecation"
707 self."zope.interface"
702 self."zope.interface"
708 self."venusian"
709 self."gevent"
703 self."gevent"
710 self."greenlet"
704 self."greenlet"
711 self."gunicorn"
705 self."gunicorn"
712 self."waitress"
706 self."waitress"
713 self."setproctitle"
714 self."ipdb"
707 self."ipdb"
715 self."ipython"
708 self."ipython"
716 self."pytest"
709 self."pytest"
717 self."py"
710 self."py"
718 self."pytest-cov"
711 self."pytest-cov"
719 self."pytest-sugar"
712 self."pytest-sugar"
720 self."pytest-runner"
713 self."pytest-runner"
721 self."pytest-profiling"
714 self."pytest-profiling"
722 self."pytest-timeout"
715 self."pytest-timeout"
723 self."gprof2dot"
716 self."gprof2dot"
724 self."mock"
717 self."mock"
725 self."cov-core"
718 self."cov-core"
726 self."coverage"
719 self."coverage"
727 self."webtest"
720 self."webtest"
728 self."beautifulsoup4"
721 self."beautifulsoup4"
729 ];
722 ];
730 src = ./.;
723 src = ./.;
731 meta = {
724 meta = {
732 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
725 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
733 };
726 };
734 };
727 };
735 "scandir" = super.buildPythonPackage {
728 "scandir" = super.buildPythonPackage {
736 name = "scandir-1.9.0";
729 name = "scandir-1.10.0";
737 doCheck = false;
730 doCheck = false;
738 src = fetchurl {
731 src = fetchurl {
739 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
732 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
740 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
733 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
741 };
734 };
742 meta = {
735 meta = {
743 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
736 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
744 };
737 };
745 };
738 };
746 "setproctitle" = super.buildPythonPackage {
739 "setproctitle" = super.buildPythonPackage {
747 name = "setproctitle-1.1.10";
740 name = "setproctitle-1.1.10";
748 doCheck = false;
741 doCheck = false;
749 src = fetchurl {
742 src = fetchurl {
750 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
743 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
751 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
744 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
752 };
745 };
753 meta = {
746 meta = {
754 license = [ pkgs.lib.licenses.bsdOriginal ];
747 license = [ pkgs.lib.licenses.bsdOriginal ];
755 };
748 };
756 };
749 };
757 "setuptools" = super.buildPythonPackage {
750 "setuptools" = super.buildPythonPackage {
758 name = "setuptools-40.8.0";
751 name = "setuptools-41.0.1";
759 doCheck = false;
752 doCheck = false;
760 src = fetchurl {
753 src = fetchurl {
761 url = "https://files.pythonhosted.org/packages/c2/f7/c7b501b783e5a74cf1768bc174ee4fb0a8a6ee5af6afa92274ff964703e0/setuptools-40.8.0.zip";
754 url = "https://files.pythonhosted.org/packages/1d/64/a18a487b4391a05b9c7f938b94a16d80305bf0369c6b0b9509e86165e1d3/setuptools-41.0.1.zip";
762 sha256 = "0k9hifpgahnw2a26w3cr346iy733k6d3nwh3f7g9m13y6f8fqkkf";
755 sha256 = "04sns22y2hhsrwfy1mha2lgslvpjsjsz8xws7h2rh5a7ylkd28m2";
763 };
756 };
764 meta = {
757 meta = {
765 license = [ pkgs.lib.licenses.mit ];
758 license = [ pkgs.lib.licenses.mit ];
766 };
759 };
767 };
760 };
768 "simplegeneric" = super.buildPythonPackage {
761 "simplegeneric" = super.buildPythonPackage {
769 name = "simplegeneric-0.8.1";
762 name = "simplegeneric-0.8.1";
770 doCheck = false;
763 doCheck = false;
771 src = fetchurl {
764 src = fetchurl {
772 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
765 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
773 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
766 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
774 };
767 };
775 meta = {
768 meta = {
776 license = [ pkgs.lib.licenses.zpl21 ];
769 license = [ pkgs.lib.licenses.zpl21 ];
777 };
770 };
778 };
771 };
779 "simplejson" = super.buildPythonPackage {
772 "simplejson" = super.buildPythonPackage {
780 name = "simplejson-3.16.0";
773 name = "simplejson-3.16.0";
781 doCheck = false;
774 doCheck = false;
782 src = fetchurl {
775 src = fetchurl {
783 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
776 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
784 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
777 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
785 };
778 };
786 meta = {
779 meta = {
787 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
780 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
788 };
781 };
789 };
782 };
790 "six" = super.buildPythonPackage {
783 "six" = super.buildPythonPackage {
791 name = "six-1.11.0";
784 name = "six-1.11.0";
792 doCheck = false;
785 doCheck = false;
793 src = fetchurl {
786 src = fetchurl {
794 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
787 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
795 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
788 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
796 };
789 };
797 meta = {
790 meta = {
798 license = [ pkgs.lib.licenses.mit ];
791 license = [ pkgs.lib.licenses.mit ];
799 };
792 };
800 };
793 };
801 "subprocess32" = super.buildPythonPackage {
794 "subprocess32" = super.buildPythonPackage {
802 name = "subprocess32-3.5.3";
795 name = "subprocess32-3.5.4";
803 doCheck = false;
796 doCheck = false;
804 src = fetchurl {
797 src = fetchurl {
805 url = "https://files.pythonhosted.org/packages/be/2b/beeba583e9877e64db10b52a96915afc0feabf7144dcbf2a0d0ea68bf73d/subprocess32-3.5.3.tar.gz";
798 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
806 sha256 = "1hr5fan8i719hmlmz73hf8rhq74014w07d8ryg7krvvf6692kj3b";
799 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
807 };
800 };
808 meta = {
801 meta = {
809 license = [ pkgs.lib.licenses.psfl ];
802 license = [ pkgs.lib.licenses.psfl ];
810 };
803 };
811 };
804 };
812 "subvertpy" = super.buildPythonPackage {
805 "subvertpy" = super.buildPythonPackage {
813 name = "subvertpy-0.10.1";
806 name = "subvertpy-0.10.1";
814 doCheck = false;
807 doCheck = false;
815 src = fetchurl {
808 src = fetchurl {
816 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
809 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
817 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
810 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
818 };
811 };
819 meta = {
812 meta = {
820 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
813 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
821 };
814 };
822 };
815 };
823 "termcolor" = super.buildPythonPackage {
816 "termcolor" = super.buildPythonPackage {
824 name = "termcolor-1.1.0";
817 name = "termcolor-1.1.0";
825 doCheck = false;
818 doCheck = false;
826 src = fetchurl {
819 src = fetchurl {
827 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
820 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
828 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
821 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
829 };
822 };
830 meta = {
823 meta = {
831 license = [ pkgs.lib.licenses.mit ];
824 license = [ pkgs.lib.licenses.mit ];
832 };
825 };
833 };
826 };
834 "traitlets" = super.buildPythonPackage {
827 "traitlets" = super.buildPythonPackage {
835 name = "traitlets-4.3.2";
828 name = "traitlets-4.3.2";
836 doCheck = false;
829 doCheck = false;
837 propagatedBuildInputs = [
830 propagatedBuildInputs = [
838 self."ipython-genutils"
831 self."ipython-genutils"
839 self."six"
832 self."six"
840 self."decorator"
833 self."decorator"
841 self."enum34"
834 self."enum34"
842 ];
835 ];
843 src = fetchurl {
836 src = fetchurl {
844 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
837 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
845 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
838 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
846 };
839 };
847 meta = {
840 meta = {
848 license = [ pkgs.lib.licenses.bsdOriginal ];
841 license = [ pkgs.lib.licenses.bsdOriginal ];
849 };
842 };
850 };
843 };
851 "translationstring" = super.buildPythonPackage {
844 "translationstring" = super.buildPythonPackage {
852 name = "translationstring-1.3";
845 name = "translationstring-1.3";
853 doCheck = false;
846 doCheck = false;
854 src = fetchurl {
847 src = fetchurl {
855 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
848 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
856 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
849 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
857 };
850 };
858 meta = {
851 meta = {
859 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
852 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
860 };
853 };
861 };
854 };
862 "venusian" = super.buildPythonPackage {
855 "venusian" = super.buildPythonPackage {
863 name = "venusian-1.2.0";
856 name = "venusian-1.2.0";
864 doCheck = false;
857 doCheck = false;
865 src = fetchurl {
858 src = fetchurl {
866 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
859 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
867 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
860 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
868 };
861 };
869 meta = {
862 meta = {
870 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
863 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
871 };
864 };
872 };
865 };
873 "waitress" = super.buildPythonPackage {
866 "waitress" = super.buildPythonPackage {
874 name = "waitress-1.1.0";
867 name = "waitress-1.3.0";
875 doCheck = false;
868 doCheck = false;
876 src = fetchurl {
869 src = fetchurl {
877 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
870 url = "https://files.pythonhosted.org/packages/43/50/9890471320d5ad22761ae46661cf745f487b1c8c4ec49352b99e1078b970/waitress-1.3.0.tar.gz";
878 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
871 sha256 = "09j5dzbbcxib7vdskhx39s1qsydlr4n2p2png71d7mjnr9pnwajf";
879 };
872 };
880 meta = {
873 meta = {
881 license = [ pkgs.lib.licenses.zpl21 ];
874 license = [ pkgs.lib.licenses.zpl21 ];
882 };
875 };
883 };
876 };
884 "wcwidth" = super.buildPythonPackage {
877 "wcwidth" = super.buildPythonPackage {
885 name = "wcwidth-0.1.7";
878 name = "wcwidth-0.1.7";
886 doCheck = false;
879 doCheck = false;
887 src = fetchurl {
880 src = fetchurl {
888 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
881 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
889 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
882 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
890 };
883 };
891 meta = {
884 meta = {
892 license = [ pkgs.lib.licenses.mit ];
885 license = [ pkgs.lib.licenses.mit ];
893 };
886 };
894 };
887 };
895 "webob" = super.buildPythonPackage {
888 "webob" = super.buildPythonPackage {
896 name = "webob-1.8.4";
889 name = "webob-1.8.5";
897 doCheck = false;
890 doCheck = false;
898 src = fetchurl {
891 src = fetchurl {
899 url = "https://files.pythonhosted.org/packages/e4/6c/99e322c3d4cc11d9060a67a9bf2f7c9c581f40988c11fffe89bb8c36bc5e/WebOb-1.8.4.tar.gz";
892 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
900 sha256 = "16cfg5y4n6sihz59vsmns2yqbfm0gfsn3l5xgz2g0pdhilaib0x4";
893 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
901 };
894 };
902 meta = {
895 meta = {
903 license = [ pkgs.lib.licenses.mit ];
896 license = [ pkgs.lib.licenses.mit ];
904 };
897 };
905 };
898 };
906 "webtest" = super.buildPythonPackage {
899 "webtest" = super.buildPythonPackage {
907 name = "webtest-2.0.32";
900 name = "webtest-2.0.33";
908 doCheck = false;
901 doCheck = false;
909 propagatedBuildInputs = [
902 propagatedBuildInputs = [
910 self."six"
903 self."six"
911 self."webob"
904 self."webob"
912 self."waitress"
905 self."waitress"
913 self."beautifulsoup4"
906 self."beautifulsoup4"
914 ];
907 ];
915 src = fetchurl {
908 src = fetchurl {
916 url = "https://files.pythonhosted.org/packages/27/9f/9e74449d272ffbef4fb3012e6dbc53c0b24822d545e7a33a342f80131e59/WebTest-2.0.32.tar.gz";
909 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
917 sha256 = "0qp0nnbazzm4ibjiyqfcn6f230svk09i4g58zg2i9x1ga06h48a2";
910 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
918 };
911 };
919 meta = {
912 meta = {
920 license = [ pkgs.lib.licenses.mit ];
913 license = [ pkgs.lib.licenses.mit ];
921 };
914 };
922 };
915 };
923 "zope.deprecation" = super.buildPythonPackage {
916 "zope.deprecation" = super.buildPythonPackage {
924 name = "zope.deprecation-4.3.0";
917 name = "zope.deprecation-4.4.0";
925 doCheck = false;
918 doCheck = false;
926 propagatedBuildInputs = [
919 propagatedBuildInputs = [
927 self."setuptools"
920 self."setuptools"
928 ];
921 ];
929 src = fetchurl {
922 src = fetchurl {
930 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
923 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
931 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
924 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
932 };
925 };
933 meta = {
926 meta = {
934 license = [ pkgs.lib.licenses.zpl21 ];
927 license = [ pkgs.lib.licenses.zpl21 ];
935 };
928 };
936 };
929 };
937 "zope.interface" = super.buildPythonPackage {
930 "zope.interface" = super.buildPythonPackage {
938 name = "zope.interface-4.5.0";
931 name = "zope.interface-4.6.0";
939 doCheck = false;
932 doCheck = false;
940 propagatedBuildInputs = [
933 propagatedBuildInputs = [
941 self."setuptools"
934 self."setuptools"
942 ];
935 ];
943 src = fetchurl {
936 src = fetchurl {
944 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
937 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
945 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
938 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
946 };
939 };
947 meta = {
940 meta = {
948 license = [ pkgs.lib.licenses.zpl21 ];
941 license = [ pkgs.lib.licenses.zpl21 ];
949 };
942 };
950 };
943 };
951
944
952 ### Test requirements
945 ### Test requirements
953
946
954
947
955 }
948 }
@@ -1,49 +1,43 b''
1 ## dependencies
1 ## dependencies
2
2
3 # our custom configobj
3 # our custom configobj
4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
4 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
5 atomicwrites==1.2.1
5
6 attrs==18.2.0
7 dogpile.cache==0.7.1
6 dogpile.cache==0.7.1
8 dogpile.core==0.4.1
7 dogpile.core==0.4.1
9 decorator==4.1.2
8 decorator==4.1.2
10 dulwich==0.13.0
9 dulwich==0.13.0
11 hgsubversion==1.9.3
10 hgsubversion==1.9.3
12 hg-evolve==8.0.1
11 hg-evolve==8.5.1
13 mako==1.0.7
12 mako==1.0.7
14 markupsafe==1.1.0
13 markupsafe==1.1.0
15 mercurial==4.6.2
14 mercurial==4.9.1
16 msgpack-python==0.5.6
15 msgpack-python==0.5.6
17
16
18 pastedeploy==2.0.1
17 pastedeploy==2.0.1
19 psutil==5.4.8
18 pyramid==1.10.4
20 pyramid==1.10.1
21 pyramid-mako==1.0.2
19 pyramid-mako==1.0.2
22
20
23 pygments==2.3.1
24 pathlib2==2.3.3
25 repoze.lru==0.7
21 repoze.lru==0.7
26 simplejson==3.16.0
22 simplejson==3.16.0
27 subprocess32==3.5.3
23 subprocess32==3.5.4
28 subvertpy==0.10.1
24 subvertpy==0.10.1
29
25
30 six==1.11.0
26 six==1.11.0
31 translationstring==1.3
27 translationstring==1.3
32 webob==1.8.4
28 webob==1.8.5
33 zope.deprecation==4.3.0
29 zope.deprecation==4.4.0
34 zope.interface==4.5.0
30 zope.interface==4.6.0
35 venusian==1.2.0
36
31
37 ## http servers
32 ## http servers
38 gevent==1.4.0
33 gevent==1.4.0
39 greenlet==0.4.15
34 greenlet==0.4.15
40 gunicorn==19.9.0
35 gunicorn==19.9.0
41 waitress==1.1.0
36 waitress==1.3.0
42 setproctitle==1.1.10
43
37
44 ## debug
38 ## debug
45 ipdb==0.11.0
39 ipdb==0.12.0
46 ipython==5.1.0
40 ipython==5.1.0
47
41
48 ## test related requirements
42 ## test related requirements
49 -r requirements_test.txt
43 -r requirements_test.txt
@@ -1,16 +1,16 b''
1 # test related requirements
1 # test related requirements
2 pytest==3.8.2
2 pytest==3.8.2
3 py==1.6.0
3 py==1.6.0
4 pytest-cov==2.6.0
4 pytest-cov==2.6.0
5 pytest-sugar==0.9.1
5 pytest-sugar==0.9.1
6 pytest-runner==4.2.0
6 pytest-runner==4.2.0
7 pytest-profiling==1.3.0
7 pytest-profiling==1.3.0
8 pytest-timeout==1.3.2
8 pytest-timeout==1.3.2
9 gprof2dot==2017.9.19
9 gprof2dot==2017.9.19
10
10
11 mock==1.0.1
11 mock==1.0.1
12 cov-core==1.15.0
12 cov-core==1.15.0
13 coverage==4.5.1
13 coverage==4.5.3
14
14
15 webtest==2.0.32
15 webtest==2.0.33
16 beautifulsoup4==4.6.3
16 beautifulsoup4==4.6.3
@@ -1,1 +1,1 b''
1 4.16.2 No newline at end of file
1 4.17.0 No newline at end of file
@@ -1,117 +1,117 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28
28
29
29
30 def _make_exception(kind, org_exc, *args):
30 def _make_exception(kind, org_exc, *args):
31 """
31 """
32 Prepares a base `Exception` instance to be sent over the wire.
32 Prepares a base `Exception` instance to be sent over the wire.
33
33
34 To give our caller a hint what this is about, it will attach an attribute
34 To give our caller a hint what this is about, it will attach an attribute
35 `_vcs_kind` to the exception.
35 `_vcs_kind` to the exception.
36 """
36 """
37 exc = Exception(*args)
37 exc = Exception(*args)
38 exc._vcs_kind = kind
38 exc._vcs_kind = kind
39 exc._org_exc = org_exc
39 exc._org_exc = org_exc
40 exc._org_exc_tb = ''
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 return exc
41 return exc
42
42
43
43
44 def AbortException(org_exc=None):
44 def AbortException(org_exc=None):
45 def _make_exception_wrapper(*args):
45 def _make_exception_wrapper(*args):
46 return _make_exception('abort', org_exc, *args)
46 return _make_exception('abort', org_exc, *args)
47 return _make_exception_wrapper
47 return _make_exception_wrapper
48
48
49
49
50 def ArchiveException(org_exc=None):
50 def ArchiveException(org_exc=None):
51 def _make_exception_wrapper(*args):
51 def _make_exception_wrapper(*args):
52 return _make_exception('archive', org_exc, *args)
52 return _make_exception('archive', org_exc, *args)
53 return _make_exception_wrapper
53 return _make_exception_wrapper
54
54
55
55
56 def LookupException(org_exc=None):
56 def LookupException(org_exc=None):
57 def _make_exception_wrapper(*args):
57 def _make_exception_wrapper(*args):
58 return _make_exception('lookup', org_exc, *args)
58 return _make_exception('lookup', org_exc, *args)
59 return _make_exception_wrapper
59 return _make_exception_wrapper
60
60
61
61
62 def VcsException(org_exc=None):
62 def VcsException(org_exc=None):
63 def _make_exception_wrapper(*args):
63 def _make_exception_wrapper(*args):
64 return _make_exception('error', org_exc, *args)
64 return _make_exception('error', org_exc, *args)
65 return _make_exception_wrapper
65 return _make_exception_wrapper
66
66
67
67
68 def RepositoryLockedException(org_exc=None):
68 def RepositoryLockedException(org_exc=None):
69 def _make_exception_wrapper(*args):
69 def _make_exception_wrapper(*args):
70 return _make_exception('repo_locked', org_exc, *args)
70 return _make_exception('repo_locked', org_exc, *args)
71 return _make_exception_wrapper
71 return _make_exception_wrapper
72
72
73
73
74 def RepositoryBranchProtectedException(org_exc=None):
74 def RepositoryBranchProtectedException(org_exc=None):
75 def _make_exception_wrapper(*args):
75 def _make_exception_wrapper(*args):
76 return _make_exception('repo_branch_protected', org_exc, *args)
76 return _make_exception('repo_branch_protected', org_exc, *args)
77 return _make_exception_wrapper
77 return _make_exception_wrapper
78
78
79
79
80 def RequirementException(org_exc=None):
80 def RequirementException(org_exc=None):
81 def _make_exception_wrapper(*args):
81 def _make_exception_wrapper(*args):
82 return _make_exception('requirement', org_exc, *args)
82 return _make_exception('requirement', org_exc, *args)
83 return _make_exception_wrapper
83 return _make_exception_wrapper
84
84
85
85
86 def UnhandledException(org_exc=None):
86 def UnhandledException(org_exc=None):
87 def _make_exception_wrapper(*args):
87 def _make_exception_wrapper(*args):
88 return _make_exception('unhandled', org_exc, *args)
88 return _make_exception('unhandled', org_exc, *args)
89 return _make_exception_wrapper
89 return _make_exception_wrapper
90
90
91
91
92 def URLError(org_exc=None):
92 def URLError(org_exc=None):
93 def _make_exception_wrapper(*args):
93 def _make_exception_wrapper(*args):
94 return _make_exception('url_error', org_exc, *args)
94 return _make_exception('url_error', org_exc, *args)
95 return _make_exception_wrapper
95 return _make_exception_wrapper
96
96
97
97
98 def SubrepoMergeException(org_exc=None):
98 def SubrepoMergeException(org_exc=None):
99 def _make_exception_wrapper(*args):
99 def _make_exception_wrapper(*args):
100 return _make_exception('subrepo_merge_error', org_exc, *args)
100 return _make_exception('subrepo_merge_error', org_exc, *args)
101 return _make_exception_wrapper
101 return _make_exception_wrapper
102
102
103
103
104 class HTTPRepoLocked(HTTPLocked):
104 class HTTPRepoLocked(HTTPLocked):
105 """
105 """
106 Subclass of HTTPLocked response that allows to set the title and status
106 Subclass of HTTPLocked response that allows to set the title and status
107 code via constructor arguments.
107 code via constructor arguments.
108 """
108 """
109 def __init__(self, title, status_code=None, **kwargs):
109 def __init__(self, title, status_code=None, **kwargs):
110 self.code = status_code or HTTPLocked.code
110 self.code = status_code or HTTPLocked.code
111 self.title = title
111 self.title = title
112 super(HTTPRepoLocked, self).__init__(**kwargs)
112 super(HTTPRepoLocked, self).__init__(**kwargs)
113
113
114
114
115 class HTTPRepoBranchProtected(HTTPForbidden):
115 class HTTPRepoBranchProtected(HTTPForbidden):
116 def __init__(self, *args, **kwargs):
116 def __init__(self, *args, **kwargs):
117 super(HTTPForbidden, self).__init__(*args, **kwargs)
117 super(HTTPForbidden, self).__init__(*args, **kwargs)
@@ -1,742 +1,752 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import collections
17 import collections
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 import more_itertools
28 import more_itertools
29 from dulwich import index, objects
29 from dulwich import index, objects
30 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.client import HttpGitClient, LocalGitClient
31 from dulwich.errors import (
31 from dulwich.errors import (
32 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 MissingCommitError, ObjectMissing, HangupException,
33 MissingCommitError, ObjectMissing, HangupException,
34 UnexpectedCommandError)
34 UnexpectedCommandError)
35 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.repo import Repo as DulwichRepo, Tag
36 from dulwich.server import update_server_info
36 from dulwich.server import update_server_info
37
37
38 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver import exceptions, settings, subprocessio
39 from vcsserver.utils import safe_str
39 from vcsserver.utils import safe_str
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
41 from vcsserver.hgcompat import (
41 from vcsserver.hgcompat import (
42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 from vcsserver.git_lfs.lib import LFSOidStore
43 from vcsserver.git_lfs.lib import LFSOidStore
44
44
45 DIR_STAT = stat.S_IFDIR
45 DIR_STAT = stat.S_IFDIR
46 FILE_MODE = stat.S_IFMT
46 FILE_MODE = stat.S_IFMT
47 GIT_LINK = objects.S_IFGITLINK
47 GIT_LINK = objects.S_IFGITLINK
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 def reraise_safe_exceptions(func):
52 def reraise_safe_exceptions(func):
53 """Converts Dulwich exceptions to something neutral."""
53 """Converts Dulwich exceptions to something neutral."""
54 @wraps(func)
54 @wraps(func)
55 def wrapper(*args, **kwargs):
55 def wrapper(*args, **kwargs):
56 try:
56 try:
57 return func(*args, **kwargs)
57 return func(*args, **kwargs)
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
59 ObjectMissing) as e:
59 ObjectMissing) as e:
60 exc = exceptions.LookupException(e)
60 exc = exceptions.LookupException(e)
61 raise exc(e)
61 raise exc(e)
62 except (HangupException, UnexpectedCommandError) as e:
62 except (HangupException, UnexpectedCommandError) as e:
63 exc = exceptions.VcsException(e)
63 exc = exceptions.VcsException(e)
64 raise exc(e)
64 raise exc(e)
65 except Exception as e:
65 except Exception as e:
66 # NOTE(marcink): becuase of how dulwich handles some exceptions
66 # NOTE(marcink): becuase of how dulwich handles some exceptions
67 # (KeyError on empty repos), we cannot track this and catch all
67 # (KeyError on empty repos), we cannot track this and catch all
68 # exceptions, it's an exceptions from other handlers
68 # exceptions, it's an exceptions from other handlers
69 #if not hasattr(e, '_vcs_kind'):
69 #if not hasattr(e, '_vcs_kind'):
70 #log.exception("Unhandled exception in git remote call")
70 #log.exception("Unhandled exception in git remote call")
71 #raise_from_original(exceptions.UnhandledException)
71 #raise_from_original(exceptions.UnhandledException)
72 raise
72 raise
73 return wrapper
73 return wrapper
74
74
75
75
76 class Repo(DulwichRepo):
76 class Repo(DulwichRepo):
77 """
77 """
78 A wrapper for dulwich Repo class.
78 A wrapper for dulwich Repo class.
79
79
80 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
80 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
81 "Too many open files" error. We need to close all opened file descriptors
81 "Too many open files" error. We need to close all opened file descriptors
82 once the repo object is destroyed.
82 once the repo object is destroyed.
83
83
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 to 0.12.0 +
85 to 0.12.0 +
86 """
86 """
87 def __del__(self):
87 def __del__(self):
88 if hasattr(self, 'object_store'):
88 if hasattr(self, 'object_store'):
89 self.close()
89 self.close()
90
90
91
91
92 class GitFactory(RepoFactory):
92 class GitFactory(RepoFactory):
93 repo_type = 'git'
93 repo_type = 'git'
94
94
95 def _create_repo(self, wire, create):
95 def _create_repo(self, wire, create):
96 repo_path = str_to_dulwich(wire['path'])
96 repo_path = str_to_dulwich(wire['path'])
97 return Repo(repo_path)
97 return Repo(repo_path)
98
98
99
99
100 class GitRemote(object):
100 class GitRemote(object):
101
101
102 def __init__(self, factory):
102 def __init__(self, factory):
103 self._factory = factory
103 self._factory = factory
104 self.peeled_ref_marker = '^{}'
104 self.peeled_ref_marker = '^{}'
105 self._bulk_methods = {
105 self._bulk_methods = {
106 "author": self.commit_attribute,
106 "author": self.commit_attribute,
107 "date": self.get_object_attrs,
107 "date": self.get_object_attrs,
108 "message": self.commit_attribute,
108 "message": self.commit_attribute,
109 "parents": self.commit_attribute,
109 "parents": self.commit_attribute,
110 "_commit": self.revision,
110 "_commit": self.revision,
111 }
111 }
112
112
113 def _wire_to_config(self, wire):
113 def _wire_to_config(self, wire):
114 if 'config' in wire:
114 if 'config' in wire:
115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 return {}
116 return {}
117
117
118 def _assign_ref(self, wire, ref, commit_id):
118 def _assign_ref(self, wire, ref, commit_id):
119 repo = self._factory.repo(wire)
119 repo = self._factory.repo(wire)
120 repo[ref] = commit_id
120 repo[ref] = commit_id
121
121
122 def _remote_conf(self, config):
122 def _remote_conf(self, config):
123 params = [
123 params = [
124 '-c', 'core.askpass=""',
124 '-c', 'core.askpass=""',
125 ]
125 ]
126 ssl_cert_dir = config.get('vcs_ssl_dir')
126 ssl_cert_dir = config.get('vcs_ssl_dir')
127 if ssl_cert_dir:
127 if ssl_cert_dir:
128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 return params
129 return params
130
130
131 @reraise_safe_exceptions
131 @reraise_safe_exceptions
132 def is_empty(self, wire):
133 repo = self._factory.repo(wire)
134 try:
135 return not repo.head()
136 except Exception:
137 log.exception("failed to read object_store")
138 return True
139
140 @reraise_safe_exceptions
132 def add_object(self, wire, content):
141 def add_object(self, wire, content):
133 repo = self._factory.repo(wire)
142 repo = self._factory.repo(wire)
134 blob = objects.Blob()
143 blob = objects.Blob()
135 blob.set_raw_string(content)
144 blob.set_raw_string(content)
136 repo.object_store.add_object(blob)
145 repo.object_store.add_object(blob)
137 return blob.id
146 return blob.id
138
147
139 @reraise_safe_exceptions
148 @reraise_safe_exceptions
140 def assert_correct_path(self, wire):
149 def assert_correct_path(self, wire):
141 path = wire.get('path')
150 path = wire.get('path')
142 try:
151 try:
143 self._factory.repo(wire)
152 self._factory.repo(wire)
144 except NotGitRepository as e:
153 except NotGitRepository as e:
145 tb = traceback.format_exc()
154 tb = traceback.format_exc()
146 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
155 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
147 return False
156 return False
148
157
149 return True
158 return True
150
159
151 @reraise_safe_exceptions
160 @reraise_safe_exceptions
152 def bare(self, wire):
161 def bare(self, wire):
153 repo = self._factory.repo(wire)
162 repo = self._factory.repo(wire)
154 return repo.bare
163 return repo.bare
155
164
156 @reraise_safe_exceptions
165 @reraise_safe_exceptions
157 def blob_as_pretty_string(self, wire, sha):
166 def blob_as_pretty_string(self, wire, sha):
158 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
159 return repo[sha].as_pretty_string()
168 return repo[sha].as_pretty_string()
160
169
161 @reraise_safe_exceptions
170 @reraise_safe_exceptions
162 def blob_raw_length(self, wire, sha):
171 def blob_raw_length(self, wire, sha):
163 repo = self._factory.repo(wire)
172 repo = self._factory.repo(wire)
164 blob = repo[sha]
173 blob = repo[sha]
165 return blob.raw_length()
174 return blob.raw_length()
166
175
167 def _parse_lfs_pointer(self, raw_content):
176 def _parse_lfs_pointer(self, raw_content):
168
177
169 spec_string = 'version https://git-lfs.github.com/spec'
178 spec_string = 'version https://git-lfs.github.com/spec'
170 if raw_content and raw_content.startswith(spec_string):
179 if raw_content and raw_content.startswith(spec_string):
171 pattern = re.compile(r"""
180 pattern = re.compile(r"""
172 (?:\n)?
181 (?:\n)?
173 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
182 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
174 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
183 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
175 ^size[ ](?P<oid_size>[0-9]+)\n
184 ^size[ ](?P<oid_size>[0-9]+)\n
176 (?:\n)?
185 (?:\n)?
177 """, re.VERBOSE | re.MULTILINE)
186 """, re.VERBOSE | re.MULTILINE)
178 match = pattern.match(raw_content)
187 match = pattern.match(raw_content)
179 if match:
188 if match:
180 return match.groupdict()
189 return match.groupdict()
181
190
182 return {}
191 return {}
183
192
184 @reraise_safe_exceptions
193 @reraise_safe_exceptions
185 def is_large_file(self, wire, sha):
194 def is_large_file(self, wire, sha):
186 repo = self._factory.repo(wire)
195 repo = self._factory.repo(wire)
187 blob = repo[sha]
196 blob = repo[sha]
188 return self._parse_lfs_pointer(blob.as_raw_string())
197 return self._parse_lfs_pointer(blob.as_raw_string())
189
198
190 @reraise_safe_exceptions
199 @reraise_safe_exceptions
191 def in_largefiles_store(self, wire, oid):
200 def in_largefiles_store(self, wire, oid):
192 repo = self._factory.repo(wire)
201 repo = self._factory.repo(wire)
193 conf = self._wire_to_config(wire)
202 conf = self._wire_to_config(wire)
194
203
195 store_location = conf.get('vcs_git_lfs_store_location')
204 store_location = conf.get('vcs_git_lfs_store_location')
196 if store_location:
205 if store_location:
197 repo_name = repo.path
206 repo_name = repo.path
198 store = LFSOidStore(
207 store = LFSOidStore(
199 oid=oid, repo=repo_name, store_location=store_location)
208 oid=oid, repo=repo_name, store_location=store_location)
200 return store.has_oid()
209 return store.has_oid()
201
210
202 return False
211 return False
203
212
204 @reraise_safe_exceptions
213 @reraise_safe_exceptions
205 def store_path(self, wire, oid):
214 def store_path(self, wire, oid):
206 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
207 conf = self._wire_to_config(wire)
216 conf = self._wire_to_config(wire)
208
217
209 store_location = conf.get('vcs_git_lfs_store_location')
218 store_location = conf.get('vcs_git_lfs_store_location')
210 if store_location:
219 if store_location:
211 repo_name = repo.path
220 repo_name = repo.path
212 store = LFSOidStore(
221 store = LFSOidStore(
213 oid=oid, repo=repo_name, store_location=store_location)
222 oid=oid, repo=repo_name, store_location=store_location)
214 return store.oid_path
223 return store.oid_path
215 raise ValueError('Unable to fetch oid with path {}'.format(oid))
224 raise ValueError('Unable to fetch oid with path {}'.format(oid))
216
225
217 @reraise_safe_exceptions
226 @reraise_safe_exceptions
218 def bulk_request(self, wire, rev, pre_load):
227 def bulk_request(self, wire, rev, pre_load):
219 result = {}
228 result = {}
220 for attr in pre_load:
229 for attr in pre_load:
221 try:
230 try:
222 method = self._bulk_methods[attr]
231 method = self._bulk_methods[attr]
223 args = [wire, rev]
232 args = [wire, rev]
224 if attr == "date":
233 if attr == "date":
225 args.extend(["commit_time", "commit_timezone"])
234 args.extend(["commit_time", "commit_timezone"])
226 elif attr in ["author", "message", "parents"]:
235 elif attr in ["author", "message", "parents"]:
227 args.append(attr)
236 args.append(attr)
228 result[attr] = method(*args)
237 result[attr] = method(*args)
229 except KeyError as e:
238 except KeyError as e:
230 raise exceptions.VcsException(e)(
239 raise exceptions.VcsException(e)(
231 "Unknown bulk attribute: %s" % attr)
240 "Unknown bulk attribute: %s" % attr)
232 return result
241 return result
233
242
234 def _build_opener(self, url):
243 def _build_opener(self, url):
235 handlers = []
244 handlers = []
236 url_obj = url_parser(url)
245 url_obj = url_parser(url)
237 _, authinfo = url_obj.authinfo()
246 _, authinfo = url_obj.authinfo()
238
247
239 if authinfo:
248 if authinfo:
240 # create a password manager
249 # create a password manager
241 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
250 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
242 passmgr.add_password(*authinfo)
251 passmgr.add_password(*authinfo)
243
252
244 handlers.extend((httpbasicauthhandler(passmgr),
253 handlers.extend((httpbasicauthhandler(passmgr),
245 httpdigestauthhandler(passmgr)))
254 httpdigestauthhandler(passmgr)))
246
255
247 return urllib2.build_opener(*handlers)
256 return urllib2.build_opener(*handlers)
248
257
249 @reraise_safe_exceptions
258 @reraise_safe_exceptions
250 def check_url(self, url, config):
259 def check_url(self, url, config):
251 url_obj = url_parser(url)
260 url_obj = url_parser(url)
252 test_uri, _ = url_obj.authinfo()
261 test_uri, _ = url_obj.authinfo()
253 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
262 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
254 url_obj.query = obfuscate_qs(url_obj.query)
263 url_obj.query = obfuscate_qs(url_obj.query)
255 cleaned_uri = str(url_obj)
264 cleaned_uri = str(url_obj)
256 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
265 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
257
266
258 if not test_uri.endswith('info/refs'):
267 if not test_uri.endswith('info/refs'):
259 test_uri = test_uri.rstrip('/') + '/info/refs'
268 test_uri = test_uri.rstrip('/') + '/info/refs'
260
269
261 o = self._build_opener(url)
270 o = self._build_opener(url)
262 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
271 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
263
272
264 q = {"service": 'git-upload-pack'}
273 q = {"service": 'git-upload-pack'}
265 qs = '?%s' % urllib.urlencode(q)
274 qs = '?%s' % urllib.urlencode(q)
266 cu = "%s%s" % (test_uri, qs)
275 cu = "%s%s" % (test_uri, qs)
267 req = urllib2.Request(cu, None, {})
276 req = urllib2.Request(cu, None, {})
268
277
269 try:
278 try:
270 log.debug("Trying to open URL %s", cleaned_uri)
279 log.debug("Trying to open URL %s", cleaned_uri)
271 resp = o.open(req)
280 resp = o.open(req)
272 if resp.code != 200:
281 if resp.code != 200:
273 raise exceptions.URLError()('Return Code is not 200')
282 raise exceptions.URLError()('Return Code is not 200')
274 except Exception as e:
283 except Exception as e:
275 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
284 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
276 # means it cannot be cloned
285 # means it cannot be cloned
277 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
286 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
278
287
279 # now detect if it's proper git repo
288 # now detect if it's proper git repo
280 gitdata = resp.read()
289 gitdata = resp.read()
281 if 'service=git-upload-pack' in gitdata:
290 if 'service=git-upload-pack' in gitdata:
282 pass
291 pass
283 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
292 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
284 # old style git can return some other format !
293 # old style git can return some other format !
285 pass
294 pass
286 else:
295 else:
287 raise exceptions.URLError()(
296 raise exceptions.URLError()(
288 "url [%s] does not look like an git" % (cleaned_uri,))
297 "url [%s] does not look like an git" % (cleaned_uri,))
289
298
290 return True
299 return True
291
300
292 @reraise_safe_exceptions
301 @reraise_safe_exceptions
293 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
302 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
294 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
303 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
295 remote_refs = self.pull(wire, url, apply_refs=False)
304 remote_refs = self.pull(wire, url, apply_refs=False)
296 repo = self._factory.repo(wire)
305 repo = self._factory.repo(wire)
297 if isinstance(valid_refs, list):
306 if isinstance(valid_refs, list):
298 valid_refs = tuple(valid_refs)
307 valid_refs = tuple(valid_refs)
299
308
300 for k in remote_refs:
309 for k in remote_refs:
301 # only parse heads/tags and skip so called deferred tags
310 # only parse heads/tags and skip so called deferred tags
302 if k.startswith(valid_refs) and not k.endswith(deferred):
311 if k.startswith(valid_refs) and not k.endswith(deferred):
303 repo[k] = remote_refs[k]
312 repo[k] = remote_refs[k]
304
313
305 if update_after_clone:
314 if update_after_clone:
306 # we want to checkout HEAD
315 # we want to checkout HEAD
307 repo["HEAD"] = remote_refs["HEAD"]
316 repo["HEAD"] = remote_refs["HEAD"]
308 index.build_index_from_tree(repo.path, repo.index_path(),
317 index.build_index_from_tree(repo.path, repo.index_path(),
309 repo.object_store, repo["HEAD"].tree)
318 repo.object_store, repo["HEAD"].tree)
310
319
311 # TODO: this is quite complex, check if that can be simplified
320 # TODO: this is quite complex, check if that can be simplified
312 @reraise_safe_exceptions
321 @reraise_safe_exceptions
313 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
322 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
314 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
315 object_store = repo.object_store
324 object_store = repo.object_store
316
325
317 # Create tree and populates it with blobs
326 # Create tree and populates it with blobs
318 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
327 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
319
328
320 for node in updated:
329 for node in updated:
321 # Compute subdirs if needed
330 # Compute subdirs if needed
322 dirpath, nodename = vcspath.split(node['path'])
331 dirpath, nodename = vcspath.split(node['path'])
323 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
332 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
324 parent = commit_tree
333 parent = commit_tree
325 ancestors = [('', parent)]
334 ancestors = [('', parent)]
326
335
327 # Tries to dig for the deepest existing tree
336 # Tries to dig for the deepest existing tree
328 while dirnames:
337 while dirnames:
329 curdir = dirnames.pop(0)
338 curdir = dirnames.pop(0)
330 try:
339 try:
331 dir_id = parent[curdir][1]
340 dir_id = parent[curdir][1]
332 except KeyError:
341 except KeyError:
333 # put curdir back into dirnames and stops
342 # put curdir back into dirnames and stops
334 dirnames.insert(0, curdir)
343 dirnames.insert(0, curdir)
335 break
344 break
336 else:
345 else:
337 # If found, updates parent
346 # If found, updates parent
338 parent = repo[dir_id]
347 parent = repo[dir_id]
339 ancestors.append((curdir, parent))
348 ancestors.append((curdir, parent))
340 # Now parent is deepest existing tree and we need to create
349 # Now parent is deepest existing tree and we need to create
341 # subtrees for dirnames (in reverse order)
350 # subtrees for dirnames (in reverse order)
342 # [this only applies for nodes from added]
351 # [this only applies for nodes from added]
343 new_trees = []
352 new_trees = []
344
353
345 blob = objects.Blob.from_string(node['content'])
354 blob = objects.Blob.from_string(node['content'])
346
355
347 if dirnames:
356 if dirnames:
348 # If there are trees which should be created we need to build
357 # If there are trees which should be created we need to build
349 # them now (in reverse order)
358 # them now (in reverse order)
350 reversed_dirnames = list(reversed(dirnames))
359 reversed_dirnames = list(reversed(dirnames))
351 curtree = objects.Tree()
360 curtree = objects.Tree()
352 curtree[node['node_path']] = node['mode'], blob.id
361 curtree[node['node_path']] = node['mode'], blob.id
353 new_trees.append(curtree)
362 new_trees.append(curtree)
354 for dirname in reversed_dirnames[:-1]:
363 for dirname in reversed_dirnames[:-1]:
355 newtree = objects.Tree()
364 newtree = objects.Tree()
356 newtree[dirname] = (DIR_STAT, curtree.id)
365 newtree[dirname] = (DIR_STAT, curtree.id)
357 new_trees.append(newtree)
366 new_trees.append(newtree)
358 curtree = newtree
367 curtree = newtree
359 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
368 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
360 else:
369 else:
361 parent.add(
370 parent.add(
362 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
371 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
363
372
364 new_trees.append(parent)
373 new_trees.append(parent)
365 # Update ancestors
374 # Update ancestors
366 reversed_ancestors = reversed(
375 reversed_ancestors = reversed(
367 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
376 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
368 for parent, tree, path in reversed_ancestors:
377 for parent, tree, path in reversed_ancestors:
369 parent[path] = (DIR_STAT, tree.id)
378 parent[path] = (DIR_STAT, tree.id)
370 object_store.add_object(tree)
379 object_store.add_object(tree)
371
380
372 object_store.add_object(blob)
381 object_store.add_object(blob)
373 for tree in new_trees:
382 for tree in new_trees:
374 object_store.add_object(tree)
383 object_store.add_object(tree)
375
384
376 for node_path in removed:
385 for node_path in removed:
377 paths = node_path.split('/')
386 paths = node_path.split('/')
378 tree = commit_tree
387 tree = commit_tree
379 trees = [tree]
388 trees = [tree]
380 # Traverse deep into the forest...
389 # Traverse deep into the forest...
381 for path in paths:
390 for path in paths:
382 try:
391 try:
383 obj = repo[tree[path][1]]
392 obj = repo[tree[path][1]]
384 if isinstance(obj, objects.Tree):
393 if isinstance(obj, objects.Tree):
385 trees.append(obj)
394 trees.append(obj)
386 tree = obj
395 tree = obj
387 except KeyError:
396 except KeyError:
388 break
397 break
389 # Cut down the blob and all rotten trees on the way back...
398 # Cut down the blob and all rotten trees on the way back...
390 for path, tree in reversed(zip(paths, trees)):
399 for path, tree in reversed(zip(paths, trees)):
391 del tree[path]
400 del tree[path]
392 if tree:
401 if tree:
393 # This tree still has elements - don't remove it or any
402 # This tree still has elements - don't remove it or any
394 # of it's parents
403 # of it's parents
395 break
404 break
396
405
397 object_store.add_object(commit_tree)
406 object_store.add_object(commit_tree)
398
407
399 # Create commit
408 # Create commit
400 commit = objects.Commit()
409 commit = objects.Commit()
401 commit.tree = commit_tree.id
410 commit.tree = commit_tree.id
402 for k, v in commit_data.iteritems():
411 for k, v in commit_data.iteritems():
403 setattr(commit, k, v)
412 setattr(commit, k, v)
404 object_store.add_object(commit)
413 object_store.add_object(commit)
405
414
406 ref = 'refs/heads/%s' % branch
415 ref = 'refs/heads/%s' % branch
407 repo.refs[ref] = commit.id
416 repo.refs[ref] = commit.id
408
417
409 return commit.id
418 return commit.id
410
419
411 @reraise_safe_exceptions
420 @reraise_safe_exceptions
412 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
421 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
413 if url != 'default' and '://' not in url:
422 if url != 'default' and '://' not in url:
414 client = LocalGitClient(url)
423 client = LocalGitClient(url)
415 else:
424 else:
416 url_obj = url_parser(url)
425 url_obj = url_parser(url)
417 o = self._build_opener(url)
426 o = self._build_opener(url)
418 url, _ = url_obj.authinfo()
427 url, _ = url_obj.authinfo()
419 client = HttpGitClient(base_url=url, opener=o)
428 client = HttpGitClient(base_url=url, opener=o)
420 repo = self._factory.repo(wire)
429 repo = self._factory.repo(wire)
421
430
422 determine_wants = repo.object_store.determine_wants_all
431 determine_wants = repo.object_store.determine_wants_all
423 if refs:
432 if refs:
424 def determine_wants_requested(references):
433 def determine_wants_requested(references):
425 return [references[r] for r in references if r in refs]
434 return [references[r] for r in references if r in refs]
426 determine_wants = determine_wants_requested
435 determine_wants = determine_wants_requested
427
436
428 try:
437 try:
429 remote_refs = client.fetch(
438 remote_refs = client.fetch(
430 path=url, target=repo, determine_wants=determine_wants)
439 path=url, target=repo, determine_wants=determine_wants)
431 except NotGitRepository as e:
440 except NotGitRepository as e:
432 log.warning(
441 log.warning(
433 'Trying to fetch from "%s" failed, not a Git repository.', url)
442 'Trying to fetch from "%s" failed, not a Git repository.', url)
434 # Exception can contain unicode which we convert
443 # Exception can contain unicode which we convert
435 raise exceptions.AbortException(e)(repr(e))
444 raise exceptions.AbortException(e)(repr(e))
436
445
437 # mikhail: client.fetch() returns all the remote refs, but fetches only
446 # mikhail: client.fetch() returns all the remote refs, but fetches only
438 # refs filtered by `determine_wants` function. We need to filter result
447 # refs filtered by `determine_wants` function. We need to filter result
439 # as well
448 # as well
440 if refs:
449 if refs:
441 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
450 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
442
451
443 if apply_refs:
452 if apply_refs:
444 # TODO: johbo: Needs proper test coverage with a git repository
453 # TODO: johbo: Needs proper test coverage with a git repository
445 # that contains a tag object, so that we would end up with
454 # that contains a tag object, so that we would end up with
446 # a peeled ref at this point.
455 # a peeled ref at this point.
447 for k in remote_refs:
456 for k in remote_refs:
448 if k.endswith(self.peeled_ref_marker):
457 if k.endswith(self.peeled_ref_marker):
449 log.debug("Skipping peeled reference %s", k)
458 log.debug("Skipping peeled reference %s", k)
450 continue
459 continue
451 repo[k] = remote_refs[k]
460 repo[k] = remote_refs[k]
452
461
453 if refs and not update_after:
462 if refs and not update_after:
454 # mikhail: explicitly set the head to the last ref.
463 # mikhail: explicitly set the head to the last ref.
455 repo['HEAD'] = remote_refs[refs[-1]]
464 repo['HEAD'] = remote_refs[refs[-1]]
456
465
457 if update_after:
466 if update_after:
458 # we want to checkout HEAD
467 # we want to checkout HEAD
459 repo["HEAD"] = remote_refs["HEAD"]
468 repo["HEAD"] = remote_refs["HEAD"]
460 index.build_index_from_tree(repo.path, repo.index_path(),
469 index.build_index_from_tree(repo.path, repo.index_path(),
461 repo.object_store, repo["HEAD"].tree)
470 repo.object_store, repo["HEAD"].tree)
462 return remote_refs
471 return remote_refs
463
472
464 @reraise_safe_exceptions
473 @reraise_safe_exceptions
465 def sync_fetch(self, wire, url, refs=None):
474 def sync_fetch(self, wire, url, refs=None):
466 repo = self._factory.repo(wire)
475 repo = self._factory.repo(wire)
467 if refs and not isinstance(refs, (list, tuple)):
476 if refs and not isinstance(refs, (list, tuple)):
468 refs = [refs]
477 refs = [refs]
469 config = self._wire_to_config(wire)
478 config = self._wire_to_config(wire)
470 # get all remote refs we'll use to fetch later
479 # get all remote refs we'll use to fetch later
471 output, __ = self.run_git_command(
480 output, __ = self.run_git_command(
472 wire, ['ls-remote', url], fail_on_stderr=False,
481 wire, ['ls-remote', url], fail_on_stderr=False,
473 _copts=self._remote_conf(config),
482 _copts=self._remote_conf(config),
474 extra_env={'GIT_TERMINAL_PROMPT': '0'})
483 extra_env={'GIT_TERMINAL_PROMPT': '0'})
475
484
476 remote_refs = collections.OrderedDict()
485 remote_refs = collections.OrderedDict()
477 fetch_refs = []
486 fetch_refs = []
478
487
479 for ref_line in output.splitlines():
488 for ref_line in output.splitlines():
480 sha, ref = ref_line.split('\t')
489 sha, ref = ref_line.split('\t')
481 sha = sha.strip()
490 sha = sha.strip()
482 if ref in remote_refs:
491 if ref in remote_refs:
483 # duplicate, skip
492 # duplicate, skip
484 continue
493 continue
485 if ref.endswith(self.peeled_ref_marker):
494 if ref.endswith(self.peeled_ref_marker):
486 log.debug("Skipping peeled reference %s", ref)
495 log.debug("Skipping peeled reference %s", ref)
487 continue
496 continue
488 # don't sync HEAD
497 # don't sync HEAD
489 if ref in ['HEAD']:
498 if ref in ['HEAD']:
490 continue
499 continue
491
500
492 remote_refs[ref] = sha
501 remote_refs[ref] = sha
493
502
494 if refs and sha in refs:
503 if refs and sha in refs:
495 # we filter fetch using our specified refs
504 # we filter fetch using our specified refs
496 fetch_refs.append('{}:{}'.format(ref, ref))
505 fetch_refs.append('{}:{}'.format(ref, ref))
497 elif not refs:
506 elif not refs:
498 fetch_refs.append('{}:{}'.format(ref, ref))
507 fetch_refs.append('{}:{}'.format(ref, ref))
499 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
508 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
500 if fetch_refs:
509 if fetch_refs:
501 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
510 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
502 fetch_refs_chunks = list(chunk)
511 fetch_refs_chunks = list(chunk)
503 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
512 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
504 _out, _err = self.run_git_command(
513 _out, _err = self.run_git_command(
505 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
514 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
506 fail_on_stderr=False,
515 fail_on_stderr=False,
507 _copts=self._remote_conf(config),
516 _copts=self._remote_conf(config),
508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
517 extra_env={'GIT_TERMINAL_PROMPT': '0'})
509
518
510 return remote_refs
519 return remote_refs
511
520
512 @reraise_safe_exceptions
521 @reraise_safe_exceptions
513 def sync_push(self, wire, url, refs=None):
522 def sync_push(self, wire, url, refs=None):
514 if not self.check_url(url, wire):
523 if not self.check_url(url, wire):
515 return
524 return
516 config = self._wire_to_config(wire)
525 config = self._wire_to_config(wire)
517 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
518 self.run_git_command(
527 self.run_git_command(
519 wire, ['push', url, '--mirror'], fail_on_stderr=False,
528 wire, ['push', url, '--mirror'], fail_on_stderr=False,
520 _copts=self._remote_conf(config),
529 _copts=self._remote_conf(config),
521 extra_env={'GIT_TERMINAL_PROMPT': '0'})
530 extra_env={'GIT_TERMINAL_PROMPT': '0'})
522
531
523 @reraise_safe_exceptions
532 @reraise_safe_exceptions
524 def get_remote_refs(self, wire, url):
533 def get_remote_refs(self, wire, url):
525 repo = Repo(url)
534 repo = Repo(url)
526 return repo.get_refs()
535 return repo.get_refs()
527
536
528 @reraise_safe_exceptions
537 @reraise_safe_exceptions
529 def get_description(self, wire):
538 def get_description(self, wire):
530 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
531 return repo.get_description()
540 return repo.get_description()
532
541
533 @reraise_safe_exceptions
542 @reraise_safe_exceptions
534 def get_missing_revs(self, wire, rev1, rev2, path2):
543 def get_missing_revs(self, wire, rev1, rev2, path2):
535 repo = self._factory.repo(wire)
544 repo = self._factory.repo(wire)
536 LocalGitClient(thin_packs=False).fetch(path2, repo)
545 LocalGitClient(thin_packs=False).fetch(path2, repo)
537
546
538 wire_remote = wire.copy()
547 wire_remote = wire.copy()
539 wire_remote['path'] = path2
548 wire_remote['path'] = path2
540 repo_remote = self._factory.repo(wire_remote)
549 repo_remote = self._factory.repo(wire_remote)
541 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
550 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
542
551
543 revs = [
552 revs = [
544 x.commit.id
553 x.commit.id
545 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
554 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
546 return revs
555 return revs
547
556
548 @reraise_safe_exceptions
557 @reraise_safe_exceptions
549 def get_object(self, wire, sha):
558 def get_object(self, wire, sha):
550 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
551 obj = repo.get_object(sha)
560 obj = repo.get_object(sha)
552 commit_id = obj.id
561 commit_id = obj.id
553
562
554 if isinstance(obj, Tag):
563 if isinstance(obj, Tag):
555 commit_id = obj.object[1]
564 commit_id = obj.object[1]
556
565
557 return {
566 return {
558 'id': obj.id,
567 'id': obj.id,
559 'type': obj.type_name,
568 'type': obj.type_name,
560 'commit_id': commit_id
569 'commit_id': commit_id,
570 'idx': 0
561 }
571 }
562
572
563 @reraise_safe_exceptions
573 @reraise_safe_exceptions
564 def get_object_attrs(self, wire, sha, *attrs):
574 def get_object_attrs(self, wire, sha, *attrs):
565 repo = self._factory.repo(wire)
575 repo = self._factory.repo(wire)
566 obj = repo.get_object(sha)
576 obj = repo.get_object(sha)
567 return list(getattr(obj, a) for a in attrs)
577 return list(getattr(obj, a) for a in attrs)
568
578
569 @reraise_safe_exceptions
579 @reraise_safe_exceptions
570 def get_refs(self, wire):
580 def get_refs(self, wire):
571 repo = self._factory.repo(wire)
581 repo = self._factory.repo(wire)
572 result = {}
582 result = {}
573 for ref, sha in repo.refs.as_dict().items():
583 for ref, sha in repo.refs.as_dict().items():
574 peeled_sha = repo.get_peeled(ref)
584 peeled_sha = repo.get_peeled(ref)
575 result[ref] = peeled_sha
585 result[ref] = peeled_sha
576 return result
586 return result
577
587
578 @reraise_safe_exceptions
588 @reraise_safe_exceptions
579 def get_refs_path(self, wire):
589 def get_refs_path(self, wire):
580 repo = self._factory.repo(wire)
590 repo = self._factory.repo(wire)
581 return repo.refs.path
591 return repo.refs.path
582
592
583 @reraise_safe_exceptions
593 @reraise_safe_exceptions
584 def head(self, wire, show_exc=True):
594 def head(self, wire, show_exc=True):
585 repo = self._factory.repo(wire)
595 repo = self._factory.repo(wire)
586 try:
596 try:
587 return repo.head()
597 return repo.head()
588 except Exception:
598 except Exception:
589 if show_exc:
599 if show_exc:
590 raise
600 raise
591
601
592 @reraise_safe_exceptions
602 @reraise_safe_exceptions
593 def init(self, wire):
603 def init(self, wire):
594 repo_path = str_to_dulwich(wire['path'])
604 repo_path = str_to_dulwich(wire['path'])
595 self.repo = Repo.init(repo_path)
605 self.repo = Repo.init(repo_path)
596
606
597 @reraise_safe_exceptions
607 @reraise_safe_exceptions
598 def init_bare(self, wire):
608 def init_bare(self, wire):
599 repo_path = str_to_dulwich(wire['path'])
609 repo_path = str_to_dulwich(wire['path'])
600 self.repo = Repo.init_bare(repo_path)
610 self.repo = Repo.init_bare(repo_path)
601
611
602 @reraise_safe_exceptions
612 @reraise_safe_exceptions
603 def revision(self, wire, rev):
613 def revision(self, wire, rev):
604 repo = self._factory.repo(wire)
614 repo = self._factory.repo(wire)
605 obj = repo[rev]
615 obj = repo[rev]
606 obj_data = {
616 obj_data = {
607 'id': obj.id,
617 'id': obj.id,
608 }
618 }
609 try:
619 try:
610 obj_data['tree'] = obj.tree
620 obj_data['tree'] = obj.tree
611 except AttributeError:
621 except AttributeError:
612 pass
622 pass
613 return obj_data
623 return obj_data
614
624
615 @reraise_safe_exceptions
625 @reraise_safe_exceptions
616 def commit_attribute(self, wire, rev, attr):
626 def commit_attribute(self, wire, rev, attr):
617 repo = self._factory.repo(wire)
627 repo = self._factory.repo(wire)
618 obj = repo[rev]
628 obj = repo[rev]
619 return getattr(obj, attr)
629 return getattr(obj, attr)
620
630
621 @reraise_safe_exceptions
631 @reraise_safe_exceptions
622 def set_refs(self, wire, key, value):
632 def set_refs(self, wire, key, value):
623 repo = self._factory.repo(wire)
633 repo = self._factory.repo(wire)
624 repo.refs[key] = value
634 repo.refs[key] = value
625
635
626 @reraise_safe_exceptions
636 @reraise_safe_exceptions
627 def remove_ref(self, wire, key):
637 def remove_ref(self, wire, key):
628 repo = self._factory.repo(wire)
638 repo = self._factory.repo(wire)
629 del repo.refs[key]
639 del repo.refs[key]
630
640
631 @reraise_safe_exceptions
641 @reraise_safe_exceptions
632 def tree_changes(self, wire, source_id, target_id):
642 def tree_changes(self, wire, source_id, target_id):
633 repo = self._factory.repo(wire)
643 repo = self._factory.repo(wire)
634 source = repo[source_id].tree if source_id else None
644 source = repo[source_id].tree if source_id else None
635 target = repo[target_id].tree
645 target = repo[target_id].tree
636 result = repo.object_store.tree_changes(source, target)
646 result = repo.object_store.tree_changes(source, target)
637 return list(result)
647 return list(result)
638
648
639 @reraise_safe_exceptions
649 @reraise_safe_exceptions
640 def tree_items(self, wire, tree_id):
650 def tree_items(self, wire, tree_id):
641 repo = self._factory.repo(wire)
651 repo = self._factory.repo(wire)
642 tree = repo[tree_id]
652 tree = repo[tree_id]
643
653
644 result = []
654 result = []
645 for item in tree.iteritems():
655 for item in tree.iteritems():
646 item_sha = item.sha
656 item_sha = item.sha
647 item_mode = item.mode
657 item_mode = item.mode
648
658
649 if FILE_MODE(item_mode) == GIT_LINK:
659 if FILE_MODE(item_mode) == GIT_LINK:
650 item_type = "link"
660 item_type = "link"
651 else:
661 else:
652 item_type = repo[item_sha].type_name
662 item_type = repo[item_sha].type_name
653
663
654 result.append((item.path, item_mode, item_sha, item_type))
664 result.append((item.path, item_mode, item_sha, item_type))
655 return result
665 return result
656
666
657 @reraise_safe_exceptions
667 @reraise_safe_exceptions
658 def update_server_info(self, wire):
668 def update_server_info(self, wire):
659 repo = self._factory.repo(wire)
669 repo = self._factory.repo(wire)
660 update_server_info(repo)
670 update_server_info(repo)
661
671
662 @reraise_safe_exceptions
672 @reraise_safe_exceptions
663 def discover_git_version(self):
673 def discover_git_version(self):
664 stdout, _ = self.run_git_command(
674 stdout, _ = self.run_git_command(
665 {}, ['--version'], _bare=True, _safe=True)
675 {}, ['--version'], _bare=True, _safe=True)
666 prefix = 'git version'
676 prefix = 'git version'
667 if stdout.startswith(prefix):
677 if stdout.startswith(prefix):
668 stdout = stdout[len(prefix):]
678 stdout = stdout[len(prefix):]
669 return stdout.strip()
679 return stdout.strip()
670
680
671 @reraise_safe_exceptions
681 @reraise_safe_exceptions
672 def run_git_command(self, wire, cmd, **opts):
682 def run_git_command(self, wire, cmd, **opts):
673 path = wire.get('path', None)
683 path = wire.get('path', None)
674
684
675 if path and os.path.isdir(path):
685 if path and os.path.isdir(path):
676 opts['cwd'] = path
686 opts['cwd'] = path
677
687
678 if '_bare' in opts:
688 if '_bare' in opts:
679 _copts = []
689 _copts = []
680 del opts['_bare']
690 del opts['_bare']
681 else:
691 else:
682 _copts = ['-c', 'core.quotepath=false', ]
692 _copts = ['-c', 'core.quotepath=false', ]
683 safe_call = False
693 safe_call = False
684 if '_safe' in opts:
694 if '_safe' in opts:
685 # no exc on failure
695 # no exc on failure
686 del opts['_safe']
696 del opts['_safe']
687 safe_call = True
697 safe_call = True
688
698
689 if '_copts' in opts:
699 if '_copts' in opts:
690 _copts.extend(opts['_copts'] or [])
700 _copts.extend(opts['_copts'] or [])
691 del opts['_copts']
701 del opts['_copts']
692
702
693 gitenv = os.environ.copy()
703 gitenv = os.environ.copy()
694 gitenv.update(opts.pop('extra_env', {}))
704 gitenv.update(opts.pop('extra_env', {}))
695 # need to clean fix GIT_DIR !
705 # need to clean fix GIT_DIR !
696 if 'GIT_DIR' in gitenv:
706 if 'GIT_DIR' in gitenv:
697 del gitenv['GIT_DIR']
707 del gitenv['GIT_DIR']
698 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
708 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
699 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
709 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
700
710
701 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
711 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
702 _opts = {'env': gitenv, 'shell': False}
712 _opts = {'env': gitenv, 'shell': False}
703
713
704 try:
714 try:
705 _opts.update(opts)
715 _opts.update(opts)
706 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
716 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
707
717
708 return ''.join(p), ''.join(p.error)
718 return ''.join(p), ''.join(p.error)
709 except (EnvironmentError, OSError) as err:
719 except (EnvironmentError, OSError) as err:
710 cmd = ' '.join(cmd) # human friendly CMD
720 cmd = ' '.join(cmd) # human friendly CMD
711 tb_err = ("Couldn't run git command (%s).\n"
721 tb_err = ("Couldn't run git command (%s).\n"
712 "Original error was:%s\n"
722 "Original error was:%s\n"
713 "Call options:%s\n"
723 "Call options:%s\n"
714 % (cmd, err, _opts))
724 % (cmd, err, _opts))
715 log.exception(tb_err)
725 log.exception(tb_err)
716 if safe_call:
726 if safe_call:
717 return '', err
727 return '', err
718 else:
728 else:
719 raise exceptions.VcsException()(tb_err)
729 raise exceptions.VcsException()(tb_err)
720
730
721 @reraise_safe_exceptions
731 @reraise_safe_exceptions
722 def install_hooks(self, wire, force=False):
732 def install_hooks(self, wire, force=False):
723 from vcsserver.hook_utils import install_git_hooks
733 from vcsserver.hook_utils import install_git_hooks
724 repo = self._factory.repo(wire)
734 repo = self._factory.repo(wire)
725 return install_git_hooks(repo.path, repo.bare, force_create=force)
735 return install_git_hooks(repo.path, repo.bare, force_create=force)
726
736
727 @reraise_safe_exceptions
737 @reraise_safe_exceptions
728 def get_hooks_info(self, wire):
738 def get_hooks_info(self, wire):
729 from vcsserver.hook_utils import (
739 from vcsserver.hook_utils import (
730 get_git_pre_hook_version, get_git_post_hook_version)
740 get_git_pre_hook_version, get_git_post_hook_version)
731 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
732 return {
742 return {
733 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
743 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
734 'post_version': get_git_post_hook_version(repo.path, repo.bare),
744 'post_version': get_git_post_hook_version(repo.path, repo.bare),
735 }
745 }
736
746
737
747
738 def str_to_dulwich(value):
748 def str_to_dulwich(value):
739 """
749 """
740 Dulwich 0.10.1a requires `unicode` objects to be passed in.
750 Dulwich 0.10.1a requires `unicode` objects to be passed in.
741 """
751 """
742 return value.decode(settings.WIRE_ENCODING)
752 return value.decode(settings.WIRE_ENCODING)
@@ -1,287 +1,292 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import re
18 import re
19 import logging
19 import logging
20 from wsgiref.util import FileWrapper
20 from wsgiref.util import FileWrapper
21
21
22 import simplejson as json
22 import simplejson as json
23 from pyramid.config import Configurator
23 from pyramid.config import Configurator
24 from pyramid.response import Response, FileIter
24 from pyramid.response import Response, FileIter
25 from pyramid.httpexceptions import (
25 from pyramid.httpexceptions import (
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 HTTPUnprocessableEntity)
27 HTTPUnprocessableEntity)
28
28
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.utils import safe_int
31 from vcsserver.utils import safe_int
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
38
39
39
40 def write_response_error(http_exception, text=None):
40 def write_response_error(http_exception, text=None):
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 _exception = http_exception(content_type=content_type)
42 _exception = http_exception(content_type=content_type)
43 _exception.content_type = content_type
43 _exception.content_type = content_type
44 if text:
44 if text:
45 _exception.body = json.dumps({'message': text})
45 _exception.body = json.dumps({'message': text})
46 log.debug('LFS: writing response of type %s to client with text:%s',
46 log.debug('LFS: writing response of type %s to client with text:%s',
47 http_exception, text)
47 http_exception, text)
48 return _exception
48 return _exception
49
49
50
50
51 class AuthHeaderRequired(object):
51 class AuthHeaderRequired(object):
52 """
52 """
53 Decorator to check if request has proper auth-header
53 Decorator to check if request has proper auth-header
54 """
54 """
55
55
56 def __call__(self, func):
56 def __call__(self, func):
57 return get_cython_compat_decorator(self.__wrapper, func)
57 return get_cython_compat_decorator(self.__wrapper, func)
58
58
59 def __wrapper(self, func, *fargs, **fkwargs):
59 def __wrapper(self, func, *fargs, **fkwargs):
60 request = fargs[1]
60 request = fargs[1]
61 auth = request.authorization
61 auth = request.authorization
62 if not auth:
62 if not auth:
63 return write_response_error(HTTPForbidden)
63 return write_response_error(HTTPForbidden)
64 return func(*fargs[1:], **fkwargs)
64 return func(*fargs[1:], **fkwargs)
65
65
66
66
67 # views
67 # views
68
68
69 def lfs_objects(request):
69 def lfs_objects(request):
70 # indicate not supported, V1 API
70 # indicate not supported, V1 API
71 log.warning('LFS: v1 api not supported, reporting it back to client')
71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73
73
74
74
75 @AuthHeaderRequired()
75 @AuthHeaderRequired()
76 def lfs_objects_batch(request):
76 def lfs_objects_batch(request):
77 """
77 """
78 The client sends the following information to the Batch endpoint to transfer some objects:
78 The client sends the following information to the Batch endpoint to transfer some objects:
79
79
80 operation - Should be download or upload.
80 operation - Should be download or upload.
81 transfers - An optional Array of String identifiers for transfer
81 transfers - An optional Array of String identifiers for transfer
82 adapters that the client has configured. If omitted, the basic
82 adapters that the client has configured. If omitted, the basic
83 transfer adapter MUST be assumed by the server.
83 transfer adapter MUST be assumed by the server.
84 objects - An Array of objects to download.
84 objects - An Array of objects to download.
85 oid - String OID of the LFS object.
85 oid - String OID of the LFS object.
86 size - Integer byte size of the LFS object. Must be at least zero.
86 size - Integer byte size of the LFS object. Must be at least zero.
87 """
87 """
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 auth = request.authorization
89 auth = request.authorization
90 repo = request.matchdict.get('repo')
90 repo = request.matchdict.get('repo')
91 data = request.json
91 data = request.json
92 operation = data.get('operation')
92 operation = data.get('operation')
93 http_scheme = request.registry.git_lfs_http_scheme
94
93 if operation not in ('download', 'upload'):
95 if operation not in ('download', 'upload'):
94 log.debug('LFS: unsupported operation:%s', operation)
96 log.debug('LFS: unsupported operation:%s', operation)
95 return write_response_error(
97 return write_response_error(
96 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
97
99
98 if 'objects' not in data:
100 if 'objects' not in data:
99 log.debug('LFS: missing objects data')
101 log.debug('LFS: missing objects data')
100 return write_response_error(
102 return write_response_error(
101 HTTPBadRequest, 'missing objects data')
103 HTTPBadRequest, 'missing objects data')
102
104
103 log.debug('LFS: handling operation of type: %s', operation)
105 log.debug('LFS: handling operation of type: %s', operation)
104
106
105 objects = []
107 objects = []
106 for o in data['objects']:
108 for o in data['objects']:
107 try:
109 try:
108 oid = o['oid']
110 oid = o['oid']
109 obj_size = o['size']
111 obj_size = o['size']
110 except KeyError:
112 except KeyError:
111 log.exception('LFS, failed to extract data')
113 log.exception('LFS, failed to extract data')
112 return write_response_error(
114 return write_response_error(
113 HTTPBadRequest, 'unsupported data in objects')
115 HTTPBadRequest, 'unsupported data in objects')
114
116
115 obj_data = {'oid': oid}
117 obj_data = {'oid': oid}
116
118
117 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid)
119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
118 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo)
120 _scheme=http_scheme)
121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
122 _scheme=http_scheme)
119 store = LFSOidStore(
123 store = LFSOidStore(
120 oid, repo, store_location=request.registry.git_lfs_store_path)
124 oid, repo, store_location=request.registry.git_lfs_store_path)
121 handler = OidHandler(
125 handler = OidHandler(
122 store, repo, auth, oid, obj_size, obj_data,
126 store, repo, auth, oid, obj_size, obj_data,
123 obj_href, obj_verify_href)
127 obj_href, obj_verify_href)
124
128
125 # this verifies also OIDs
129 # this verifies also OIDs
126 actions, errors = handler.exec_operation(operation)
130 actions, errors = handler.exec_operation(operation)
127 if errors:
131 if errors:
128 log.warning('LFS: got following errors: %s', errors)
132 log.warning('LFS: got following errors: %s', errors)
129 obj_data['errors'] = errors
133 obj_data['errors'] = errors
130
134
131 if actions:
135 if actions:
132 obj_data['actions'] = actions
136 obj_data['actions'] = actions
133
137
134 obj_data['size'] = obj_size
138 obj_data['size'] = obj_size
135 obj_data['authenticated'] = True
139 obj_data['authenticated'] = True
136 objects.append(obj_data)
140 objects.append(obj_data)
137
141
138 result = {'objects': objects, 'transfer': 'basic'}
142 result = {'objects': objects, 'transfer': 'basic'}
139 log.debug('LFS Response %s', safe_result(result))
143 log.debug('LFS Response %s', safe_result(result))
140
144
141 return result
145 return result
142
146
143
147
144 def lfs_objects_oid_upload(request):
148 def lfs_objects_oid_upload(request):
145 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
146 repo = request.matchdict.get('repo')
150 repo = request.matchdict.get('repo')
147 oid = request.matchdict.get('oid')
151 oid = request.matchdict.get('oid')
148 store = LFSOidStore(
152 store = LFSOidStore(
149 oid, repo, store_location=request.registry.git_lfs_store_path)
153 oid, repo, store_location=request.registry.git_lfs_store_path)
150 engine = store.get_engine(mode='wb')
154 engine = store.get_engine(mode='wb')
151 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
152
156
153 body = request.environ['wsgi.input']
157 body = request.environ['wsgi.input']
154
158
155 with engine as f:
159 with engine as f:
156 blksize = 64 * 1024 # 64kb
160 blksize = 64 * 1024 # 64kb
157 while True:
161 while True:
158 # read in chunks as stream comes in from Gunicorn
162 # read in chunks as stream comes in from Gunicorn
159 # this is a specific Gunicorn support function.
163 # this is a specific Gunicorn support function.
160 # might work differently on waitress
164 # might work differently on waitress
161 chunk = body.read(blksize)
165 chunk = body.read(blksize)
162 if not chunk:
166 if not chunk:
163 break
167 break
164 f.write(chunk)
168 f.write(chunk)
165
169
166 return {'upload': 'ok'}
170 return {'upload': 'ok'}
167
171
168
172
169 def lfs_objects_oid_download(request):
173 def lfs_objects_oid_download(request):
170 repo = request.matchdict.get('repo')
174 repo = request.matchdict.get('repo')
171 oid = request.matchdict.get('oid')
175 oid = request.matchdict.get('oid')
172
176
173 store = LFSOidStore(
177 store = LFSOidStore(
174 oid, repo, store_location=request.registry.git_lfs_store_path)
178 oid, repo, store_location=request.registry.git_lfs_store_path)
175 if not store.has_oid():
179 if not store.has_oid():
176 log.debug('LFS: oid %s does not exists in store', oid)
180 log.debug('LFS: oid %s does not exists in store', oid)
177 return write_response_error(
181 return write_response_error(
178 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
179
183
180 # TODO(marcink): support range header ?
184 # TODO(marcink): support range header ?
181 # Range: bytes=0-, `bytes=(\d+)\-.*`
185 # Range: bytes=0-, `bytes=(\d+)\-.*`
182
186
183 f = open(store.oid_path, 'rb')
187 f = open(store.oid_path, 'rb')
184 response = Response(
188 response = Response(
185 content_type='application/octet-stream', app_iter=FileIter(f))
189 content_type='application/octet-stream', app_iter=FileIter(f))
186 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
187 return response
191 return response
188
192
189
193
190 def lfs_objects_verify(request):
194 def lfs_objects_verify(request):
191 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
192 repo = request.matchdict.get('repo')
196 repo = request.matchdict.get('repo')
193
197
194 data = request.json
198 data = request.json
195 oid = data.get('oid')
199 oid = data.get('oid')
196 size = safe_int(data.get('size'))
200 size = safe_int(data.get('size'))
197
201
198 if not (oid and size):
202 if not (oid and size):
199 return write_response_error(
203 return write_response_error(
200 HTTPBadRequest, 'missing oid and size in request data')
204 HTTPBadRequest, 'missing oid and size in request data')
201
205
202 store = LFSOidStore(
206 store = LFSOidStore(
203 oid, repo, store_location=request.registry.git_lfs_store_path)
207 oid, repo, store_location=request.registry.git_lfs_store_path)
204 if not store.has_oid():
208 if not store.has_oid():
205 log.debug('LFS: oid %s does not exists in store', oid)
209 log.debug('LFS: oid %s does not exists in store', oid)
206 return write_response_error(
210 return write_response_error(
207 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
211 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
208
212
209 store_size = store.size_oid()
213 store_size = store.size_oid()
210 if store_size != size:
214 if store_size != size:
211 msg = 'requested file size mismatch store size:%s requested:%s' % (
215 msg = 'requested file size mismatch store size:%s requested:%s' % (
212 store_size, size)
216 store_size, size)
213 return write_response_error(
217 return write_response_error(
214 HTTPUnprocessableEntity, msg)
218 HTTPUnprocessableEntity, msg)
215
219
216 return {'message': {'size': 'ok', 'in_store': 'ok'}}
220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
217
221
218
222
219 def lfs_objects_lock(request):
223 def lfs_objects_lock(request):
220 return write_response_error(
224 return write_response_error(
221 HTTPNotImplemented, 'GIT LFS locking api not supported')
225 HTTPNotImplemented, 'GIT LFS locking api not supported')
222
226
223
227
224 def not_found(request):
228 def not_found(request):
225 return write_response_error(
229 return write_response_error(
226 HTTPNotFound, 'request path not found')
230 HTTPNotFound, 'request path not found')
227
231
228
232
229 def lfs_disabled(request):
233 def lfs_disabled(request):
230 return write_response_error(
234 return write_response_error(
231 HTTPNotImplemented, 'GIT LFS disabled for this repo')
235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
232
236
233
237
234 def git_lfs_app(config):
238 def git_lfs_app(config):
235
239
236 # v1 API deprecation endpoint
240 # v1 API deprecation endpoint
237 config.add_route('lfs_objects',
241 config.add_route('lfs_objects',
238 '/{repo:.*?[^/]}/info/lfs/objects')
242 '/{repo:.*?[^/]}/info/lfs/objects')
239 config.add_view(lfs_objects, route_name='lfs_objects',
243 config.add_view(lfs_objects, route_name='lfs_objects',
240 request_method='POST', renderer='json')
244 request_method='POST', renderer='json')
241
245
242 # locking API
246 # locking API
243 config.add_route('lfs_objects_lock',
247 config.add_route('lfs_objects_lock',
244 '/{repo:.*?[^/]}/info/lfs/locks')
248 '/{repo:.*?[^/]}/info/lfs/locks')
245 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
246 request_method=('POST', 'GET'), renderer='json')
250 request_method=('POST', 'GET'), renderer='json')
247
251
248 config.add_route('lfs_objects_lock_verify',
252 config.add_route('lfs_objects_lock_verify',
249 '/{repo:.*?[^/]}/info/lfs/locks/verify')
253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
250 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
251 request_method=('POST', 'GET'), renderer='json')
255 request_method=('POST', 'GET'), renderer='json')
252
256
253 # batch API
257 # batch API
254 config.add_route('lfs_objects_batch',
258 config.add_route('lfs_objects_batch',
255 '/{repo:.*?[^/]}/info/lfs/objects/batch')
259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
256 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
257 request_method='POST', renderer='json')
261 request_method='POST', renderer='json')
258
262
259 # oid upload/download API
263 # oid upload/download API
260 config.add_route('lfs_objects_oid',
264 config.add_route('lfs_objects_oid',
261 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
262 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
263 request_method='PUT', renderer='json')
267 request_method='PUT', renderer='json')
264 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
265 request_method='GET', renderer='json')
269 request_method='GET', renderer='json')
266
270
267 # verification API
271 # verification API
268 config.add_route('lfs_objects_verify',
272 config.add_route('lfs_objects_verify',
269 '/{repo:.*?[^/]}/info/lfs/verify')
273 '/{repo:.*?[^/]}/info/lfs/verify')
270 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
271 request_method='POST', renderer='json')
275 request_method='POST', renderer='json')
272
276
273 # not found handler for API
277 # not found handler for API
274 config.add_notfound_view(not_found, renderer='json')
278 config.add_notfound_view(not_found, renderer='json')
275
279
276
280
277 def create_app(git_lfs_enabled, git_lfs_store_path):
281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
278 config = Configurator()
282 config = Configurator()
279 if git_lfs_enabled:
283 if git_lfs_enabled:
280 config.include(git_lfs_app)
284 config.include(git_lfs_app)
281 config.registry.git_lfs_store_path = git_lfs_store_path
285 config.registry.git_lfs_store_path = git_lfs_store_path
286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
282 else:
287 else:
283 # not found handler for API, reporting disabled LFS support
288 # not found handler for API, reporting disabled LFS support
284 config.add_notfound_view(lfs_disabled, renderer='json')
289 config.add_notfound_view(lfs_disabled, renderer='json')
285
290
286 app = config.make_wsgi_app()
291 app = config.make_wsgi_app()
287 return app
292 return app
@@ -1,239 +1,272 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21 import simplejson as json
21 import simplejson as json
22
22
23 from vcsserver.git_lfs.app import create_app
23 from vcsserver.git_lfs.app import create_app
24
24
25
25
26 @pytest.fixture(scope='function')
26 @pytest.fixture(scope='function')
27 def git_lfs_app(tmpdir):
27 def git_lfs_app(tmpdir):
28 custom_app = WebObTestApp(create_app(
28 custom_app = WebObTestApp(create_app(
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir)))
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
30 git_lfs_http_scheme='http'))
31 custom_app._store = str(tmpdir)
32 return custom_app
33
34
35 @pytest.fixture(scope='function')
36 def git_lfs_https_app(tmpdir):
37 custom_app = WebObTestApp(create_app(
38 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
39 git_lfs_http_scheme='https'))
30 custom_app._store = str(tmpdir)
40 custom_app._store = str(tmpdir)
31 return custom_app
41 return custom_app
32
42
33
43
34 @pytest.fixture()
44 @pytest.fixture()
35 def http_auth():
45 def http_auth():
36 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
46 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
37
47
38
48
39 class TestLFSApplication(object):
49 class TestLFSApplication(object):
40
50
41 def test_app_wrong_path(self, git_lfs_app):
51 def test_app_wrong_path(self, git_lfs_app):
42 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
52 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
43
53
44 def test_app_deprecated_endpoint(self, git_lfs_app):
54 def test_app_deprecated_endpoint(self, git_lfs_app):
45 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
55 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
46 assert response.status_code == 501
56 assert response.status_code == 501
47 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
57 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
48
58
49 def test_app_lock_verify_api_not_available(self, git_lfs_app):
59 def test_app_lock_verify_api_not_available(self, git_lfs_app):
50 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
60 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
51 assert response.status_code == 501
61 assert response.status_code == 501
52 assert json.loads(response.text) == {
62 assert json.loads(response.text) == {
53 u'message': u'GIT LFS locking api not supported'}
63 u'message': u'GIT LFS locking api not supported'}
54
64
55 def test_app_lock_api_not_available(self, git_lfs_app):
65 def test_app_lock_api_not_available(self, git_lfs_app):
56 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
66 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
57 assert response.status_code == 501
67 assert response.status_code == 501
58 assert json.loads(response.text) == {
68 assert json.loads(response.text) == {
59 u'message': u'GIT LFS locking api not supported'}
69 u'message': u'GIT LFS locking api not supported'}
60
70
61 def test_app_batch_api_missing_auth(self, git_lfs_app,):
71 def test_app_batch_api_missing_auth(self, git_lfs_app):
62 git_lfs_app.post_json(
72 git_lfs_app.post_json(
63 '/repo/info/lfs/objects/batch', params={}, status=403)
73 '/repo/info/lfs/objects/batch', params={}, status=403)
64
74
65 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
75 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
66 response = git_lfs_app.post_json(
76 response = git_lfs_app.post_json(
67 '/repo/info/lfs/objects/batch', params={}, status=400,
77 '/repo/info/lfs/objects/batch', params={}, status=400,
68 extra_environ=http_auth)
78 extra_environ=http_auth)
69 assert json.loads(response.text) == {
79 assert json.loads(response.text) == {
70 u'message': u'unsupported operation mode: `None`'}
80 u'message': u'unsupported operation mode: `None`'}
71
81
72 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
82 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
73 response = git_lfs_app.post_json(
83 response = git_lfs_app.post_json(
74 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
84 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
75 status=400, extra_environ=http_auth)
85 status=400, extra_environ=http_auth)
76 assert json.loads(response.text) == {
86 assert json.loads(response.text) == {
77 u'message': u'missing objects data'}
87 u'message': u'missing objects data'}
78
88
79 def test_app_batch_api_unsupported_data_in_objects(
89 def test_app_batch_api_unsupported_data_in_objects(
80 self, git_lfs_app, http_auth):
90 self, git_lfs_app, http_auth):
81 params = {'operation': 'download',
91 params = {'operation': 'download',
82 'objects': [{}]}
92 'objects': [{}]}
83 response = git_lfs_app.post_json(
93 response = git_lfs_app.post_json(
84 '/repo/info/lfs/objects/batch', params=params, status=400,
94 '/repo/info/lfs/objects/batch', params=params, status=400,
85 extra_environ=http_auth)
95 extra_environ=http_auth)
86 assert json.loads(response.text) == {
96 assert json.loads(response.text) == {
87 u'message': u'unsupported data in objects'}
97 u'message': u'unsupported data in objects'}
88
98
89 def test_app_batch_api_download_missing_object(
99 def test_app_batch_api_download_missing_object(
90 self, git_lfs_app, http_auth):
100 self, git_lfs_app, http_auth):
91 params = {'operation': 'download',
101 params = {'operation': 'download',
92 'objects': [{'oid': '123', 'size': '1024'}]}
102 'objects': [{'oid': '123', 'size': '1024'}]}
93 response = git_lfs_app.post_json(
103 response = git_lfs_app.post_json(
94 '/repo/info/lfs/objects/batch', params=params,
104 '/repo/info/lfs/objects/batch', params=params,
95 extra_environ=http_auth)
105 extra_environ=http_auth)
96
106
97 expected_objects = [
107 expected_objects = [
98 {u'authenticated': True,
108 {u'authenticated': True,
99 u'errors': {u'error': {
109 u'errors': {u'error': {
100 u'code': 404,
110 u'code': 404,
101 u'message': u'object: 123 does not exist in store'}},
111 u'message': u'object: 123 does not exist in store'}},
102 u'oid': u'123',
112 u'oid': u'123',
103 u'size': u'1024'}
113 u'size': u'1024'}
104 ]
114 ]
105 assert json.loads(response.text) == {
115 assert json.loads(response.text) == {
106 'objects': expected_objects, 'transfer': 'basic'}
116 'objects': expected_objects, 'transfer': 'basic'}
107
117
108 def test_app_batch_api_download(self, git_lfs_app, http_auth):
118 def test_app_batch_api_download(self, git_lfs_app, http_auth):
109 oid = '456'
119 oid = '456'
110 oid_path = os.path.join(git_lfs_app._store, oid)
120 oid_path = os.path.join(git_lfs_app._store, oid)
111 if not os.path.isdir(os.path.dirname(oid_path)):
121 if not os.path.isdir(os.path.dirname(oid_path)):
112 os.makedirs(os.path.dirname(oid_path))
122 os.makedirs(os.path.dirname(oid_path))
113 with open(oid_path, 'wb') as f:
123 with open(oid_path, 'wb') as f:
114 f.write('OID_CONTENT')
124 f.write('OID_CONTENT')
115
125
116 params = {'operation': 'download',
126 params = {'operation': 'download',
117 'objects': [{'oid': oid, 'size': '1024'}]}
127 'objects': [{'oid': oid, 'size': '1024'}]}
118 response = git_lfs_app.post_json(
128 response = git_lfs_app.post_json(
119 '/repo/info/lfs/objects/batch', params=params,
129 '/repo/info/lfs/objects/batch', params=params,
120 extra_environ=http_auth)
130 extra_environ=http_auth)
121
131
122 expected_objects = [
132 expected_objects = [
123 {u'authenticated': True,
133 {u'authenticated': True,
124 u'actions': {
134 u'actions': {
125 u'download': {
135 u'download': {
126 u'header': {u'Authorization': u'Basic XXXXX'},
136 u'header': {u'Authorization': u'Basic XXXXX'},
127 u'href': u'http://localhost/repo/info/lfs/objects/456'},
137 u'href': u'http://localhost/repo/info/lfs/objects/456'},
128 },
138 },
129 u'oid': u'456',
139 u'oid': u'456',
130 u'size': u'1024'}
140 u'size': u'1024'}
131 ]
141 ]
132 assert json.loads(response.text) == {
142 assert json.loads(response.text) == {
133 'objects': expected_objects, 'transfer': 'basic'}
143 'objects': expected_objects, 'transfer': 'basic'}
134
144
135 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
145 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
136 params = {'operation': 'upload',
146 params = {'operation': 'upload',
137 'objects': [{'oid': '123', 'size': '1024'}]}
147 'objects': [{'oid': '123', 'size': '1024'}]}
138 response = git_lfs_app.post_json(
148 response = git_lfs_app.post_json(
139 '/repo/info/lfs/objects/batch', params=params,
149 '/repo/info/lfs/objects/batch', params=params,
140 extra_environ=http_auth)
150 extra_environ=http_auth)
141 expected_objects = [
151 expected_objects = [
142 {u'authenticated': True,
152 {u'authenticated': True,
143 u'actions': {
153 u'actions': {
144 u'upload': {
154 u'upload': {
145 u'header': {u'Authorization': u'Basic XXXXX',
155 u'header': {u'Authorization': u'Basic XXXXX',
146 u'Transfer-Encoding': u'chunked'},
156 u'Transfer-Encoding': u'chunked'},
147 u'href': u'http://localhost/repo/info/lfs/objects/123'},
157 u'href': u'http://localhost/repo/info/lfs/objects/123'},
148 u'verify': {
158 u'verify': {
149 u'header': {u'Authorization': u'Basic XXXXX'},
159 u'header': {u'Authorization': u'Basic XXXXX'},
150 u'href': u'http://localhost/repo/info/lfs/verify'}
160 u'href': u'http://localhost/repo/info/lfs/verify'}
151 },
161 },
152 u'oid': u'123',
162 u'oid': u'123',
153 u'size': u'1024'}
163 u'size': u'1024'}
154 ]
164 ]
155 assert json.loads(response.text) == {
165 assert json.loads(response.text) == {
156 'objects': expected_objects, 'transfer': 'basic'}
166 'objects': expected_objects, 'transfer': 'basic'}
157
167
168 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
169 params = {'operation': 'upload',
170 'objects': [{'oid': '123', 'size': '1024'}]}
171 response = git_lfs_https_app.post_json(
172 '/repo/info/lfs/objects/batch', params=params,
173 extra_environ=http_auth)
174 expected_objects = [
175 {u'authenticated': True,
176 u'actions': {
177 u'upload': {
178 u'header': {u'Authorization': u'Basic XXXXX',
179 u'Transfer-Encoding': u'chunked'},
180 u'href': u'https://localhost/repo/info/lfs/objects/123'},
181 u'verify': {
182 u'header': {u'Authorization': u'Basic XXXXX'},
183 u'href': u'https://localhost/repo/info/lfs/verify'}
184 },
185 u'oid': u'123',
186 u'size': u'1024'}
187 ]
188 assert json.loads(response.text) == {
189 'objects': expected_objects, 'transfer': 'basic'}
190
158 def test_app_verify_api_missing_data(self, git_lfs_app):
191 def test_app_verify_api_missing_data(self, git_lfs_app):
159 params = {'oid': 'missing',}
192 params = {'oid': 'missing'}
160 response = git_lfs_app.post_json(
193 response = git_lfs_app.post_json(
161 '/repo/info/lfs/verify', params=params,
194 '/repo/info/lfs/verify', params=params,
162 status=400)
195 status=400)
163
196
164 assert json.loads(response.text) == {
197 assert json.loads(response.text) == {
165 u'message': u'missing oid and size in request data'}
198 u'message': u'missing oid and size in request data'}
166
199
167 def test_app_verify_api_missing_obj(self, git_lfs_app):
200 def test_app_verify_api_missing_obj(self, git_lfs_app):
168 params = {'oid': 'missing', 'size': '1024'}
201 params = {'oid': 'missing', 'size': '1024'}
169 response = git_lfs_app.post_json(
202 response = git_lfs_app.post_json(
170 '/repo/info/lfs/verify', params=params,
203 '/repo/info/lfs/verify', params=params,
171 status=404)
204 status=404)
172
205
173 assert json.loads(response.text) == {
206 assert json.loads(response.text) == {
174 u'message': u'oid `missing` does not exists in store'}
207 u'message': u'oid `missing` does not exists in store'}
175
208
176 def test_app_verify_api_size_mismatch(self, git_lfs_app):
209 def test_app_verify_api_size_mismatch(self, git_lfs_app):
177 oid = 'existing'
210 oid = 'existing'
178 oid_path = os.path.join(git_lfs_app._store, oid)
211 oid_path = os.path.join(git_lfs_app._store, oid)
179 if not os.path.isdir(os.path.dirname(oid_path)):
212 if not os.path.isdir(os.path.dirname(oid_path)):
180 os.makedirs(os.path.dirname(oid_path))
213 os.makedirs(os.path.dirname(oid_path))
181 with open(oid_path, 'wb') as f:
214 with open(oid_path, 'wb') as f:
182 f.write('OID_CONTENT')
215 f.write('OID_CONTENT')
183
216
184 params = {'oid': oid, 'size': '1024'}
217 params = {'oid': oid, 'size': '1024'}
185 response = git_lfs_app.post_json(
218 response = git_lfs_app.post_json(
186 '/repo/info/lfs/verify', params=params, status=422)
219 '/repo/info/lfs/verify', params=params, status=422)
187
220
188 assert json.loads(response.text) == {
221 assert json.loads(response.text) == {
189 u'message': u'requested file size mismatch '
222 u'message': u'requested file size mismatch '
190 u'store size:11 requested:1024'}
223 u'store size:11 requested:1024'}
191
224
192 def test_app_verify_api(self, git_lfs_app):
225 def test_app_verify_api(self, git_lfs_app):
193 oid = 'existing'
226 oid = 'existing'
194 oid_path = os.path.join(git_lfs_app._store, oid)
227 oid_path = os.path.join(git_lfs_app._store, oid)
195 if not os.path.isdir(os.path.dirname(oid_path)):
228 if not os.path.isdir(os.path.dirname(oid_path)):
196 os.makedirs(os.path.dirname(oid_path))
229 os.makedirs(os.path.dirname(oid_path))
197 with open(oid_path, 'wb') as f:
230 with open(oid_path, 'wb') as f:
198 f.write('OID_CONTENT')
231 f.write('OID_CONTENT')
199
232
200 params = {'oid': oid, 'size': 11}
233 params = {'oid': oid, 'size': 11}
201 response = git_lfs_app.post_json(
234 response = git_lfs_app.post_json(
202 '/repo/info/lfs/verify', params=params)
235 '/repo/info/lfs/verify', params=params)
203
236
204 assert json.loads(response.text) == {
237 assert json.loads(response.text) == {
205 u'message': {u'size': u'ok', u'in_store': u'ok'}}
238 u'message': {u'size': u'ok', u'in_store': u'ok'}}
206
239
207 def test_app_download_api_oid_not_existing(self, git_lfs_app):
240 def test_app_download_api_oid_not_existing(self, git_lfs_app):
208 oid = 'missing'
241 oid = 'missing'
209
242
210 response = git_lfs_app.get(
243 response = git_lfs_app.get(
211 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
244 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
212
245
213 assert json.loads(response.text) == {
246 assert json.loads(response.text) == {
214 u'message': u'requested file with oid `missing` not found in store'}
247 u'message': u'requested file with oid `missing` not found in store'}
215
248
216 def test_app_download_api(self, git_lfs_app):
249 def test_app_download_api(self, git_lfs_app):
217 oid = 'existing'
250 oid = 'existing'
218 oid_path = os.path.join(git_lfs_app._store, oid)
251 oid_path = os.path.join(git_lfs_app._store, oid)
219 if not os.path.isdir(os.path.dirname(oid_path)):
252 if not os.path.isdir(os.path.dirname(oid_path)):
220 os.makedirs(os.path.dirname(oid_path))
253 os.makedirs(os.path.dirname(oid_path))
221 with open(oid_path, 'wb') as f:
254 with open(oid_path, 'wb') as f:
222 f.write('OID_CONTENT')
255 f.write('OID_CONTENT')
223
256
224 response = git_lfs_app.get(
257 response = git_lfs_app.get(
225 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
258 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
226 assert response
259 assert response
227
260
228 def test_app_upload(self, git_lfs_app):
261 def test_app_upload(self, git_lfs_app):
229 oid = 'uploaded'
262 oid = 'uploaded'
230
263
231 response = git_lfs_app.put(
264 response = git_lfs_app.put(
232 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
265 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
233
266
234 assert json.loads(response.text) == {u'upload': u'ok'}
267 assert json.loads(response.text) == {u'upload': u'ok'}
235
268
236 # verify that we actually wrote that OID
269 # verify that we actually wrote that OID
237 oid_path = os.path.join(git_lfs_app._store, oid)
270 oid_path = os.path.join(git_lfs_app._store, oid)
238 assert os.path.isfile(oid_path)
271 assert os.path.isfile(oid_path)
239 assert 'CONTENT' == open(oid_path).read()
272 assert 'CONTENT' == open(oid_path).read()
@@ -1,803 +1,856 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23
24
24 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
27 from mercurial import commands
27 from mercurial import unionrepo
28 from mercurial import unionrepo
28 from mercurial import verify
29 from mercurial import verify
29
30
30 import vcsserver
31 import vcsserver
31 from vcsserver import exceptions
32 from vcsserver import exceptions
32 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.hgcompat import (
34 from vcsserver.hgcompat import (
34 archival, bin, clone, config as hgconfig, diffopts, hex,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 RepoLookupError, InterventionRequired, RequirementError)
39 RepoLookupError, InterventionRequired, RequirementError)
39
40
40 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
41
42
42
43
43 def make_ui_from_config(repo_config):
44 def make_ui_from_config(repo_config):
44 baseui = ui.ui()
45
46 class LoggingUI(ui.ui):
47 def status(self, *msg, **opts):
48 log.info(' '.join(msg).rstrip('\n'))
49 super(LoggingUI, self).status(*msg, **opts)
50
51 def warn(self, *msg, **opts):
52 log.warn(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).warn(*msg, **opts)
54
55 def error(self, *msg, **opts):
56 log.error(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).error(*msg, **opts)
58
59 def note(self, *msg, **opts):
60 log.info(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).note(*msg, **opts)
62
63 def debug(self, *msg, **opts):
64 log.debug(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).debug(*msg, **opts)
66
67 baseui = LoggingUI()
45
68
46 # clean the baseui object
69 # clean the baseui object
47 baseui._ocfg = hgconfig.config()
70 baseui._ocfg = hgconfig.config()
48 baseui._ucfg = hgconfig.config()
71 baseui._ucfg = hgconfig.config()
49 baseui._tcfg = hgconfig.config()
72 baseui._tcfg = hgconfig.config()
50
73
51 for section, option, value in repo_config:
74 for section, option, value in repo_config:
52 baseui.setconfig(section, option, value)
75 baseui.setconfig(section, option, value)
53
76
54 # make our hgweb quiet so it doesn't print output
77 # make our hgweb quiet so it doesn't print output
55 baseui.setconfig('ui', 'quiet', 'true')
78 baseui.setconfig('ui', 'quiet', 'true')
56
79
57 baseui.setconfig('ui', 'paginate', 'never')
80 baseui.setconfig('ui', 'paginate', 'never')
81 # for better Error reporting of Mercurial
82 baseui.setconfig('ui', 'message-output', 'stderr')
83
58 # force mercurial to only use 1 thread, otherwise it may try to set a
84 # force mercurial to only use 1 thread, otherwise it may try to set a
59 # signal in a non-main thread, thus generating a ValueError.
85 # signal in a non-main thread, thus generating a ValueError.
60 baseui.setconfig('worker', 'numcpus', 1)
86 baseui.setconfig('worker', 'numcpus', 1)
61
87
62 # If there is no config for the largefiles extension, we explicitly disable
88 # If there is no config for the largefiles extension, we explicitly disable
63 # it here. This overrides settings from repositories hgrc file. Recent
89 # it here. This overrides settings from repositories hgrc file. Recent
64 # mercurial versions enable largefiles in hgrc on clone from largefile
90 # mercurial versions enable largefiles in hgrc on clone from largefile
65 # repo.
91 # repo.
66 if not baseui.hasconfig('extensions', 'largefiles'):
92 if not baseui.hasconfig('extensions', 'largefiles'):
67 log.debug('Explicitly disable largefiles extension for repo.')
93 log.debug('Explicitly disable largefiles extension for repo.')
68 baseui.setconfig('extensions', 'largefiles', '!')
94 baseui.setconfig('extensions', 'largefiles', '!')
69
95
70 return baseui
96 return baseui
71
97
72
98
73 def reraise_safe_exceptions(func):
99 def reraise_safe_exceptions(func):
74 """Decorator for converting mercurial exceptions to something neutral."""
100 """Decorator for converting mercurial exceptions to something neutral."""
75 def wrapper(*args, **kwargs):
101 def wrapper(*args, **kwargs):
76 try:
102 try:
77 return func(*args, **kwargs)
103 return func(*args, **kwargs)
78 except (Abort, InterventionRequired) as e:
104 except (Abort, InterventionRequired) as e:
79 raise_from_original(exceptions.AbortException(e))
105 raise_from_original(exceptions.AbortException(e))
80 except RepoLookupError as e:
106 except RepoLookupError as e:
81 raise_from_original(exceptions.LookupException(e))
107 raise_from_original(exceptions.LookupException(e))
82 except RequirementError as e:
108 except RequirementError as e:
83 raise_from_original(exceptions.RequirementException(e))
109 raise_from_original(exceptions.RequirementException(e))
84 except RepoError as e:
110 except RepoError as e:
85 raise_from_original(exceptions.VcsException(e))
111 raise_from_original(exceptions.VcsException(e))
86 except LookupError as e:
112 except LookupError as e:
87 raise_from_original(exceptions.LookupException(e))
113 raise_from_original(exceptions.LookupException(e))
88 except Exception as e:
114 except Exception as e:
89 if not hasattr(e, '_vcs_kind'):
115 if not hasattr(e, '_vcs_kind'):
90 log.exception("Unhandled exception in hg remote call")
116 log.exception("Unhandled exception in hg remote call")
91 raise_from_original(exceptions.UnhandledException(e))
117 raise_from_original(exceptions.UnhandledException(e))
92
118
93 raise
119 raise
94 return wrapper
120 return wrapper
95
121
96
122
97 class MercurialFactory(RepoFactory):
123 class MercurialFactory(RepoFactory):
98 repo_type = 'hg'
124 repo_type = 'hg'
99
125
100 def _create_config(self, config, hooks=True):
126 def _create_config(self, config, hooks=True):
101 if not hooks:
127 if not hooks:
102 hooks_to_clean = frozenset((
128 hooks_to_clean = frozenset((
103 'changegroup.repo_size', 'preoutgoing.pre_pull',
129 'changegroup.repo_size', 'preoutgoing.pre_pull',
104 'outgoing.pull_logger', 'prechangegroup.pre_push'))
130 'outgoing.pull_logger', 'prechangegroup.pre_push'))
105 new_config = []
131 new_config = []
106 for section, option, value in config:
132 for section, option, value in config:
107 if section == 'hooks' and option in hooks_to_clean:
133 if section == 'hooks' and option in hooks_to_clean:
108 continue
134 continue
109 new_config.append((section, option, value))
135 new_config.append((section, option, value))
110 config = new_config
136 config = new_config
111
137
112 baseui = make_ui_from_config(config)
138 baseui = make_ui_from_config(config)
113 return baseui
139 return baseui
114
140
115 def _create_repo(self, wire, create):
141 def _create_repo(self, wire, create):
116 baseui = self._create_config(wire["config"])
142 baseui = self._create_config(wire["config"])
117 return localrepository(baseui, wire["path"], create)
143 return instance(baseui, wire["path"], create)
118
144
119
145
120 class HgRemote(object):
146 class HgRemote(object):
121
147
122 def __init__(self, factory):
148 def __init__(self, factory):
123 self._factory = factory
149 self._factory = factory
124
150
125 self._bulk_methods = {
151 self._bulk_methods = {
126 "affected_files": self.ctx_files,
152 "affected_files": self.ctx_files,
127 "author": self.ctx_user,
153 "author": self.ctx_user,
128 "branch": self.ctx_branch,
154 "branch": self.ctx_branch,
129 "children": self.ctx_children,
155 "children": self.ctx_children,
130 "date": self.ctx_date,
156 "date": self.ctx_date,
131 "message": self.ctx_description,
157 "message": self.ctx_description,
132 "parents": self.ctx_parents,
158 "parents": self.ctx_parents,
133 "status": self.ctx_status,
159 "status": self.ctx_status,
134 "obsolete": self.ctx_obsolete,
160 "obsolete": self.ctx_obsolete,
135 "phase": self.ctx_phase,
161 "phase": self.ctx_phase,
136 "hidden": self.ctx_hidden,
162 "hidden": self.ctx_hidden,
137 "_file_paths": self.ctx_list,
163 "_file_paths": self.ctx_list,
138 }
164 }
139
165
166 def _get_ctx(self, repo, ref):
167 return get_ctx(repo, ref)
168
140 @reraise_safe_exceptions
169 @reraise_safe_exceptions
141 def discover_hg_version(self):
170 def discover_hg_version(self):
142 from mercurial import util
171 from mercurial import util
143 return util.version()
172 return util.version()
144
173
145 @reraise_safe_exceptions
174 @reraise_safe_exceptions
175 def is_empty(self, wire):
176 repo = self._factory.repo(wire)
177
178 try:
179 return len(repo) == 0
180 except Exception:
181 log.exception("failed to read object_store")
182 return False
183
184 @reraise_safe_exceptions
146 def archive_repo(self, archive_path, mtime, file_info, kind):
185 def archive_repo(self, archive_path, mtime, file_info, kind):
147 if kind == "tgz":
186 if kind == "tgz":
148 archiver = archival.tarit(archive_path, mtime, "gz")
187 archiver = archival.tarit(archive_path, mtime, "gz")
149 elif kind == "tbz2":
188 elif kind == "tbz2":
150 archiver = archival.tarit(archive_path, mtime, "bz2")
189 archiver = archival.tarit(archive_path, mtime, "bz2")
151 elif kind == 'zip':
190 elif kind == 'zip':
152 archiver = archival.zipit(archive_path, mtime)
191 archiver = archival.zipit(archive_path, mtime)
153 else:
192 else:
154 raise exceptions.ArchiveException()(
193 raise exceptions.ArchiveException()(
155 'Remote does not support: "%s".' % kind)
194 'Remote does not support: "%s".' % kind)
156
195
157 for f_path, f_mode, f_is_link, f_content in file_info:
196 for f_path, f_mode, f_is_link, f_content in file_info:
158 archiver.addfile(f_path, f_mode, f_is_link, f_content)
197 archiver.addfile(f_path, f_mode, f_is_link, f_content)
159 archiver.done()
198 archiver.done()
160
199
161 @reraise_safe_exceptions
200 @reraise_safe_exceptions
162 def bookmarks(self, wire):
201 def bookmarks(self, wire):
163 repo = self._factory.repo(wire)
202 repo = self._factory.repo(wire)
164 return dict(repo._bookmarks)
203 return dict(repo._bookmarks)
165
204
166 @reraise_safe_exceptions
205 @reraise_safe_exceptions
167 def branches(self, wire, normal, closed):
206 def branches(self, wire, normal, closed):
168 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
169 iter_branches = repo.branchmap().iterbranches()
208 iter_branches = repo.branchmap().iterbranches()
170 bt = {}
209 bt = {}
171 for branch_name, _heads, tip, is_closed in iter_branches:
210 for branch_name, _heads, tip, is_closed in iter_branches:
172 if normal and not is_closed:
211 if normal and not is_closed:
173 bt[branch_name] = tip
212 bt[branch_name] = tip
174 if closed and is_closed:
213 if closed and is_closed:
175 bt[branch_name] = tip
214 bt[branch_name] = tip
176
215
177 return bt
216 return bt
178
217
179 @reraise_safe_exceptions
218 @reraise_safe_exceptions
180 def bulk_request(self, wire, rev, pre_load):
219 def bulk_request(self, wire, rev, pre_load):
181 result = {}
220 result = {}
182 for attr in pre_load:
221 for attr in pre_load:
183 try:
222 try:
184 method = self._bulk_methods[attr]
223 method = self._bulk_methods[attr]
185 result[attr] = method(wire, rev)
224 result[attr] = method(wire, rev)
186 except KeyError as e:
225 except KeyError as e:
187 raise exceptions.VcsException(e)(
226 raise exceptions.VcsException(e)(
188 'Unknown bulk attribute: "%s"' % attr)
227 'Unknown bulk attribute: "%s"' % attr)
189 return result
228 return result
190
229
191 @reraise_safe_exceptions
230 @reraise_safe_exceptions
192 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
231 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
193 baseui = self._factory._create_config(wire["config"], hooks=hooks)
232 baseui = self._factory._create_config(wire["config"], hooks=hooks)
194 clone(baseui, source, dest, noupdate=not update_after_clone)
233 clone(baseui, source, dest, noupdate=not update_after_clone)
195
234
196 @reraise_safe_exceptions
235 @reraise_safe_exceptions
197 def commitctx(
236 def commitctx(
198 self, wire, message, parents, commit_time, commit_timezone,
237 self, wire, message, parents, commit_time, commit_timezone,
199 user, files, extra, removed, updated):
238 user, files, extra, removed, updated):
200
239
201 def _filectxfn(_repo, memctx, path):
240 repo = self._factory.repo(wire)
241 baseui = self._factory._create_config(wire['config'])
242 publishing = baseui.configbool('phases', 'publish')
243 if publishing:
244 new_commit = 'public'
245 else:
246 new_commit = 'draft'
247
248 def _filectxfn(_repo, ctx, path):
202 """
249 """
203 Marks given path as added/changed/removed in a given _repo. This is
250 Marks given path as added/changed/removed in a given _repo. This is
204 for internal mercurial commit function.
251 for internal mercurial commit function.
205 """
252 """
206
253
207 # check if this path is removed
254 # check if this path is removed
208 if path in removed:
255 if path in removed:
209 # returning None is a way to mark node for removal
256 # returning None is a way to mark node for removal
210 return None
257 return None
211
258
212 # check if this path is added
259 # check if this path is added
213 for node in updated:
260 for node in updated:
214 if node['path'] == path:
261 if node['path'] == path:
215 return memfilectx(
262 return memfilectx(
216 _repo,
263 _repo,
217 changectx=memctx,
264 changectx=ctx,
218 path=node['path'],
265 path=node['path'],
219 data=node['content'],
266 data=node['content'],
220 islink=False,
267 islink=False,
221 isexec=bool(node['mode'] & stat.S_IXUSR),
268 isexec=bool(node['mode'] & stat.S_IXUSR),
222 copied=False)
269 copied=False)
223
270
224 raise exceptions.AbortException()(
271 raise exceptions.AbortException()(
225 "Given path haven't been marked as added, "
272 "Given path haven't been marked as added, "
226 "changed or removed (%s)" % path)
273 "changed or removed (%s)" % path)
227
274
228 repo = self._factory.repo(wire)
275 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
229
276
230 commit_ctx = memctx(
277 commit_ctx = memctx(
231 repo=repo,
278 repo=repo,
232 parents=parents,
279 parents=parents,
233 text=message,
280 text=message,
234 files=files,
281 files=files,
235 filectxfn=_filectxfn,
282 filectxfn=_filectxfn,
236 user=user,
283 user=user,
237 date=(commit_time, commit_timezone),
284 date=(commit_time, commit_timezone),
238 extra=extra)
285 extra=extra)
239
286
240 n = repo.commitctx(commit_ctx)
287 n = repo.commitctx(commit_ctx)
241 new_id = hex(n)
288 new_id = hex(n)
242
289
243 return new_id
290 return new_id
244
291
245 @reraise_safe_exceptions
292 @reraise_safe_exceptions
246 def ctx_branch(self, wire, revision):
293 def ctx_branch(self, wire, revision):
247 repo = self._factory.repo(wire)
294 repo = self._factory.repo(wire)
248 ctx = repo[revision]
295 ctx = self._get_ctx(repo, revision)
249 return ctx.branch()
296 return ctx.branch()
250
297
251 @reraise_safe_exceptions
298 @reraise_safe_exceptions
252 def ctx_children(self, wire, revision):
299 def ctx_children(self, wire, revision):
253 repo = self._factory.repo(wire)
300 repo = self._factory.repo(wire)
254 ctx = repo[revision]
301 ctx = self._get_ctx(repo, revision)
255 return [child.rev() for child in ctx.children()]
302 return [child.rev() for child in ctx.children()]
256
303
257 @reraise_safe_exceptions
304 @reraise_safe_exceptions
258 def ctx_date(self, wire, revision):
305 def ctx_date(self, wire, revision):
259 repo = self._factory.repo(wire)
306 repo = self._factory.repo(wire)
260 ctx = repo[revision]
307 ctx = self._get_ctx(repo, revision)
261 return ctx.date()
308 return ctx.date()
262
309
263 @reraise_safe_exceptions
310 @reraise_safe_exceptions
264 def ctx_description(self, wire, revision):
311 def ctx_description(self, wire, revision):
265 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
266 ctx = repo[revision]
313 ctx = self._get_ctx(repo, revision)
267 return ctx.description()
314 return ctx.description()
268
315
269 @reraise_safe_exceptions
316 @reraise_safe_exceptions
270 def ctx_diff(
271 self, wire, revision, git=True, ignore_whitespace=True, context=3):
272 repo = self._factory.repo(wire)
273 ctx = repo[revision]
274 result = ctx.diff(
275 git=git, ignore_whitespace=ignore_whitespace, context=context)
276 return list(result)
277
278 @reraise_safe_exceptions
279 def ctx_files(self, wire, revision):
317 def ctx_files(self, wire, revision):
280 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
281 ctx = repo[revision]
319 ctx = self._get_ctx(repo, revision)
282 return ctx.files()
320 return ctx.files()
283
321
284 @reraise_safe_exceptions
322 @reraise_safe_exceptions
285 def ctx_list(self, path, revision):
323 def ctx_list(self, path, revision):
286 repo = self._factory.repo(path)
324 repo = self._factory.repo(path)
287 ctx = repo[revision]
325 ctx = self._get_ctx(repo, revision)
288 return list(ctx)
326 return list(ctx)
289
327
290 @reraise_safe_exceptions
328 @reraise_safe_exceptions
291 def ctx_parents(self, wire, revision):
329 def ctx_parents(self, wire, revision):
292 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
293 ctx = repo[revision]
331 ctx = self._get_ctx(repo, revision)
294 return [parent.rev() for parent in ctx.parents()]
332 return [parent.rev() for parent in ctx.parents()]
295
333
296 @reraise_safe_exceptions
334 @reraise_safe_exceptions
297 def ctx_phase(self, wire, revision):
335 def ctx_phase(self, wire, revision):
298 repo = self._factory.repo(wire)
336 repo = self._factory.repo(wire)
299 ctx = repo[revision]
337 ctx = self._get_ctx(repo, revision)
300 # public=0, draft=1, secret=3
338 # public=0, draft=1, secret=3
301 return ctx.phase()
339 return ctx.phase()
302
340
303 @reraise_safe_exceptions
341 @reraise_safe_exceptions
304 def ctx_obsolete(self, wire, revision):
342 def ctx_obsolete(self, wire, revision):
305 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
306 ctx = repo[revision]
344 ctx = self._get_ctx(repo, revision)
307 return ctx.obsolete()
345 return ctx.obsolete()
308
346
309 @reraise_safe_exceptions
347 @reraise_safe_exceptions
310 def ctx_hidden(self, wire, revision):
348 def ctx_hidden(self, wire, revision):
311 repo = self._factory.repo(wire)
349 repo = self._factory.repo(wire)
312 ctx = repo[revision]
350 ctx = self._get_ctx(repo, revision)
313 return ctx.hidden()
351 return ctx.hidden()
314
352
315 @reraise_safe_exceptions
353 @reraise_safe_exceptions
316 def ctx_substate(self, wire, revision):
354 def ctx_substate(self, wire, revision):
317 repo = self._factory.repo(wire)
355 repo = self._factory.repo(wire)
318 ctx = repo[revision]
356 ctx = self._get_ctx(repo, revision)
319 return ctx.substate
357 return ctx.substate
320
358
321 @reraise_safe_exceptions
359 @reraise_safe_exceptions
322 def ctx_status(self, wire, revision):
360 def ctx_status(self, wire, revision):
323 repo = self._factory.repo(wire)
361 repo = self._factory.repo(wire)
324 ctx = repo[revision]
362 ctx = self._get_ctx(repo, revision)
325 status = repo[ctx.p1().node()].status(other=ctx.node())
363 status = repo[ctx.p1().node()].status(other=ctx.node())
326 # object of status (odd, custom named tuple in mercurial) is not
364 # object of status (odd, custom named tuple in mercurial) is not
327 # correctly serializable, we make it a list, as the underling
365 # correctly serializable, we make it a list, as the underling
328 # API expects this to be a list
366 # API expects this to be a list
329 return list(status)
367 return list(status)
330
368
331 @reraise_safe_exceptions
369 @reraise_safe_exceptions
332 def ctx_user(self, wire, revision):
370 def ctx_user(self, wire, revision):
333 repo = self._factory.repo(wire)
371 repo = self._factory.repo(wire)
334 ctx = repo[revision]
372 ctx = self._get_ctx(repo, revision)
335 return ctx.user()
373 return ctx.user()
336
374
337 @reraise_safe_exceptions
375 @reraise_safe_exceptions
338 def check_url(self, url, config):
376 def check_url(self, url, config):
339 _proto = None
377 _proto = None
340 if '+' in url[:url.find('://')]:
378 if '+' in url[:url.find('://')]:
341 _proto = url[0:url.find('+')]
379 _proto = url[0:url.find('+')]
342 url = url[url.find('+') + 1:]
380 url = url[url.find('+') + 1:]
343 handlers = []
381 handlers = []
344 url_obj = url_parser(url)
382 url_obj = url_parser(url)
345 test_uri, authinfo = url_obj.authinfo()
383 test_uri, authinfo = url_obj.authinfo()
346 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
384 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
347 url_obj.query = obfuscate_qs(url_obj.query)
385 url_obj.query = obfuscate_qs(url_obj.query)
348
386
349 cleaned_uri = str(url_obj)
387 cleaned_uri = str(url_obj)
350 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
388 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
351
389
352 if authinfo:
390 if authinfo:
353 # create a password manager
391 # create a password manager
354 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
392 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
355 passmgr.add_password(*authinfo)
393 passmgr.add_password(*authinfo)
356
394
357 handlers.extend((httpbasicauthhandler(passmgr),
395 handlers.extend((httpbasicauthhandler(passmgr),
358 httpdigestauthhandler(passmgr)))
396 httpdigestauthhandler(passmgr)))
359
397
360 o = urllib2.build_opener(*handlers)
398 o = urllib2.build_opener(*handlers)
361 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
399 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
362 ('Accept', 'application/mercurial-0.1')]
400 ('Accept', 'application/mercurial-0.1')]
363
401
364 q = {"cmd": 'between'}
402 q = {"cmd": 'between'}
365 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
403 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
366 qs = '?%s' % urllib.urlencode(q)
404 qs = '?%s' % urllib.urlencode(q)
367 cu = "%s%s" % (test_uri, qs)
405 cu = "%s%s" % (test_uri, qs)
368 req = urllib2.Request(cu, None, {})
406 req = urllib2.Request(cu, None, {})
369
407
370 try:
408 try:
371 log.debug("Trying to open URL %s", cleaned_uri)
409 log.debug("Trying to open URL %s", cleaned_uri)
372 resp = o.open(req)
410 resp = o.open(req)
373 if resp.code != 200:
411 if resp.code != 200:
374 raise exceptions.URLError()('Return Code is not 200')
412 raise exceptions.URLError()('Return Code is not 200')
375 except Exception as e:
413 except Exception as e:
376 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
414 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
377 # means it cannot be cloned
415 # means it cannot be cloned
378 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
416 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
379
417
380 # now check if it's a proper hg repo, but don't do it for svn
418 # now check if it's a proper hg repo, but don't do it for svn
381 try:
419 try:
382 if _proto == 'svn':
420 if _proto == 'svn':
383 pass
421 pass
384 else:
422 else:
385 # check for pure hg repos
423 # check for pure hg repos
386 log.debug(
424 log.debug(
387 "Verifying if URL is a Mercurial repository: %s",
425 "Verifying if URL is a Mercurial repository: %s",
388 cleaned_uri)
426 cleaned_uri)
389 ui = make_ui_from_config(config)
427 ui = make_ui_from_config(config)
390 peer_checker = makepeer(ui, url)
428 peer_checker = makepeer(ui, url)
391 peer_checker.lookup('tip')
429 peer_checker.lookup('tip')
392 except Exception as e:
430 except Exception as e:
393 log.warning("URL is not a valid Mercurial repository: %s",
431 log.warning("URL is not a valid Mercurial repository: %s",
394 cleaned_uri)
432 cleaned_uri)
395 raise exceptions.URLError(e)(
433 raise exceptions.URLError(e)(
396 "url [%s] does not look like an hg repo org_exc: %s"
434 "url [%s] does not look like an hg repo org_exc: %s"
397 % (cleaned_uri, e))
435 % (cleaned_uri, e))
398
436
399 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
437 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
400 return True
438 return True
401
439
402 @reraise_safe_exceptions
440 @reraise_safe_exceptions
403 def diff(
441 def diff(
404 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
442 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
405 context):
443 context):
406 repo = self._factory.repo(wire)
444 repo = self._factory.repo(wire)
407
445
408 if file_filter:
446 if file_filter:
409 match_filter = match(file_filter[0], '', [file_filter[1]])
447 match_filter = match(file_filter[0], '', [file_filter[1]])
410 else:
448 else:
411 match_filter = file_filter
449 match_filter = file_filter
412 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
450 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
413
451
414 try:
452 try:
415 return "".join(patch.diff(
453 return "".join(patch.diff(
416 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
454 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
417 except RepoLookupError as e:
455 except RepoLookupError as e:
418 raise exceptions.LookupException(e)()
456 raise exceptions.LookupException(e)()
419
457
420 @reraise_safe_exceptions
458 @reraise_safe_exceptions
421 def node_history(self, wire, revision, path, limit):
459 def node_history(self, wire, revision, path, limit):
422 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
423
461
424 ctx = repo[revision]
462 ctx = self._get_ctx(repo, revision)
425 fctx = ctx.filectx(path)
463 fctx = ctx.filectx(path)
426
464
427 def history_iter():
465 def history_iter():
428 limit_rev = fctx.rev()
466 limit_rev = fctx.rev()
429 for obj in reversed(list(fctx.filelog())):
467 for obj in reversed(list(fctx.filelog())):
430 obj = fctx.filectx(obj)
468 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
471 continue
472
431 if limit_rev >= obj.rev():
473 if limit_rev >= obj.rev():
432 yield obj
474 yield obj
433
475
434 history = []
476 history = []
435 for cnt, obj in enumerate(history_iter()):
477 for cnt, obj in enumerate(history_iter()):
436 if limit and cnt >= limit:
478 if limit and cnt >= limit:
437 break
479 break
438 history.append(hex(obj.node()))
480 history.append(hex(obj.node()))
439
481
440 return [x for x in history]
482 return [x for x in history]
441
483
442 @reraise_safe_exceptions
484 @reraise_safe_exceptions
443 def node_history_untill(self, wire, revision, path, limit):
485 def node_history_untill(self, wire, revision, path, limit):
444 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
445 ctx = repo[revision]
487 ctx = self._get_ctx(repo, revision)
446 fctx = ctx.filectx(path)
488 fctx = ctx.filectx(path)
447
489
448 file_log = list(fctx.filelog())
490 file_log = list(fctx.filelog())
449 if limit:
491 if limit:
450 # Limit to the last n items
492 # Limit to the last n items
451 file_log = file_log[-limit:]
493 file_log = file_log[-limit:]
452
494
453 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
495 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
454
496
455 @reraise_safe_exceptions
497 @reraise_safe_exceptions
456 def fctx_annotate(self, wire, revision, path):
498 def fctx_annotate(self, wire, revision, path):
457 repo = self._factory.repo(wire)
499 repo = self._factory.repo(wire)
458 ctx = repo[revision]
500 ctx = self._get_ctx(repo, revision)
459 fctx = ctx.filectx(path)
501 fctx = ctx.filectx(path)
460
502
461 result = []
503 result = []
462 for i, annotate_obj in enumerate(fctx.annotate(), 1):
504 for i, annotate_obj in enumerate(fctx.annotate(), 1):
463 ln_no = i
505 ln_no = i
464 sha = hex(annotate_obj.fctx.node())
506 sha = hex(annotate_obj.fctx.node())
465 content = annotate_obj.text
507 content = annotate_obj.text
466 result.append((ln_no, sha, content))
508 result.append((ln_no, sha, content))
467 return result
509 return result
468
510
469 @reraise_safe_exceptions
511 @reraise_safe_exceptions
470 def fctx_data(self, wire, revision, path):
512 def fctx_data(self, wire, revision, path):
471 repo = self._factory.repo(wire)
513 repo = self._factory.repo(wire)
472 ctx = repo[revision]
514 ctx = self._get_ctx(repo, revision)
473 fctx = ctx.filectx(path)
515 fctx = ctx.filectx(path)
474 return fctx.data()
516 return fctx.data()
475
517
476 @reraise_safe_exceptions
518 @reraise_safe_exceptions
477 def fctx_flags(self, wire, revision, path):
519 def fctx_flags(self, wire, revision, path):
478 repo = self._factory.repo(wire)
520 repo = self._factory.repo(wire)
479 ctx = repo[revision]
521 ctx = self._get_ctx(repo, revision)
480 fctx = ctx.filectx(path)
522 fctx = ctx.filectx(path)
481 return fctx.flags()
523 return fctx.flags()
482
524
483 @reraise_safe_exceptions
525 @reraise_safe_exceptions
484 def fctx_size(self, wire, revision, path):
526 def fctx_size(self, wire, revision, path):
485 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
486 ctx = repo[revision]
528 ctx = self._get_ctx(repo, revision)
487 fctx = ctx.filectx(path)
529 fctx = ctx.filectx(path)
488 return fctx.size()
530 return fctx.size()
489
531
490 @reraise_safe_exceptions
532 @reraise_safe_exceptions
491 def get_all_commit_ids(self, wire, name):
533 def get_all_commit_ids(self, wire, name):
492 repo = self._factory.repo(wire)
534 repo = self._factory.repo(wire)
493 revs = repo.filtered(name).changelog.index
535 repo = repo.filtered(name)
494 return map(lambda x: hex(x[7]), revs)[:-1]
536 revs = map(lambda x: hex(x[7]), repo.changelog.index)
537 return revs
495
538
496 @reraise_safe_exceptions
539 @reraise_safe_exceptions
497 def get_config_value(self, wire, section, name, untrusted=False):
540 def get_config_value(self, wire, section, name, untrusted=False):
498 repo = self._factory.repo(wire)
541 repo = self._factory.repo(wire)
499 return repo.ui.config(section, name, untrusted=untrusted)
542 return repo.ui.config(section, name, untrusted=untrusted)
500
543
501 @reraise_safe_exceptions
544 @reraise_safe_exceptions
502 def get_config_bool(self, wire, section, name, untrusted=False):
545 def get_config_bool(self, wire, section, name, untrusted=False):
503 repo = self._factory.repo(wire)
546 repo = self._factory.repo(wire)
504 return repo.ui.configbool(section, name, untrusted=untrusted)
547 return repo.ui.configbool(section, name, untrusted=untrusted)
505
548
506 @reraise_safe_exceptions
549 @reraise_safe_exceptions
507 def get_config_list(self, wire, section, name, untrusted=False):
550 def get_config_list(self, wire, section, name, untrusted=False):
508 repo = self._factory.repo(wire)
551 repo = self._factory.repo(wire)
509 return repo.ui.configlist(section, name, untrusted=untrusted)
552 return repo.ui.configlist(section, name, untrusted=untrusted)
510
553
511 @reraise_safe_exceptions
554 @reraise_safe_exceptions
512 def is_large_file(self, wire, path):
555 def is_large_file(self, wire, path):
513 return largefiles.lfutil.isstandin(path)
556 return largefiles.lfutil.isstandin(path)
514
557
515 @reraise_safe_exceptions
558 @reraise_safe_exceptions
516 def in_largefiles_store(self, wire, sha):
559 def in_largefiles_store(self, wire, sha):
517 repo = self._factory.repo(wire)
560 repo = self._factory.repo(wire)
518 return largefiles.lfutil.instore(repo, sha)
561 return largefiles.lfutil.instore(repo, sha)
519
562
520 @reraise_safe_exceptions
563 @reraise_safe_exceptions
521 def in_user_cache(self, wire, sha):
564 def in_user_cache(self, wire, sha):
522 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
523 return largefiles.lfutil.inusercache(repo.ui, sha)
566 return largefiles.lfutil.inusercache(repo.ui, sha)
524
567
525 @reraise_safe_exceptions
568 @reraise_safe_exceptions
526 def store_path(self, wire, sha):
569 def store_path(self, wire, sha):
527 repo = self._factory.repo(wire)
570 repo = self._factory.repo(wire)
528 return largefiles.lfutil.storepath(repo, sha)
571 return largefiles.lfutil.storepath(repo, sha)
529
572
530 @reraise_safe_exceptions
573 @reraise_safe_exceptions
531 def link(self, wire, sha, path):
574 def link(self, wire, sha, path):
532 repo = self._factory.repo(wire)
575 repo = self._factory.repo(wire)
533 largefiles.lfutil.link(
576 largefiles.lfutil.link(
534 largefiles.lfutil.usercachepath(repo.ui, sha), path)
577 largefiles.lfutil.usercachepath(repo.ui, sha), path)
535
578
536 @reraise_safe_exceptions
579 @reraise_safe_exceptions
537 def localrepository(self, wire, create=False):
580 def localrepository(self, wire, create=False):
538 self._factory.repo(wire, create=create)
581 self._factory.repo(wire, create=create)
539
582
540 @reraise_safe_exceptions
583 @reraise_safe_exceptions
541 def lookup(self, wire, revision, both):
584 def lookup(self, wire, revision, both):
542
585
543 repo = self._factory.repo(wire)
586 repo = self._factory.repo(wire)
544
587
545 if isinstance(revision, int):
588 if isinstance(revision, int):
546 # NOTE(marcink):
589 # NOTE(marcink):
547 # since Mercurial doesn't support indexes properly
590 # since Mercurial doesn't support negative indexes properly
548 # we need to shift accordingly by one to get proper index, e.g
591 # we need to shift accordingly by one to get proper index, e.g
549 # repo[-1] => repo[-2]
592 # repo[-1] => repo[-2]
550 # repo[0] => repo[-1]
593 # repo[0] => repo[-1]
551 # repo[1] => repo[2] we also never call repo[0] because
552 # it's actually second commit
553 if revision <= 0:
594 if revision <= 0:
554 revision = revision + -1
595 revision = revision + -1
555 else:
556 revision = revision + 1
557
558 try:
596 try:
559 ctx = repo[revision]
597 ctx = self._get_ctx(repo, revision)
560 except RepoLookupError as e:
598 except (TypeError, RepoLookupError) as e:
599 e._org_exc_tb = traceback.format_exc()
561 raise exceptions.LookupException(e)(revision)
600 raise exceptions.LookupException(e)(revision)
562 except LookupError as e:
601 except LookupError as e:
602 e._org_exc_tb = traceback.format_exc()
563 raise exceptions.LookupException(e)(e.name)
603 raise exceptions.LookupException(e)(e.name)
564
604
565 if not both:
605 if not both:
566 return ctx.hex()
606 return ctx.hex()
567
607
568 ctx = repo[ctx.hex()]
608 ctx = repo[ctx.hex()]
569 return ctx.hex(), ctx.rev()
609 return ctx.hex(), ctx.rev()
570
610
571 @reraise_safe_exceptions
611 @reraise_safe_exceptions
572 def pull(self, wire, url, commit_ids=None):
612 def pull(self, wire, url, commit_ids=None):
573 repo = self._factory.repo(wire)
613 repo = self._factory.repo(wire)
574 # Disable any prompts for this repo
614 # Disable any prompts for this repo
575 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
615 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576
616
577 remote = peer(repo, {}, url)
617 remote = peer(repo, {}, url)
578 # Disable any prompts for this remote
618 # Disable any prompts for this remote
579 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
619 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
580
620
581 if commit_ids:
621 if commit_ids:
582 commit_ids = [bin(commit_id) for commit_id in commit_ids]
622 commit_ids = [bin(commit_id) for commit_id in commit_ids]
583
623
584 return exchange.pull(
624 return exchange.pull(
585 repo, remote, heads=commit_ids, force=None).cgresult
625 repo, remote, heads=commit_ids, force=None).cgresult
586
626
587 @reraise_safe_exceptions
627 @reraise_safe_exceptions
588 def sync_push(self, wire, url):
628 def sync_push(self, wire, url):
589 if not self.check_url(url, wire['config']):
629 if not self.check_url(url, wire['config']):
590 return
630 return
591
631
592 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
593
633
594 # Disable any prompts for this repo
634 # Disable any prompts for this repo
595 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
635 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
596
636
597 bookmarks = dict(repo._bookmarks).keys()
637 bookmarks = dict(repo._bookmarks).keys()
598 remote = peer(repo, {}, url)
638 remote = peer(repo, {}, url)
599 # Disable any prompts for this remote
639 # Disable any prompts for this remote
600 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
640 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
601
641
602 return exchange.push(
642 return exchange.push(
603 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
643 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
604
644
605 @reraise_safe_exceptions
645 @reraise_safe_exceptions
606 def revision(self, wire, rev):
646 def revision(self, wire, rev):
607 repo = self._factory.repo(wire)
647 repo = self._factory.repo(wire)
608 ctx = repo[rev]
648 ctx = self._get_ctx(repo, rev)
609 return ctx.rev()
649 return ctx.rev()
610
650
611 @reraise_safe_exceptions
651 @reraise_safe_exceptions
612 def rev_range(self, wire, filter):
652 def rev_range(self, wire, filter):
613 repo = self._factory.repo(wire)
653 repo = self._factory.repo(wire)
614 revisions = [rev for rev in revrange(repo, filter)]
654 revisions = [rev for rev in revrange(repo, filter)]
615 return revisions
655 return revisions
616
656
617 @reraise_safe_exceptions
657 @reraise_safe_exceptions
618 def rev_range_hash(self, wire, node):
658 def rev_range_hash(self, wire, node):
619 repo = self._factory.repo(wire)
659 repo = self._factory.repo(wire)
620
660
621 def get_revs(repo, rev_opt):
661 def get_revs(repo, rev_opt):
622 if rev_opt:
662 if rev_opt:
623 revs = revrange(repo, rev_opt)
663 revs = revrange(repo, rev_opt)
624 if len(revs) == 0:
664 if len(revs) == 0:
625 return (nullrev, nullrev)
665 return (nullrev, nullrev)
626 return max(revs), min(revs)
666 return max(revs), min(revs)
627 else:
667 else:
628 return len(repo) - 1, 0
668 return len(repo) - 1, 0
629
669
630 stop, start = get_revs(repo, [node + ':'])
670 stop, start = get_revs(repo, [node + ':'])
631 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
671 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
632 return revs
672 return revs
633
673
634 @reraise_safe_exceptions
674 @reraise_safe_exceptions
635 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
675 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
636 other_path = kwargs.pop('other_path', None)
676 other_path = kwargs.pop('other_path', None)
637
677
638 # case when we want to compare two independent repositories
678 # case when we want to compare two independent repositories
639 if other_path and other_path != wire["path"]:
679 if other_path and other_path != wire["path"]:
640 baseui = self._factory._create_config(wire["config"])
680 baseui = self._factory._create_config(wire["config"])
641 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
681 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
642 else:
682 else:
643 repo = self._factory.repo(wire)
683 repo = self._factory.repo(wire)
644 return list(repo.revs(rev_spec, *args))
684 return list(repo.revs(rev_spec, *args))
645
685
646 @reraise_safe_exceptions
686 @reraise_safe_exceptions
647 def strip(self, wire, revision, update, backup):
687 def strip(self, wire, revision, update, backup):
648 repo = self._factory.repo(wire)
688 repo = self._factory.repo(wire)
649 ctx = repo[revision]
689 ctx = self._get_ctx(repo, revision)
650 hgext_strip(
690 hgext_strip(
651 repo.baseui, repo, ctx.node(), update=update, backup=backup)
691 repo.baseui, repo, ctx.node(), update=update, backup=backup)
652
692
653 @reraise_safe_exceptions
693 @reraise_safe_exceptions
654 def verify(self, wire,):
694 def verify(self, wire,):
655 repo = self._factory.repo(wire)
695 repo = self._factory.repo(wire)
656 baseui = self._factory._create_config(wire['config'])
696 baseui = self._factory._create_config(wire['config'])
657 baseui.setconfig('ui', 'quiet', 'false')
697 baseui.setconfig('ui', 'quiet', 'false')
658 output = io.BytesIO()
698 output = io.BytesIO()
659
699
660 def write(data, **unused_kwargs):
700 def write(data, **unused_kwargs):
661 output.write(data)
701 output.write(data)
662 baseui.write = write
702 baseui.write = write
663
703
664 repo.ui = baseui
704 repo.ui = baseui
665 verify.verify(repo)
705 verify.verify(repo)
666 return output.getvalue()
706 return output.getvalue()
667
707
668 @reraise_safe_exceptions
708 @reraise_safe_exceptions
669 def tag(self, wire, name, revision, message, local, user,
709 def tag(self, wire, name, revision, message, local, user,
670 tag_time, tag_timezone):
710 tag_time, tag_timezone):
671 repo = self._factory.repo(wire)
711 repo = self._factory.repo(wire)
672 ctx = repo[revision]
712 ctx = self._get_ctx(repo, revision)
673 node = ctx.node()
713 node = ctx.node()
674
714
675 date = (tag_time, tag_timezone)
715 date = (tag_time, tag_timezone)
676 try:
716 try:
677 hg_tag.tag(repo, name, node, message, local, user, date)
717 hg_tag.tag(repo, name, node, message, local, user, date)
678 except Abort as e:
718 except Abort as e:
679 log.exception("Tag operation aborted")
719 log.exception("Tag operation aborted")
680 # Exception can contain unicode which we convert
720 # Exception can contain unicode which we convert
681 raise exceptions.AbortException(e)(repr(e))
721 raise exceptions.AbortException(e)(repr(e))
682
722
683 @reraise_safe_exceptions
723 @reraise_safe_exceptions
684 def tags(self, wire):
724 def tags(self, wire):
685 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
686 return repo.tags()
726 return repo.tags()
687
727
688 @reraise_safe_exceptions
728 @reraise_safe_exceptions
689 def update(self, wire, node=None, clean=False):
729 def update(self, wire, node=None, clean=False):
690 repo = self._factory.repo(wire)
730 repo = self._factory.repo(wire)
691 baseui = self._factory._create_config(wire['config'])
731 baseui = self._factory._create_config(wire['config'])
692 commands.update(baseui, repo, node=node, clean=clean)
732 commands.update(baseui, repo, node=node, clean=clean)
693
733
694 @reraise_safe_exceptions
734 @reraise_safe_exceptions
695 def identify(self, wire):
735 def identify(self, wire):
696 repo = self._factory.repo(wire)
736 repo = self._factory.repo(wire)
697 baseui = self._factory._create_config(wire['config'])
737 baseui = self._factory._create_config(wire['config'])
698 output = io.BytesIO()
738 output = io.BytesIO()
699 baseui.write = output.write
739 baseui.write = output.write
700 # This is required to get a full node id
740 # This is required to get a full node id
701 baseui.debugflag = True
741 baseui.debugflag = True
702 commands.identify(baseui, repo, id=True)
742 commands.identify(baseui, repo, id=True)
703
743
704 return output.getvalue()
744 return output.getvalue()
705
745
706 @reraise_safe_exceptions
746 @reraise_safe_exceptions
707 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
747 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
708 hooks=True):
748 hooks=True):
709 repo = self._factory.repo(wire)
749 repo = self._factory.repo(wire)
710 baseui = self._factory._create_config(wire['config'], hooks=hooks)
750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
711
751
712 # Mercurial internally has a lot of logic that checks ONLY if
752 # Mercurial internally has a lot of logic that checks ONLY if
713 # option is defined, we just pass those if they are defined then
753 # option is defined, we just pass those if they are defined then
714 opts = {}
754 opts = {}
715 if bookmark:
755 if bookmark:
716 opts['bookmark'] = bookmark
756 opts['bookmark'] = bookmark
717 if branch:
757 if branch:
718 opts['branch'] = branch
758 opts['branch'] = branch
719 if revision:
759 if revision:
720 opts['rev'] = revision
760 opts['rev'] = revision
721
761
722 commands.pull(baseui, repo, source, **opts)
762 commands.pull(baseui, repo, source, **opts)
723
763
724 @reraise_safe_exceptions
764 @reraise_safe_exceptions
725 def heads(self, wire, branch=None):
765 def heads(self, wire, branch=None):
726 repo = self._factory.repo(wire)
766 repo = self._factory.repo(wire)
727 baseui = self._factory._create_config(wire['config'])
767 baseui = self._factory._create_config(wire['config'])
728 output = io.BytesIO()
768 output = io.BytesIO()
729
769
730 def write(data, **unused_kwargs):
770 def write(data, **unused_kwargs):
731 output.write(data)
771 output.write(data)
732
772
733 baseui.write = write
773 baseui.write = write
734 if branch:
774 if branch:
735 args = [branch]
775 args = [branch]
736 else:
776 else:
737 args = []
777 args = []
738 commands.heads(baseui, repo, template='{node} ', *args)
778 commands.heads(baseui, repo, template='{node} ', *args)
739
779
740 return output.getvalue()
780 return output.getvalue()
741
781
742 @reraise_safe_exceptions
782 @reraise_safe_exceptions
743 def ancestor(self, wire, revision1, revision2):
783 def ancestor(self, wire, revision1, revision2):
744 repo = self._factory.repo(wire)
784 repo = self._factory.repo(wire)
745 changelog = repo.changelog
785 changelog = repo.changelog
746 lookup = repo.lookup
786 lookup = repo.lookup
747 a = changelog.ancestor(lookup(revision1), lookup(revision2))
787 a = changelog.ancestor(lookup(revision1), lookup(revision2))
748 return hex(a)
788 return hex(a)
749
789
750 @reraise_safe_exceptions
790 @reraise_safe_exceptions
751 def push(self, wire, revisions, dest_path, hooks=True,
791 def push(self, wire, revisions, dest_path, hooks=True,
752 push_branches=False):
792 push_branches=False):
753 repo = self._factory.repo(wire)
793 repo = self._factory.repo(wire)
754 baseui = self._factory._create_config(wire['config'], hooks=hooks)
794 baseui = self._factory._create_config(wire['config'], hooks=hooks)
755 commands.push(baseui, repo, dest=dest_path, rev=revisions,
795 commands.push(baseui, repo, dest=dest_path, rev=revisions,
756 new_branch=push_branches)
796 new_branch=push_branches)
757
797
758 @reraise_safe_exceptions
798 @reraise_safe_exceptions
759 def merge(self, wire, revision):
799 def merge(self, wire, revision):
760 repo = self._factory.repo(wire)
800 repo = self._factory.repo(wire)
761 baseui = self._factory._create_config(wire['config'])
801 baseui = self._factory._create_config(wire['config'])
762 repo.ui.setconfig('ui', 'merge', 'internal:dump')
802 repo.ui.setconfig('ui', 'merge', 'internal:dump')
763
803
764 # In case of sub repositories are used mercurial prompts the user in
804 # In case of sub repositories are used mercurial prompts the user in
765 # case of merge conflicts or different sub repository sources. By
805 # case of merge conflicts or different sub repository sources. By
766 # setting the interactive flag to `False` mercurial doesn't prompt the
806 # setting the interactive flag to `False` mercurial doesn't prompt the
767 # used but instead uses a default value.
807 # used but instead uses a default value.
768 repo.ui.setconfig('ui', 'interactive', False)
808 repo.ui.setconfig('ui', 'interactive', False)
809 commands.merge(baseui, repo, rev=revision)
769
810
770 commands.merge(baseui, repo, rev=revision)
811 @reraise_safe_exceptions
812 def merge_state(self, wire):
813 repo = self._factory.repo(wire)
814 repo.ui.setconfig('ui', 'merge', 'internal:dump')
815
816 # In case of sub repositories are used mercurial prompts the user in
817 # case of merge conflicts or different sub repository sources. By
818 # setting the interactive flag to `False` mercurial doesn't prompt the
819 # used but instead uses a default value.
820 repo.ui.setconfig('ui', 'interactive', False)
821 ms = hg_merge.mergestate(repo)
822 return [x for x in ms.unresolved()]
771
823
772 @reraise_safe_exceptions
824 @reraise_safe_exceptions
773 def commit(self, wire, message, username, close_branch=False):
825 def commit(self, wire, message, username, close_branch=False):
774 repo = self._factory.repo(wire)
826 repo = self._factory.repo(wire)
775 baseui = self._factory._create_config(wire['config'])
827 baseui = self._factory._create_config(wire['config'])
776 repo.ui.setconfig('ui', 'username', username)
828 repo.ui.setconfig('ui', 'username', username)
777 commands.commit(baseui, repo, message=message, close_branch=close_branch)
829 commands.commit(baseui, repo, message=message, close_branch=close_branch)
778
830
831
779 @reraise_safe_exceptions
832 @reraise_safe_exceptions
780 def rebase(self, wire, source=None, dest=None, abort=False):
833 def rebase(self, wire, source=None, dest=None, abort=False):
781 repo = self._factory.repo(wire)
834 repo = self._factory.repo(wire)
782 baseui = self._factory._create_config(wire['config'])
835 baseui = self._factory._create_config(wire['config'])
783 repo.ui.setconfig('ui', 'merge', 'internal:dump')
836 repo.ui.setconfig('ui', 'merge', 'internal:dump')
784 rebase.rebase(
837 rebase.rebase(
785 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
838 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
786
839
787 @reraise_safe_exceptions
840 @reraise_safe_exceptions
788 def bookmark(self, wire, bookmark, revision=None):
841 def bookmark(self, wire, bookmark, revision=None):
789 repo = self._factory.repo(wire)
842 repo = self._factory.repo(wire)
790 baseui = self._factory._create_config(wire['config'])
843 baseui = self._factory._create_config(wire['config'])
791 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
844 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
792
845
793 @reraise_safe_exceptions
846 @reraise_safe_exceptions
794 def install_hooks(self, wire, force=False):
847 def install_hooks(self, wire, force=False):
795 # we don't need any special hooks for Mercurial
848 # we don't need any special hooks for Mercurial
796 pass
849 pass
797
850
798 @reraise_safe_exceptions
851 @reraise_safe_exceptions
799 def get_hooks_info(self, wire):
852 def get_hooks_info(self, wire):
800 return {
853 return {
801 'pre_version': vcsserver.__version__,
854 'pre_version': vcsserver.__version__,
802 'post_version': vcsserver.__version__,
855 'post_version': vcsserver.__version__,
803 }
856 }
@@ -1,63 +1,74 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24 # patch demandimport, due to bug in mercurial when it always triggers
24 # patch demandimport, due to bug in mercurial when it always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 demandimport.enable = lambda *args, **kwargs: 1
26 demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39 from mercurial import tags as hg_tag
39 from mercurial import tags as hg_tag
40
40
41 from mercurial.commands import clone, nullid, pull
41 from mercurial.commands import clone, nullid, pull
42 from mercurial.context import memctx, memfilectx
42 from mercurial.context import memctx, memfilectx
43 from mercurial.error import (
43 from mercurial.error import (
44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 RequirementError)
45 RequirementError, ProgrammingError)
46 from mercurial.hgweb import hgweb_mod
46 from mercurial.hgweb import hgweb_mod
47 from mercurial.localrepo import localrepository
47 from mercurial.localrepo import instance
48 from mercurial.match import match
48 from mercurial.match import match
49 from mercurial.mdiff import diffopts
49 from mercurial.mdiff import diffopts
50 from mercurial.node import bin, hex
50 from mercurial.node import bin, hex
51 from mercurial.encoding import tolocal
51 from mercurial.encoding import tolocal
52 from mercurial.discovery import findcommonoutgoing
52 from mercurial.discovery import findcommonoutgoing
53 from mercurial.hg import peer
53 from mercurial.hg import peer
54 from mercurial.httppeer import makepeer
54 from mercurial.httppeer import makepeer
55 from mercurial.util import url as hg_url
55 from mercurial.util import url as hg_url
56 from mercurial.scmutil import revrange
56 from mercurial.scmutil import revrange, revsymbol
57 from mercurial.node import nullrev
57 from mercurial.node import nullrev
58 from mercurial import exchange
58 from mercurial import exchange
59 from hgext import largefiles
59 from hgext import largefiles
60
60
61 # those authnadlers are patched for python 2.6.5 bug an
61 # those authnadlers are patched for python 2.6.5 bug an
62 # infinit looping when given invalid resources
62 # infinit looping when given invalid resources
63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
64
65
66 def get_ctx(repo, ref):
67 try:
68 ctx = repo[ref]
69 except ProgrammingError:
70 # we're unable to find the rev using a regular lookup, we fallback
71 # to slower, but backward compat revsymbol usage
72 ctx = revsymbol(repo, ref)
73
74 return ctx
@@ -1,203 +1,205 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import re
20 import re
21 import os
21 import os
22 import sys
22 import sys
23 import datetime
23 import datetime
24 import logging
24 import logging
25 import pkg_resources
25 import pkg_resources
26
26
27 import vcsserver
27 import vcsserver
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31
31
32 def get_git_hooks_path(repo_path, bare):
32 def get_git_hooks_path(repo_path, bare):
33 hooks_path = os.path.join(repo_path, 'hooks')
33 hooks_path = os.path.join(repo_path, 'hooks')
34 if not bare:
34 if not bare:
35 hooks_path = os.path.join(repo_path, '.git', 'hooks')
35 hooks_path = os.path.join(repo_path, '.git', 'hooks')
36
36
37 return hooks_path
37 return hooks_path
38
38
39
39
40 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
40 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
41 """
41 """
42 Creates a RhodeCode hook inside a git repository
42 Creates a RhodeCode hook inside a git repository
43
43
44 :param repo_path: path to repository
44 :param repo_path: path to repository
45 :param executable: binary executable to put in the hooks
45 :param executable: binary executable to put in the hooks
46 :param force_create: Create even if same name hook exists
46 :param force_create: Create even if same name hook exists
47 """
47 """
48 executable = executable or sys.executable
48 executable = executable or sys.executable
49 hooks_path = get_git_hooks_path(repo_path, bare)
49 hooks_path = get_git_hooks_path(repo_path, bare)
50
50
51 if not os.path.isdir(hooks_path):
51 if not os.path.isdir(hooks_path):
52 os.makedirs(hooks_path, mode=0o777)
52 os.makedirs(hooks_path, mode=0o777)
53
53
54 tmpl_post = pkg_resources.resource_string(
54 tmpl_post = pkg_resources.resource_string(
55 'vcsserver', '/'.join(
55 'vcsserver', '/'.join(
56 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
56 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
57 tmpl_pre = pkg_resources.resource_string(
57 tmpl_pre = pkg_resources.resource_string(
58 'vcsserver', '/'.join(
58 'vcsserver', '/'.join(
59 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
59 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
60
60
61 path = '' # not used for now
61 path = '' # not used for now
62 timestamp = datetime.datetime.utcnow().isoformat()
62 timestamp = datetime.datetime.utcnow().isoformat()
63
63
64 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
64 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
65 log.debug('Installing git hook in repo %s', repo_path)
65 log.debug('Installing git hook in repo %s', repo_path)
66 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
66 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
67 _rhodecode_hook = check_rhodecode_hook(_hook_file)
67 _rhodecode_hook = check_rhodecode_hook(_hook_file)
68
68
69 if _rhodecode_hook or force_create:
69 if _rhodecode_hook or force_create:
70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
71 try:
71 try:
72 with open(_hook_file, 'wb') as f:
72 with open(_hook_file, 'wb') as f:
73 template = template.replace(
73 template = template.replace(
74 '_TMPL_', vcsserver.__version__)
74 '_TMPL_', vcsserver.__version__)
75 template = template.replace('_DATE_', timestamp)
75 template = template.replace('_DATE_', timestamp)
76 template = template.replace('_ENV_', executable)
76 template = template.replace('_ENV_', executable)
77 template = template.replace('_PATH_', path)
77 template = template.replace('_PATH_', path)
78 f.write(template)
78 f.write(template)
79 os.chmod(_hook_file, 0o755)
79 os.chmod(_hook_file, 0o755)
80 except IOError:
80 except IOError:
81 log.exception('error writing hook file %s', _hook_file)
81 log.exception('error writing hook file %s', _hook_file)
82 else:
82 else:
83 log.debug('skipping writing hook file')
83 log.debug('skipping writing hook file')
84
84
85 return True
85 return True
86
86
87
87
88 def get_svn_hooks_path(repo_path):
88 def get_svn_hooks_path(repo_path):
89 hooks_path = os.path.join(repo_path, 'hooks')
89 hooks_path = os.path.join(repo_path, 'hooks')
90
90
91 return hooks_path
91 return hooks_path
92
92
93
93
94 def install_svn_hooks(repo_path, executable=None, force_create=False):
94 def install_svn_hooks(repo_path, executable=None, force_create=False):
95 """
95 """
96 Creates RhodeCode hooks inside a svn repository
96 Creates RhodeCode hooks inside a svn repository
97
97
98 :param repo_path: path to repository
98 :param repo_path: path to repository
99 :param executable: binary executable to put in the hooks
99 :param executable: binary executable to put in the hooks
100 :param force_create: Create even if same name hook exists
100 :param force_create: Create even if same name hook exists
101 """
101 """
102 executable = executable or sys.executable
102 executable = executable or sys.executable
103 hooks_path = get_svn_hooks_path(repo_path)
103 hooks_path = get_svn_hooks_path(repo_path)
104 if not os.path.isdir(hooks_path):
104 if not os.path.isdir(hooks_path):
105 os.makedirs(hooks_path, mode=0o777)
105 os.makedirs(hooks_path, mode=0o777)
106
106
107 tmpl_post = pkg_resources.resource_string(
107 tmpl_post = pkg_resources.resource_string(
108 'vcsserver', '/'.join(
108 'vcsserver', '/'.join(
109 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
109 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
110 tmpl_pre = pkg_resources.resource_string(
110 tmpl_pre = pkg_resources.resource_string(
111 'vcsserver', '/'.join(
111 'vcsserver', '/'.join(
112 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
112 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
113
113
114 path = '' # not used for now
114 path = '' # not used for now
115 timestamp = datetime.datetime.utcnow().isoformat()
115 timestamp = datetime.datetime.utcnow().isoformat()
116
116
117 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
117 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
118 log.debug('Installing svn hook in repo %s', repo_path)
118 log.debug('Installing svn hook in repo %s', repo_path)
119 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
119 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
120 _rhodecode_hook = check_rhodecode_hook(_hook_file)
120 _rhodecode_hook = check_rhodecode_hook(_hook_file)
121
121
122 if _rhodecode_hook or force_create:
122 if _rhodecode_hook or force_create:
123 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
123 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
124
124
125 try:
125 try:
126 with open(_hook_file, 'wb') as f:
126 with open(_hook_file, 'wb') as f:
127 template = template.replace(
127 template = template.replace(
128 '_TMPL_', vcsserver.__version__)
128 '_TMPL_', vcsserver.__version__)
129 template = template.replace('_DATE_', timestamp)
129 template = template.replace('_DATE_', timestamp)
130 template = template.replace('_ENV_', executable)
130 template = template.replace('_ENV_', executable)
131 template = template.replace('_PATH_', path)
131 template = template.replace('_PATH_', path)
132
132
133 f.write(template)
133 f.write(template)
134 os.chmod(_hook_file, 0o755)
134 os.chmod(_hook_file, 0o755)
135 except IOError:
135 except IOError:
136 log.exception('error writing hook file %s', _hook_file)
136 log.exception('error writing hook file %s', _hook_file)
137 else:
137 else:
138 log.debug('skipping writing hook file')
138 log.debug('skipping writing hook file')
139
139
140 return True
140 return True
141
141
142
142
143 def get_version_from_hook(hook_path):
143 def get_version_from_hook(hook_path):
144 version = ''
144 version = ''
145 hook_content = read_hook_content(hook_path)
145 hook_content = read_hook_content(hook_path)
146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
147 if matches:
147 if matches:
148 try:
148 try:
149 version = matches.groups()[0]
149 version = matches.groups()[0]
150 log.debug('got version %s from hooks.', version)
150 log.debug('got version %s from hooks.', version)
151 except Exception:
151 except Exception:
152 log.exception("Exception while reading the hook version.")
152 log.exception("Exception while reading the hook version.")
153 return version.replace("'", "")
153 return version.replace("'", "")
154
154
155
155
156 def check_rhodecode_hook(hook_path):
156 def check_rhodecode_hook(hook_path):
157 """
157 """
158 Check if the hook was created by RhodeCode
158 Check if the hook was created by RhodeCode
159 """
159 """
160 if not os.path.exists(hook_path):
160 if not os.path.exists(hook_path):
161 return True
161 return True
162
162
163 log.debug('hook exists, checking if it is from RhodeCode')
163 log.debug('hook exists, checking if it is from RhodeCode')
164
164
165 version = get_version_from_hook(hook_path)
165 version = get_version_from_hook(hook_path)
166 if version:
166 if version:
167 return True
167 return True
168
168
169 return False
169 return False
170
170
171
171
172 def read_hook_content(hook_path):
172 def read_hook_content(hook_path):
173 with open(hook_path, 'rb') as f:
173 content = ''
174 content = f.read()
174 if os.path.isfile(hook_path):
175 with open(hook_path, 'rb') as f:
176 content = f.read()
175 return content
177 return content
176
178
177
179
178 def get_git_pre_hook_version(repo_path, bare):
180 def get_git_pre_hook_version(repo_path, bare):
179 hooks_path = get_git_hooks_path(repo_path, bare)
181 hooks_path = get_git_hooks_path(repo_path, bare)
180 _hook_file = os.path.join(hooks_path, 'pre-receive')
182 _hook_file = os.path.join(hooks_path, 'pre-receive')
181 version = get_version_from_hook(_hook_file)
183 version = get_version_from_hook(_hook_file)
182 return version
184 return version
183
185
184
186
185 def get_git_post_hook_version(repo_path, bare):
187 def get_git_post_hook_version(repo_path, bare):
186 hooks_path = get_git_hooks_path(repo_path, bare)
188 hooks_path = get_git_hooks_path(repo_path, bare)
187 _hook_file = os.path.join(hooks_path, 'post-receive')
189 _hook_file = os.path.join(hooks_path, 'post-receive')
188 version = get_version_from_hook(_hook_file)
190 version = get_version_from_hook(_hook_file)
189 return version
191 return version
190
192
191
193
192 def get_svn_pre_hook_version(repo_path):
194 def get_svn_pre_hook_version(repo_path):
193 hooks_path = get_svn_hooks_path(repo_path)
195 hooks_path = get_svn_hooks_path(repo_path)
194 _hook_file = os.path.join(hooks_path, 'pre-commit')
196 _hook_file = os.path.join(hooks_path, 'pre-commit')
195 version = get_version_from_hook(_hook_file)
197 version = get_version_from_hook(_hook_file)
196 return version
198 return version
197
199
198
200
199 def get_svn_post_hook_version(repo_path):
201 def get_svn_post_hook_version(repo_path):
200 hooks_path = get_svn_hooks_path(repo_path)
202 hooks_path = get_svn_hooks_path(repo_path)
201 _hook_file = os.path.join(hooks_path, 'post-commit')
203 _hook_file = os.path.join(hooks_path, 'post-commit')
202 version = get_version_from_hook(_hook_file)
204 version = get_version_from_hook(_hook_file)
203 return version
205 return version
@@ -1,710 +1,711 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver.hgcompat import get_ctx
36
37
37 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
38
39
39
40
40 class HooksHttpClient(object):
41 class HooksHttpClient(object):
41 connection = None
42 connection = None
42
43
43 def __init__(self, hooks_uri):
44 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
45 self.hooks_uri = hooks_uri
45
46
46 def __call__(self, method, extras):
47 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
48 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
49 body = self._serialize(method, extras)
49 try:
50 try:
50 connection.request('POST', '/', body)
51 connection.request('POST', '/', body)
51 except Exception:
52 except Exception:
52 log.error('Connection failed on %s', connection)
53 log.error('Connection failed on %s', connection)
53 raise
54 raise
54 response = connection.getresponse()
55 response = connection.getresponse()
55
56
56 response_data = response.read()
57 response_data = response.read()
57
58
58 try:
59 try:
59 return json.loads(response_data)
60 return json.loads(response_data)
60 except Exception:
61 except Exception:
61 log.exception('Failed to decode hook response json data. '
62 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
63 'response_code:%s, raw_data:%s',
63 response.status, response_data)
64 response.status, response_data)
64 raise
65 raise
65
66
66 def _serialize(self, hook_name, extras):
67 def _serialize(self, hook_name, extras):
67 data = {
68 data = {
68 'method': hook_name,
69 'method': hook_name,
69 'extras': extras
70 'extras': extras
70 }
71 }
71 return json.dumps(data)
72 return json.dumps(data)
72
73
73
74
74 class HooksDummyClient(object):
75 class HooksDummyClient(object):
75 def __init__(self, hooks_module):
76 def __init__(self, hooks_module):
76 self._hooks_module = importlib.import_module(hooks_module)
77 self._hooks_module = importlib.import_module(hooks_module)
77
78
78 def __call__(self, hook_name, extras):
79 def __call__(self, hook_name, extras):
79 with self._hooks_module.Hooks() as hooks:
80 with self._hooks_module.Hooks() as hooks:
80 return getattr(hooks, hook_name)(extras)
81 return getattr(hooks, hook_name)(extras)
81
82
82
83
83 class RemoteMessageWriter(object):
84 class RemoteMessageWriter(object):
84 """Writer base class."""
85 """Writer base class."""
85 def write(self, message):
86 def write(self, message):
86 raise NotImplementedError()
87 raise NotImplementedError()
87
88
88
89
89 class HgMessageWriter(RemoteMessageWriter):
90 class HgMessageWriter(RemoteMessageWriter):
90 """Writer that knows how to send messages to mercurial clients."""
91 """Writer that knows how to send messages to mercurial clients."""
91
92
92 def __init__(self, ui):
93 def __init__(self, ui):
93 self.ui = ui
94 self.ui = ui
94
95
95 def write(self, message):
96 def write(self, message):
96 # TODO: Check why the quiet flag is set by default.
97 # TODO: Check why the quiet flag is set by default.
97 old = self.ui.quiet
98 old = self.ui.quiet
98 self.ui.quiet = False
99 self.ui.quiet = False
99 self.ui.status(message.encode('utf-8'))
100 self.ui.status(message.encode('utf-8'))
100 self.ui.quiet = old
101 self.ui.quiet = old
101
102
102
103
103 class GitMessageWriter(RemoteMessageWriter):
104 class GitMessageWriter(RemoteMessageWriter):
104 """Writer that knows how to send messages to git clients."""
105 """Writer that knows how to send messages to git clients."""
105
106
106 def __init__(self, stdout=None):
107 def __init__(self, stdout=None):
107 self.stdout = stdout or sys.stdout
108 self.stdout = stdout or sys.stdout
108
109
109 def write(self, message):
110 def write(self, message):
110 self.stdout.write(message.encode('utf-8'))
111 self.stdout.write(message.encode('utf-8'))
111
112
112
113
113 class SvnMessageWriter(RemoteMessageWriter):
114 class SvnMessageWriter(RemoteMessageWriter):
114 """Writer that knows how to send messages to svn clients."""
115 """Writer that knows how to send messages to svn clients."""
115
116
116 def __init__(self, stderr=None):
117 def __init__(self, stderr=None):
117 # SVN needs data sent to stderr for back-to-client messaging
118 # SVN needs data sent to stderr for back-to-client messaging
118 self.stderr = stderr or sys.stderr
119 self.stderr = stderr or sys.stderr
119
120
120 def write(self, message):
121 def write(self, message):
121 self.stderr.write(message.encode('utf-8'))
122 self.stderr.write(message.encode('utf-8'))
122
123
123
124
124 def _handle_exception(result):
125 def _handle_exception(result):
125 exception_class = result.get('exception')
126 exception_class = result.get('exception')
126 exception_traceback = result.get('exception_traceback')
127 exception_traceback = result.get('exception_traceback')
127
128
128 if exception_traceback:
129 if exception_traceback:
129 log.error('Got traceback from remote call:%s', exception_traceback)
130 log.error('Got traceback from remote call:%s', exception_traceback)
130
131
131 if exception_class == 'HTTPLockedRC':
132 if exception_class == 'HTTPLockedRC':
132 raise exceptions.RepositoryLockedException()(*result['exception_args'])
133 raise exceptions.RepositoryLockedException()(*result['exception_args'])
133 elif exception_class == 'HTTPBranchProtected':
134 elif exception_class == 'HTTPBranchProtected':
134 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
135 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
135 elif exception_class == 'RepositoryError':
136 elif exception_class == 'RepositoryError':
136 raise exceptions.VcsException()(*result['exception_args'])
137 raise exceptions.VcsException()(*result['exception_args'])
137 elif exception_class:
138 elif exception_class:
138 raise Exception('Got remote exception "%s" with args "%s"' %
139 raise Exception('Got remote exception "%s" with args "%s"' %
139 (exception_class, result['exception_args']))
140 (exception_class, result['exception_args']))
140
141
141
142
142 def _get_hooks_client(extras):
143 def _get_hooks_client(extras):
143 if 'hooks_uri' in extras:
144 if 'hooks_uri' in extras:
144 protocol = extras.get('hooks_protocol')
145 protocol = extras.get('hooks_protocol')
145 return HooksHttpClient(extras['hooks_uri'])
146 return HooksHttpClient(extras['hooks_uri'])
146 else:
147 else:
147 return HooksDummyClient(extras['hooks_module'])
148 return HooksDummyClient(extras['hooks_module'])
148
149
149
150
150 def _call_hook(hook_name, extras, writer):
151 def _call_hook(hook_name, extras, writer):
151 hooks_client = _get_hooks_client(extras)
152 hooks_client = _get_hooks_client(extras)
152 log.debug('Hooks, using client:%s', hooks_client)
153 log.debug('Hooks, using client:%s', hooks_client)
153 result = hooks_client(hook_name, extras)
154 result = hooks_client(hook_name, extras)
154 log.debug('Hooks got result: %s', result)
155 log.debug('Hooks got result: %s', result)
155
156
156 _handle_exception(result)
157 _handle_exception(result)
157 writer.write(result['output'])
158 writer.write(result['output'])
158
159
159 return result['status']
160 return result['status']
160
161
161
162
162 def _extras_from_ui(ui):
163 def _extras_from_ui(ui):
163 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
164 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
164 if not hook_data:
165 if not hook_data:
165 # maybe it's inside environ ?
166 # maybe it's inside environ ?
166 env_hook_data = os.environ.get('RC_SCM_DATA')
167 env_hook_data = os.environ.get('RC_SCM_DATA')
167 if env_hook_data:
168 if env_hook_data:
168 hook_data = env_hook_data
169 hook_data = env_hook_data
169
170
170 extras = {}
171 extras = {}
171 if hook_data:
172 if hook_data:
172 extras = json.loads(hook_data)
173 extras = json.loads(hook_data)
173 return extras
174 return extras
174
175
175
176
176 def _rev_range_hash(repo, node, check_heads=False):
177 def _rev_range_hash(repo, node, check_heads=False):
177
178
178 commits = []
179 commits = []
179 revs = []
180 revs = []
180 start = repo[node].rev()
181 start = get_ctx(repo, node).rev()
181 end = len(repo)
182 end = len(repo)
182 for rev in range(start, end):
183 for rev in range(start, end):
183 revs.append(rev)
184 revs.append(rev)
184 ctx = repo[rev]
185 ctx = get_ctx(repo, rev)
185 commit_id = mercurial.node.hex(ctx.node())
186 commit_id = mercurial.node.hex(ctx.node())
186 branch = ctx.branch()
187 branch = ctx.branch()
187 commits.append((commit_id, branch))
188 commits.append((commit_id, branch))
188
189
189 parent_heads = []
190 parent_heads = []
190 if check_heads:
191 if check_heads:
191 parent_heads = _check_heads(repo, start, end, revs)
192 parent_heads = _check_heads(repo, start, end, revs)
192 return commits, parent_heads
193 return commits, parent_heads
193
194
194
195
195 def _check_heads(repo, start, end, commits):
196 def _check_heads(repo, start, end, commits):
196 changelog = repo.changelog
197 changelog = repo.changelog
197 parents = set()
198 parents = set()
198
199
199 for new_rev in commits:
200 for new_rev in commits:
200 for p in changelog.parentrevs(new_rev):
201 for p in changelog.parentrevs(new_rev):
201 if p == mercurial.node.nullrev:
202 if p == mercurial.node.nullrev:
202 continue
203 continue
203 if p < start:
204 if p < start:
204 parents.add(p)
205 parents.add(p)
205
206
206 for p in parents:
207 for p in parents:
207 branch = repo[p].branch()
208 branch = get_ctx(repo, p).branch()
208 # The heads descending from that parent, on the same branch
209 # The heads descending from that parent, on the same branch
209 parent_heads = set([p])
210 parent_heads = set([p])
210 reachable = set([p])
211 reachable = set([p])
211 for x in xrange(p + 1, end):
212 for x in xrange(p + 1, end):
212 if repo[x].branch() != branch:
213 if get_ctx(repo, x).branch() != branch:
213 continue
214 continue
214 for pp in changelog.parentrevs(x):
215 for pp in changelog.parentrevs(x):
215 if pp in reachable:
216 if pp in reachable:
216 reachable.add(x)
217 reachable.add(x)
217 parent_heads.discard(pp)
218 parent_heads.discard(pp)
218 parent_heads.add(x)
219 parent_heads.add(x)
219 # More than one head? Suggest merging
220 # More than one head? Suggest merging
220 if len(parent_heads) > 1:
221 if len(parent_heads) > 1:
221 return list(parent_heads)
222 return list(parent_heads)
222
223
223 return []
224 return []
224
225
225
226
226 def _get_git_env():
227 def _get_git_env():
227 env = {}
228 env = {}
228 for k, v in os.environ.items():
229 for k, v in os.environ.items():
229 if k.startswith('GIT'):
230 if k.startswith('GIT'):
230 env[k] = v
231 env[k] = v
231
232
232 # serialized version
233 # serialized version
233 return [(k, v) for k, v in env.items()]
234 return [(k, v) for k, v in env.items()]
234
235
235
236
236 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
237 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
237 env = {}
238 env = {}
238 for k, v in os.environ.items():
239 for k, v in os.environ.items():
239 if k.startswith('HG'):
240 if k.startswith('HG'):
240 env[k] = v
241 env[k] = v
241
242
242 env['HG_NODE'] = old_rev
243 env['HG_NODE'] = old_rev
243 env['HG_NODE_LAST'] = new_rev
244 env['HG_NODE_LAST'] = new_rev
244 env['HG_TXNID'] = txnid
245 env['HG_TXNID'] = txnid
245 env['HG_PENDING'] = repo_path
246 env['HG_PENDING'] = repo_path
246
247
247 return [(k, v) for k, v in env.items()]
248 return [(k, v) for k, v in env.items()]
248
249
249
250
250 def repo_size(ui, repo, **kwargs):
251 def repo_size(ui, repo, **kwargs):
251 extras = _extras_from_ui(ui)
252 extras = _extras_from_ui(ui)
252 return _call_hook('repo_size', extras, HgMessageWriter(ui))
253 return _call_hook('repo_size', extras, HgMessageWriter(ui))
253
254
254
255
255 def pre_pull(ui, repo, **kwargs):
256 def pre_pull(ui, repo, **kwargs):
256 extras = _extras_from_ui(ui)
257 extras = _extras_from_ui(ui)
257 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
258 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
258
259
259
260
260 def pre_pull_ssh(ui, repo, **kwargs):
261 def pre_pull_ssh(ui, repo, **kwargs):
261 extras = _extras_from_ui(ui)
262 extras = _extras_from_ui(ui)
262 if extras and extras.get('SSH'):
263 if extras and extras.get('SSH'):
263 return pre_pull(ui, repo, **kwargs)
264 return pre_pull(ui, repo, **kwargs)
264 return 0
265 return 0
265
266
266
267
267 def post_pull(ui, repo, **kwargs):
268 def post_pull(ui, repo, **kwargs):
268 extras = _extras_from_ui(ui)
269 extras = _extras_from_ui(ui)
269 return _call_hook('post_pull', extras, HgMessageWriter(ui))
270 return _call_hook('post_pull', extras, HgMessageWriter(ui))
270
271
271
272
272 def post_pull_ssh(ui, repo, **kwargs):
273 def post_pull_ssh(ui, repo, **kwargs):
273 extras = _extras_from_ui(ui)
274 extras = _extras_from_ui(ui)
274 if extras and extras.get('SSH'):
275 if extras and extras.get('SSH'):
275 return post_pull(ui, repo, **kwargs)
276 return post_pull(ui, repo, **kwargs)
276 return 0
277 return 0
277
278
278
279
279 def pre_push(ui, repo, node=None, **kwargs):
280 def pre_push(ui, repo, node=None, **kwargs):
280 """
281 """
281 Mercurial pre_push hook
282 Mercurial pre_push hook
282 """
283 """
283 extras = _extras_from_ui(ui)
284 extras = _extras_from_ui(ui)
284 detect_force_push = extras.get('detect_force_push')
285 detect_force_push = extras.get('detect_force_push')
285
286
286 rev_data = []
287 rev_data = []
287 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
288 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
288 branches = collections.defaultdict(list)
289 branches = collections.defaultdict(list)
289 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
290 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
290 for commit_id, branch in commits:
291 for commit_id, branch in commits:
291 branches[branch].append(commit_id)
292 branches[branch].append(commit_id)
292
293
293 for branch, commits in branches.items():
294 for branch, commits in branches.items():
294 old_rev = kwargs.get('node_last') or commits[0]
295 old_rev = kwargs.get('node_last') or commits[0]
295 rev_data.append({
296 rev_data.append({
296 'total_commits': len(commits),
297 'total_commits': len(commits),
297 'old_rev': old_rev,
298 'old_rev': old_rev,
298 'new_rev': commits[-1],
299 'new_rev': commits[-1],
299 'ref': '',
300 'ref': '',
300 'type': 'branch',
301 'type': 'branch',
301 'name': branch,
302 'name': branch,
302 })
303 })
303
304
304 for push_ref in rev_data:
305 for push_ref in rev_data:
305 push_ref['multiple_heads'] = _heads
306 push_ref['multiple_heads'] = _heads
306
307
307 repo_path = os.path.join(
308 repo_path = os.path.join(
308 extras.get('repo_store', ''), extras.get('repository', ''))
309 extras.get('repo_store', ''), extras.get('repository', ''))
309 push_ref['hg_env'] = _get_hg_env(
310 push_ref['hg_env'] = _get_hg_env(
310 old_rev=push_ref['old_rev'],
311 old_rev=push_ref['old_rev'],
311 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
312 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
312 repo_path=repo_path)
313 repo_path=repo_path)
313
314
314 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
315 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
315 extras['commit_ids'] = rev_data
316 extras['commit_ids'] = rev_data
316
317
317 return _call_hook('pre_push', extras, HgMessageWriter(ui))
318 return _call_hook('pre_push', extras, HgMessageWriter(ui))
318
319
319
320
320 def pre_push_ssh(ui, repo, node=None, **kwargs):
321 def pre_push_ssh(ui, repo, node=None, **kwargs):
321 extras = _extras_from_ui(ui)
322 extras = _extras_from_ui(ui)
322 if extras.get('SSH'):
323 if extras.get('SSH'):
323 return pre_push(ui, repo, node, **kwargs)
324 return pre_push(ui, repo, node, **kwargs)
324
325
325 return 0
326 return 0
326
327
327
328
328 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
329 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
329 """
330 """
330 Mercurial pre_push hook for SSH
331 Mercurial pre_push hook for SSH
331 """
332 """
332 extras = _extras_from_ui(ui)
333 extras = _extras_from_ui(ui)
333 if extras.get('SSH'):
334 if extras.get('SSH'):
334 permission = extras['SSH_PERMISSIONS']
335 permission = extras['SSH_PERMISSIONS']
335
336
336 if 'repository.write' == permission or 'repository.admin' == permission:
337 if 'repository.write' == permission or 'repository.admin' == permission:
337 return 0
338 return 0
338
339
339 # non-zero ret code
340 # non-zero ret code
340 return 1
341 return 1
341
342
342 return 0
343 return 0
343
344
344
345
345 def post_push(ui, repo, node, **kwargs):
346 def post_push(ui, repo, node, **kwargs):
346 """
347 """
347 Mercurial post_push hook
348 Mercurial post_push hook
348 """
349 """
349 extras = _extras_from_ui(ui)
350 extras = _extras_from_ui(ui)
350
351
351 commit_ids = []
352 commit_ids = []
352 branches = []
353 branches = []
353 bookmarks = []
354 bookmarks = []
354 tags = []
355 tags = []
355
356
356 commits, _heads = _rev_range_hash(repo, node)
357 commits, _heads = _rev_range_hash(repo, node)
357 for commit_id, branch in commits:
358 for commit_id, branch in commits:
358 commit_ids.append(commit_id)
359 commit_ids.append(commit_id)
359 if branch not in branches:
360 if branch not in branches:
360 branches.append(branch)
361 branches.append(branch)
361
362
362 if hasattr(ui, '_rc_pushkey_branches'):
363 if hasattr(ui, '_rc_pushkey_branches'):
363 bookmarks = ui._rc_pushkey_branches
364 bookmarks = ui._rc_pushkey_branches
364
365
365 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
366 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
366 extras['commit_ids'] = commit_ids
367 extras['commit_ids'] = commit_ids
367 extras['new_refs'] = {
368 extras['new_refs'] = {
368 'branches': branches,
369 'branches': branches,
369 'bookmarks': bookmarks,
370 'bookmarks': bookmarks,
370 'tags': tags
371 'tags': tags
371 }
372 }
372
373
373 return _call_hook('post_push', extras, HgMessageWriter(ui))
374 return _call_hook('post_push', extras, HgMessageWriter(ui))
374
375
375
376
376 def post_push_ssh(ui, repo, node, **kwargs):
377 def post_push_ssh(ui, repo, node, **kwargs):
377 """
378 """
378 Mercurial post_push hook for SSH
379 Mercurial post_push hook for SSH
379 """
380 """
380 if _extras_from_ui(ui).get('SSH'):
381 if _extras_from_ui(ui).get('SSH'):
381 return post_push(ui, repo, node, **kwargs)
382 return post_push(ui, repo, node, **kwargs)
382 return 0
383 return 0
383
384
384
385
385 def key_push(ui, repo, **kwargs):
386 def key_push(ui, repo, **kwargs):
386 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
387 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
387 # store new bookmarks in our UI object propagated later to post_push
388 # store new bookmarks in our UI object propagated later to post_push
388 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
389 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
389 return
390 return
390
391
391
392
392 # backward compat
393 # backward compat
393 log_pull_action = post_pull
394 log_pull_action = post_pull
394
395
395 # backward compat
396 # backward compat
396 log_push_action = post_push
397 log_push_action = post_push
397
398
398
399
399 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
400 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
400 """
401 """
401 Old hook name: keep here for backward compatibility.
402 Old hook name: keep here for backward compatibility.
402
403
403 This is only required when the installed git hooks are not upgraded.
404 This is only required when the installed git hooks are not upgraded.
404 """
405 """
405 pass
406 pass
406
407
407
408
408 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
409 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
409 """
410 """
410 Old hook name: keep here for backward compatibility.
411 Old hook name: keep here for backward compatibility.
411
412
412 This is only required when the installed git hooks are not upgraded.
413 This is only required when the installed git hooks are not upgraded.
413 """
414 """
414 pass
415 pass
415
416
416
417
417 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
418 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
418
419
419
420
420 def git_pre_pull(extras):
421 def git_pre_pull(extras):
421 """
422 """
422 Pre pull hook.
423 Pre pull hook.
423
424
424 :param extras: dictionary containing the keys defined in simplevcs
425 :param extras: dictionary containing the keys defined in simplevcs
425 :type extras: dict
426 :type extras: dict
426
427
427 :return: status code of the hook. 0 for success.
428 :return: status code of the hook. 0 for success.
428 :rtype: int
429 :rtype: int
429 """
430 """
430 if 'pull' not in extras['hooks']:
431 if 'pull' not in extras['hooks']:
431 return HookResponse(0, '')
432 return HookResponse(0, '')
432
433
433 stdout = io.BytesIO()
434 stdout = io.BytesIO()
434 try:
435 try:
435 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
436 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
436 except Exception as error:
437 except Exception as error:
437 status = 128
438 status = 128
438 stdout.write('ERROR: %s\n' % str(error))
439 stdout.write('ERROR: %s\n' % str(error))
439
440
440 return HookResponse(status, stdout.getvalue())
441 return HookResponse(status, stdout.getvalue())
441
442
442
443
443 def git_post_pull(extras):
444 def git_post_pull(extras):
444 """
445 """
445 Post pull hook.
446 Post pull hook.
446
447
447 :param extras: dictionary containing the keys defined in simplevcs
448 :param extras: dictionary containing the keys defined in simplevcs
448 :type extras: dict
449 :type extras: dict
449
450
450 :return: status code of the hook. 0 for success.
451 :return: status code of the hook. 0 for success.
451 :rtype: int
452 :rtype: int
452 """
453 """
453 if 'pull' not in extras['hooks']:
454 if 'pull' not in extras['hooks']:
454 return HookResponse(0, '')
455 return HookResponse(0, '')
455
456
456 stdout = io.BytesIO()
457 stdout = io.BytesIO()
457 try:
458 try:
458 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
459 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
459 except Exception as error:
460 except Exception as error:
460 status = 128
461 status = 128
461 stdout.write('ERROR: %s\n' % error)
462 stdout.write('ERROR: %s\n' % error)
462
463
463 return HookResponse(status, stdout.getvalue())
464 return HookResponse(status, stdout.getvalue())
464
465
465
466
466 def _parse_git_ref_lines(revision_lines):
467 def _parse_git_ref_lines(revision_lines):
467 rev_data = []
468 rev_data = []
468 for revision_line in revision_lines or []:
469 for revision_line in revision_lines or []:
469 old_rev, new_rev, ref = revision_line.strip().split(' ')
470 old_rev, new_rev, ref = revision_line.strip().split(' ')
470 ref_data = ref.split('/', 2)
471 ref_data = ref.split('/', 2)
471 if ref_data[1] in ('tags', 'heads'):
472 if ref_data[1] in ('tags', 'heads'):
472 rev_data.append({
473 rev_data.append({
473 # NOTE(marcink):
474 # NOTE(marcink):
474 # we're unable to tell total_commits for git at this point
475 # we're unable to tell total_commits for git at this point
475 # but we set the variable for consistency with GIT
476 # but we set the variable for consistency with GIT
476 'total_commits': -1,
477 'total_commits': -1,
477 'old_rev': old_rev,
478 'old_rev': old_rev,
478 'new_rev': new_rev,
479 'new_rev': new_rev,
479 'ref': ref,
480 'ref': ref,
480 'type': ref_data[1],
481 'type': ref_data[1],
481 'name': ref_data[2],
482 'name': ref_data[2],
482 })
483 })
483 return rev_data
484 return rev_data
484
485
485
486
486 def git_pre_receive(unused_repo_path, revision_lines, env):
487 def git_pre_receive(unused_repo_path, revision_lines, env):
487 """
488 """
488 Pre push hook.
489 Pre push hook.
489
490
490 :param extras: dictionary containing the keys defined in simplevcs
491 :param extras: dictionary containing the keys defined in simplevcs
491 :type extras: dict
492 :type extras: dict
492
493
493 :return: status code of the hook. 0 for success.
494 :return: status code of the hook. 0 for success.
494 :rtype: int
495 :rtype: int
495 """
496 """
496 extras = json.loads(env['RC_SCM_DATA'])
497 extras = json.loads(env['RC_SCM_DATA'])
497 rev_data = _parse_git_ref_lines(revision_lines)
498 rev_data = _parse_git_ref_lines(revision_lines)
498 if 'push' not in extras['hooks']:
499 if 'push' not in extras['hooks']:
499 return 0
500 return 0
500 empty_commit_id = '0' * 40
501 empty_commit_id = '0' * 40
501
502
502 detect_force_push = extras.get('detect_force_push')
503 detect_force_push = extras.get('detect_force_push')
503
504
504 for push_ref in rev_data:
505 for push_ref in rev_data:
505 # store our git-env which holds the temp store
506 # store our git-env which holds the temp store
506 push_ref['git_env'] = _get_git_env()
507 push_ref['git_env'] = _get_git_env()
507 push_ref['pruned_sha'] = ''
508 push_ref['pruned_sha'] = ''
508 if not detect_force_push:
509 if not detect_force_push:
509 # don't check for forced-push when we don't need to
510 # don't check for forced-push when we don't need to
510 continue
511 continue
511
512
512 type_ = push_ref['type']
513 type_ = push_ref['type']
513 new_branch = push_ref['old_rev'] == empty_commit_id
514 new_branch = push_ref['old_rev'] == empty_commit_id
514 delete_branch = push_ref['new_rev'] == empty_commit_id
515 delete_branch = push_ref['new_rev'] == empty_commit_id
515 if type_ == 'heads' and not (new_branch or delete_branch):
516 if type_ == 'heads' and not (new_branch or delete_branch):
516 old_rev = push_ref['old_rev']
517 old_rev = push_ref['old_rev']
517 new_rev = push_ref['new_rev']
518 new_rev = push_ref['new_rev']
518 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
519 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
519 stdout, stderr = subprocessio.run_command(
520 stdout, stderr = subprocessio.run_command(
520 cmd, env=os.environ.copy())
521 cmd, env=os.environ.copy())
521 # means we're having some non-reachable objects, this forced push was used
522 # means we're having some non-reachable objects, this forced push was used
522 if stdout:
523 if stdout:
523 push_ref['pruned_sha'] = stdout.splitlines()
524 push_ref['pruned_sha'] = stdout.splitlines()
524
525
525 extras['hook_type'] = 'pre_receive'
526 extras['hook_type'] = 'pre_receive'
526 extras['commit_ids'] = rev_data
527 extras['commit_ids'] = rev_data
527 return _call_hook('pre_push', extras, GitMessageWriter())
528 return _call_hook('pre_push', extras, GitMessageWriter())
528
529
529
530
530 def git_post_receive(unused_repo_path, revision_lines, env):
531 def git_post_receive(unused_repo_path, revision_lines, env):
531 """
532 """
532 Post push hook.
533 Post push hook.
533
534
534 :param extras: dictionary containing the keys defined in simplevcs
535 :param extras: dictionary containing the keys defined in simplevcs
535 :type extras: dict
536 :type extras: dict
536
537
537 :return: status code of the hook. 0 for success.
538 :return: status code of the hook. 0 for success.
538 :rtype: int
539 :rtype: int
539 """
540 """
540 extras = json.loads(env['RC_SCM_DATA'])
541 extras = json.loads(env['RC_SCM_DATA'])
541 if 'push' not in extras['hooks']:
542 if 'push' not in extras['hooks']:
542 return 0
543 return 0
543
544
544 rev_data = _parse_git_ref_lines(revision_lines)
545 rev_data = _parse_git_ref_lines(revision_lines)
545
546
546 git_revs = []
547 git_revs = []
547
548
548 # N.B.(skreft): it is ok to just call git, as git before calling a
549 # N.B.(skreft): it is ok to just call git, as git before calling a
549 # subcommand sets the PATH environment variable so that it point to the
550 # subcommand sets the PATH environment variable so that it point to the
550 # correct version of the git executable.
551 # correct version of the git executable.
551 empty_commit_id = '0' * 40
552 empty_commit_id = '0' * 40
552 branches = []
553 branches = []
553 tags = []
554 tags = []
554 for push_ref in rev_data:
555 for push_ref in rev_data:
555 type_ = push_ref['type']
556 type_ = push_ref['type']
556
557
557 if type_ == 'heads':
558 if type_ == 'heads':
558 if push_ref['old_rev'] == empty_commit_id:
559 if push_ref['old_rev'] == empty_commit_id:
559 # starting new branch case
560 # starting new branch case
560 if push_ref['name'] not in branches:
561 if push_ref['name'] not in branches:
561 branches.append(push_ref['name'])
562 branches.append(push_ref['name'])
562
563
563 # Fix up head revision if needed
564 # Fix up head revision if needed
564 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
565 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
565 try:
566 try:
566 subprocessio.run_command(cmd, env=os.environ.copy())
567 subprocessio.run_command(cmd, env=os.environ.copy())
567 except Exception:
568 except Exception:
568 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
569 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
569 'refs/heads/%s' % push_ref['name']]
570 'refs/heads/%s' % push_ref['name']]
570 print("Setting default branch to %s" % push_ref['name'])
571 print("Setting default branch to %s" % push_ref['name'])
571 subprocessio.run_command(cmd, env=os.environ.copy())
572 subprocessio.run_command(cmd, env=os.environ.copy())
572
573
573 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
574 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
574 '--format=%(refname)', 'refs/heads/*']
575 '--format=%(refname)', 'refs/heads/*']
575 stdout, stderr = subprocessio.run_command(
576 stdout, stderr = subprocessio.run_command(
576 cmd, env=os.environ.copy())
577 cmd, env=os.environ.copy())
577 heads = stdout
578 heads = stdout
578 heads = heads.replace(push_ref['ref'], '')
579 heads = heads.replace(push_ref['ref'], '')
579 heads = ' '.join(head for head
580 heads = ' '.join(head for head
580 in heads.splitlines() if head) or '.'
581 in heads.splitlines() if head) or '.'
581 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
582 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
582 '--pretty=format:%H', '--', push_ref['new_rev'],
583 '--pretty=format:%H', '--', push_ref['new_rev'],
583 '--not', heads]
584 '--not', heads]
584 stdout, stderr = subprocessio.run_command(
585 stdout, stderr = subprocessio.run_command(
585 cmd, env=os.environ.copy())
586 cmd, env=os.environ.copy())
586 git_revs.extend(stdout.splitlines())
587 git_revs.extend(stdout.splitlines())
587 elif push_ref['new_rev'] == empty_commit_id:
588 elif push_ref['new_rev'] == empty_commit_id:
588 # delete branch case
589 # delete branch case
589 git_revs.append('delete_branch=>%s' % push_ref['name'])
590 git_revs.append('delete_branch=>%s' % push_ref['name'])
590 else:
591 else:
591 if push_ref['name'] not in branches:
592 if push_ref['name'] not in branches:
592 branches.append(push_ref['name'])
593 branches.append(push_ref['name'])
593
594
594 cmd = [settings.GIT_EXECUTABLE, 'log',
595 cmd = [settings.GIT_EXECUTABLE, 'log',
595 '{old_rev}..{new_rev}'.format(**push_ref),
596 '{old_rev}..{new_rev}'.format(**push_ref),
596 '--reverse', '--pretty=format:%H']
597 '--reverse', '--pretty=format:%H']
597 stdout, stderr = subprocessio.run_command(
598 stdout, stderr = subprocessio.run_command(
598 cmd, env=os.environ.copy())
599 cmd, env=os.environ.copy())
599 git_revs.extend(stdout.splitlines())
600 git_revs.extend(stdout.splitlines())
600 elif type_ == 'tags':
601 elif type_ == 'tags':
601 if push_ref['name'] not in tags:
602 if push_ref['name'] not in tags:
602 tags.append(push_ref['name'])
603 tags.append(push_ref['name'])
603 git_revs.append('tag=>%s' % push_ref['name'])
604 git_revs.append('tag=>%s' % push_ref['name'])
604
605
605 extras['hook_type'] = 'post_receive'
606 extras['hook_type'] = 'post_receive'
606 extras['commit_ids'] = git_revs
607 extras['commit_ids'] = git_revs
607 extras['new_refs'] = {
608 extras['new_refs'] = {
608 'branches': branches,
609 'branches': branches,
609 'bookmarks': [],
610 'bookmarks': [],
610 'tags': tags,
611 'tags': tags,
611 }
612 }
612
613
613 if 'repo_size' in extras['hooks']:
614 if 'repo_size' in extras['hooks']:
614 try:
615 try:
615 _call_hook('repo_size', extras, GitMessageWriter())
616 _call_hook('repo_size', extras, GitMessageWriter())
616 except:
617 except:
617 pass
618 pass
618
619
619 return _call_hook('post_push', extras, GitMessageWriter())
620 return _call_hook('post_push', extras, GitMessageWriter())
620
621
621
622
622 def _get_extras_from_txn_id(path, txn_id):
623 def _get_extras_from_txn_id(path, txn_id):
623 extras = {}
624 extras = {}
624 try:
625 try:
625 cmd = ['svnlook', 'pget',
626 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
626 '-t', txn_id,
627 '-t', txn_id,
627 '--revprop', path, 'rc-scm-extras']
628 '--revprop', path, 'rc-scm-extras']
628 stdout, stderr = subprocessio.run_command(
629 stdout, stderr = subprocessio.run_command(
629 cmd, env=os.environ.copy())
630 cmd, env=os.environ.copy())
630 extras = json.loads(base64.urlsafe_b64decode(stdout))
631 extras = json.loads(base64.urlsafe_b64decode(stdout))
631 except Exception:
632 except Exception:
632 log.exception('Failed to extract extras info from txn_id')
633 log.exception('Failed to extract extras info from txn_id')
633
634
634 return extras
635 return extras
635
636
636
637
637 def _get_extras_from_commit_id(commit_id, path):
638 def _get_extras_from_commit_id(commit_id, path):
638 extras = {}
639 extras = {}
639 try:
640 try:
640 cmd = ['svnlook', 'pget',
641 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
641 '-r', commit_id,
642 '-r', commit_id,
642 '--revprop', path, 'rc-scm-extras']
643 '--revprop', path, 'rc-scm-extras']
643 stdout, stderr = subprocessio.run_command(
644 stdout, stderr = subprocessio.run_command(
644 cmd, env=os.environ.copy())
645 cmd, env=os.environ.copy())
645 extras = json.loads(base64.urlsafe_b64decode(stdout))
646 extras = json.loads(base64.urlsafe_b64decode(stdout))
646 except Exception:
647 except Exception:
647 log.exception('Failed to extract extras info from commit_id')
648 log.exception('Failed to extract extras info from commit_id')
648
649
649 return extras
650 return extras
650
651
651
652
652 def svn_pre_commit(repo_path, commit_data, env):
653 def svn_pre_commit(repo_path, commit_data, env):
653 path, txn_id = commit_data
654 path, txn_id = commit_data
654 branches = []
655 branches = []
655 tags = []
656 tags = []
656
657
657 if env.get('RC_SCM_DATA'):
658 if env.get('RC_SCM_DATA'):
658 extras = json.loads(env['RC_SCM_DATA'])
659 extras = json.loads(env['RC_SCM_DATA'])
659 else:
660 else:
660 # fallback method to read from TXN-ID stored data
661 # fallback method to read from TXN-ID stored data
661 extras = _get_extras_from_txn_id(path, txn_id)
662 extras = _get_extras_from_txn_id(path, txn_id)
662 if not extras:
663 if not extras:
663 return 0
664 return 0
664
665
665 extras['hook_type'] = 'pre_commit'
666 extras['hook_type'] = 'pre_commit'
666 extras['commit_ids'] = []
667 extras['commit_ids'] = [txn_id]
667 extras['txn_id'] = txn_id
668 extras['txn_id'] = txn_id
668 extras['new_refs'] = {
669 extras['new_refs'] = {
669 'total_commits': 1,
670 'total_commits': 1,
670 'branches': branches,
671 'branches': branches,
671 'bookmarks': [],
672 'bookmarks': [],
672 'tags': tags,
673 'tags': tags,
673 }
674 }
674
675
675 return _call_hook('pre_push', extras, SvnMessageWriter())
676 return _call_hook('pre_push', extras, SvnMessageWriter())
676
677
677
678
678 def svn_post_commit(repo_path, commit_data, env):
679 def svn_post_commit(repo_path, commit_data, env):
679 """
680 """
680 commit_data is path, rev, txn_id
681 commit_data is path, rev, txn_id
681 """
682 """
682 path, commit_id, txn_id = commit_data
683 path, commit_id, txn_id = commit_data
683 branches = []
684 branches = []
684 tags = []
685 tags = []
685
686
686 if env.get('RC_SCM_DATA'):
687 if env.get('RC_SCM_DATA'):
687 extras = json.loads(env['RC_SCM_DATA'])
688 extras = json.loads(env['RC_SCM_DATA'])
688 else:
689 else:
689 # fallback method to read from TXN-ID stored data
690 # fallback method to read from TXN-ID stored data
690 extras = _get_extras_from_commit_id(commit_id, path)
691 extras = _get_extras_from_commit_id(commit_id, path)
691 if not extras:
692 if not extras:
692 return 0
693 return 0
693
694
694 extras['hook_type'] = 'post_commit'
695 extras['hook_type'] = 'post_commit'
695 extras['commit_ids'] = [commit_id]
696 extras['commit_ids'] = [commit_id]
696 extras['txn_id'] = txn_id
697 extras['txn_id'] = txn_id
697 extras['new_refs'] = {
698 extras['new_refs'] = {
698 'branches': branches,
699 'branches': branches,
699 'bookmarks': [],
700 'bookmarks': [],
700 'tags': tags,
701 'tags': tags,
701 'total_commits': 1,
702 'total_commits': 1,
702 }
703 }
703
704
704 if 'repo_size' in extras['hooks']:
705 if 'repo_size' in extras['hooks']:
705 try:
706 try:
706 _call_hook('repo_size', extras, SvnMessageWriter())
707 _call_hook('repo_size', extras, SvnMessageWriter())
707 except Exception:
708 except Exception:
708 pass
709 pass
709
710
710 return _call_hook('post_push', extras, SvnMessageWriter())
711 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,151 +1,169 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20
20
21 import os
21 import os
22 import time
22 import time
23 import datetime
23 import datetime
24 import msgpack
24 import msgpack
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import tempfile
27 import tempfile
28
28
29 from pyramid import compat
29
30
30 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
31
32
32 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
33 global_prefix = 'vcsserver'
34 global_prefix = 'vcsserver'
34 exc_store_dir_name = 'rc_exception_store_v1'
35 exc_store_dir_name = 'rc_exception_store_v1'
35
36
36
37
37 def exc_serialize(exc_id, tb, exc_type):
38 def exc_serialize(exc_id, tb, exc_type):
38
39
39 data = {
40 data = {
40 'version': 'v1',
41 'version': 'v1',
41 'exc_id': exc_id,
42 'exc_id': exc_id,
42 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
43 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
43 'exc_timestamp': repr(time.time()),
44 'exc_timestamp': repr(time.time()),
44 'exc_message': tb,
45 'exc_message': tb,
45 'exc_type': exc_type,
46 'exc_type': exc_type,
46 }
47 }
47 return msgpack.packb(data), data
48 return msgpack.packb(data), data
48
49
49
50
50 def exc_unserialize(tb):
51 def exc_unserialize(tb):
51 return msgpack.unpackb(tb)
52 return msgpack.unpackb(tb)
52
53
53
54
54 def get_exc_store():
55 def get_exc_store():
55 """
56 """
56 Get and create exception store if it's not existing
57 Get and create exception store if it's not existing
57 """
58 """
58 import vcsserver as app
59 import vcsserver as app
59
60
60 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
61 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
61 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
62 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
62
63
63 _exc_store_path = os.path.abspath(_exc_store_path)
64 _exc_store_path = os.path.abspath(_exc_store_path)
64 if not os.path.isdir(_exc_store_path):
65 if not os.path.isdir(_exc_store_path):
65 os.makedirs(_exc_store_path)
66 os.makedirs(_exc_store_path)
66 log.debug('Initializing exceptions store at %s', _exc_store_path)
67 log.debug('Initializing exceptions store at %s', _exc_store_path)
67 return _exc_store_path
68 return _exc_store_path
68
69
69
70
70 def _store_exception(exc_id, exc_info, prefix):
71 def _store_exception(exc_id, exc_info, prefix):
71 exc_type, exc_value, exc_traceback = exc_info
72 exc_type, exc_value, exc_traceback = exc_info
73
72 tb = ''.join(traceback.format_exception(
74 tb = ''.join(traceback.format_exception(
73 exc_type, exc_value, exc_traceback, None))
75 exc_type, exc_value, exc_traceback, None))
74
76
77 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
78
79 if detailed_tb:
80 if isinstance(detailed_tb, compat.string_types):
81 remote_tb = [detailed_tb]
82
83 tb += (
84 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
85 '{}\n'
86 '+++ END SOURCE EXCEPTION +++\n'
87 ''.format('\n'.join(remote_tb))
88 )
89
90 # Avoid that remote_tb also appears in the frame
91 del remote_tb
92
75 exc_type_name = exc_type.__name__
93 exc_type_name = exc_type.__name__
76 exc_store_path = get_exc_store()
94 exc_store_path = get_exc_store()
77 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
95 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
78 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
96 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
79 if not os.path.isdir(exc_store_path):
97 if not os.path.isdir(exc_store_path):
80 os.makedirs(exc_store_path)
98 os.makedirs(exc_store_path)
81 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
99 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
82 with open(stored_exc_path, 'wb') as f:
100 with open(stored_exc_path, 'wb') as f:
83 f.write(exc_data)
101 f.write(exc_data)
84 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
102 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
85
103
86
104
87 def store_exception(exc_id, exc_info, prefix=global_prefix):
105 def store_exception(exc_id, exc_info, prefix=global_prefix):
88 """
106 """
89 Example usage::
107 Example usage::
90
108
91 exc_info = sys.exc_info()
109 exc_info = sys.exc_info()
92 store_exception(id(exc_info), exc_info)
110 store_exception(id(exc_info), exc_info)
93 """
111 """
94
112
95 try:
113 try:
96 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
114 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
97 except Exception:
115 except Exception:
98 log.exception('Failed to store exception `%s` information', exc_id)
116 log.exception('Failed to store exception `%s` information', exc_id)
99 # there's no way this can fail, it will crash server badly if it does.
117 # there's no way this can fail, it will crash server badly if it does.
100 pass
118 pass
101
119
102
120
103 def _find_exc_file(exc_id, prefix=global_prefix):
121 def _find_exc_file(exc_id, prefix=global_prefix):
104 exc_store_path = get_exc_store()
122 exc_store_path = get_exc_store()
105 if prefix:
123 if prefix:
106 exc_id = '{}_{}'.format(exc_id, prefix)
124 exc_id = '{}_{}'.format(exc_id, prefix)
107 else:
125 else:
108 # search without a prefix
126 # search without a prefix
109 exc_id = '{}'.format(exc_id)
127 exc_id = '{}'.format(exc_id)
110
128
111 # we need to search the store for such start pattern as above
129 # we need to search the store for such start pattern as above
112 for fname in os.listdir(exc_store_path):
130 for fname in os.listdir(exc_store_path):
113 if fname.startswith(exc_id):
131 if fname.startswith(exc_id):
114 exc_id = os.path.join(exc_store_path, fname)
132 exc_id = os.path.join(exc_store_path, fname)
115 break
133 break
116 continue
134 continue
117 else:
135 else:
118 exc_id = None
136 exc_id = None
119
137
120 return exc_id
138 return exc_id
121
139
122
140
123 def _read_exception(exc_id, prefix):
141 def _read_exception(exc_id, prefix):
124 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
142 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
125 if exc_id_file_path:
143 if exc_id_file_path:
126 with open(exc_id_file_path, 'rb') as f:
144 with open(exc_id_file_path, 'rb') as f:
127 return exc_unserialize(f.read())
145 return exc_unserialize(f.read())
128 else:
146 else:
129 log.debug('Exception File `%s` not found', exc_id_file_path)
147 log.debug('Exception File `%s` not found', exc_id_file_path)
130 return None
148 return None
131
149
132
150
133 def read_exception(exc_id, prefix=global_prefix):
151 def read_exception(exc_id, prefix=global_prefix):
134 try:
152 try:
135 return _read_exception(exc_id=exc_id, prefix=prefix)
153 return _read_exception(exc_id=exc_id, prefix=prefix)
136 except Exception:
154 except Exception:
137 log.exception('Failed to read exception `%s` information', exc_id)
155 log.exception('Failed to read exception `%s` information', exc_id)
138 # there's no way this can fail, it will crash server badly if it does.
156 # there's no way this can fail, it will crash server badly if it does.
139 return None
157 return None
140
158
141
159
142 def delete_exception(exc_id, prefix=global_prefix):
160 def delete_exception(exc_id, prefix=global_prefix):
143 try:
161 try:
144 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
162 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
145 if exc_id_file_path:
163 if exc_id_file_path:
146 os.remove(exc_id_file_path)
164 os.remove(exc_id_file_path)
147
165
148 except Exception:
166 except Exception:
149 log.exception('Failed to remove exception `%s` information', exc_id)
167 log.exception('Failed to remove exception `%s` information', exc_id)
150 # there's no way this can fail, it will crash server badly if it does.
168 # there's no way this can fail, it will crash server badly if it does.
151 pass
169 pass
@@ -1,234 +1,235 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import itertools
20 import itertools
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.wireprotoserver
24 import mercurial.wireprotoserver
25 import mercurial.hgweb.common
25 import mercurial.hgweb.common
26 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.hgweb_mod
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30
30
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 # propagated from mercurial documentation
35 # propagated from mercurial documentation
36 HG_UI_SECTIONS = [
36 HG_UI_SECTIONS = [
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 ]
40 ]
41
41
42
42
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 """Extension of hgweb that simplifies some functions."""
44 """Extension of hgweb that simplifies some functions."""
45
45
46 def _get_view(self, repo):
46 def _get_view(self, repo):
47 """Views are not supported."""
47 """Views are not supported."""
48 return repo
48 return repo
49
49
50 def loadsubweb(self):
50 def loadsubweb(self):
51 """The result is only used in the templater method which is not used."""
51 """The result is only used in the templater method which is not used."""
52 return None
52 return None
53
53
54 def run(self):
54 def run(self):
55 """Unused function so raise an exception if accidentally called."""
55 """Unused function so raise an exception if accidentally called."""
56 raise NotImplementedError
56 raise NotImplementedError
57
57
58 def templater(self, req):
58 def templater(self, req):
59 """Function used in an unreachable code path.
59 """Function used in an unreachable code path.
60
60
61 This code is unreachable because we guarantee that the HTTP request,
61 This code is unreachable because we guarantee that the HTTP request,
62 corresponds to a Mercurial command. See the is_hg method. So, we are
62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 never going to get a user-visible url.
63 never going to get a user-visible url.
64 """
64 """
65 raise NotImplementedError
65 raise NotImplementedError
66
66
67 def archivelist(self, nodeid):
67 def archivelist(self, nodeid):
68 """Unused function so raise an exception if accidentally called."""
68 """Unused function so raise an exception if accidentally called."""
69 raise NotImplementedError
69 raise NotImplementedError
70
70
71 def __call__(self, environ, start_response):
71 def __call__(self, environ, start_response):
72 """Run the WSGI application.
72 """Run the WSGI application.
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 from mercurial.hgweb import request as requestmod
76 from mercurial.hgweb import request as requestmod
77 req = requestmod.parserequestfromenv(environ)
77 req = requestmod.parserequestfromenv(environ)
78 res = requestmod.wsgiresponse(req, start_response)
78 res = requestmod.wsgiresponse(req, start_response)
79 gen = self.run_wsgi(req, res)
79 gen = self.run_wsgi(req, res)
80
80
81 first_chunk = None
81 first_chunk = None
82
82
83 try:
83 try:
84 data = gen.next()
84 data = gen.next()
85
85
86 def first_chunk():
86 def first_chunk():
87 yield data
87 yield data
88 except StopIteration:
88 except StopIteration:
89 pass
89 pass
90
90
91 if first_chunk:
91 if first_chunk:
92 return itertools.chain(first_chunk(), gen)
92 return itertools.chain(first_chunk(), gen)
93 return gen
93 return gen
94
94
95 def _runwsgi(self, req, res, repo):
95 def _runwsgi(self, req, res, repo):
96
96
97 cmd = req.qsparams.get('cmd', '')
97 cmd = req.qsparams.get('cmd', '')
98 if not mercurial.wireprotoserver.iscmd(cmd):
98 if not mercurial.wireprotoserver.iscmd(cmd):
99 # NOTE(marcink): for unsupported commands, we return bad request
99 # NOTE(marcink): for unsupported commands, we return bad request
100 # internally from HG
100 # internally from HG
101 from mercurial.hgweb.common import statusmessage
101 from mercurial.hgweb.common import statusmessage
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 res.setbodybytes('')
103 res.setbodybytes('')
104 return res.sendresponse()
104 return res.sendresponse()
105
105
106 return super(HgWeb, self)._runwsgi(req, res, repo)
106 return super(HgWeb, self)._runwsgi(req, res, repo)
107
107
108
108
109 def make_hg_ui_from_config(repo_config):
109 def make_hg_ui_from_config(repo_config):
110 baseui = mercurial.ui.ui()
110 baseui = mercurial.ui.ui()
111
111
112 # clean the baseui object
112 # clean the baseui object
113 baseui._ocfg = mercurial.config.config()
113 baseui._ocfg = mercurial.config.config()
114 baseui._ucfg = mercurial.config.config()
114 baseui._ucfg = mercurial.config.config()
115 baseui._tcfg = mercurial.config.config()
115 baseui._tcfg = mercurial.config.config()
116
116
117 for section, option, value in repo_config:
117 for section, option, value in repo_config:
118 baseui.setconfig(section, option, value)
118 baseui.setconfig(section, option, value)
119
119
120 # make our hgweb quiet so it doesn't print output
120 # make our hgweb quiet so it doesn't print output
121 baseui.setconfig('ui', 'quiet', 'true')
121 baseui.setconfig('ui', 'quiet', 'true')
122
122
123 return baseui
123 return baseui
124
124
125
125
126 def update_hg_ui_from_hgrc(baseui, repo_path):
126 def update_hg_ui_from_hgrc(baseui, repo_path):
127 path = os.path.join(repo_path, '.hg', 'hgrc')
127 path = os.path.join(repo_path, '.hg', 'hgrc')
128
128
129 if not os.path.isfile(path):
129 if not os.path.isfile(path):
130 log.debug('hgrc file is not present at %s, skipping...', path)
130 log.debug('hgrc file is not present at %s, skipping...', path)
131 return
131 return
132 log.debug('reading hgrc from %s', path)
132 log.debug('reading hgrc from %s', path)
133 cfg = mercurial.config.config()
133 cfg = mercurial.config.config()
134 cfg.read(path)
134 cfg.read(path)
135 for section in HG_UI_SECTIONS:
135 for section in HG_UI_SECTIONS:
136 for k, v in cfg.items(section):
136 for k, v in cfg.items(section):
137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
138 baseui.setconfig(section, k, v)
138 baseui.setconfig(section, k, v)
139
139
140
140
141 def create_hg_wsgi_app(repo_path, repo_name, config):
141 def create_hg_wsgi_app(repo_path, repo_name, config):
142 """
142 """
143 Prepares a WSGI application to handle Mercurial requests.
143 Prepares a WSGI application to handle Mercurial requests.
144
144
145 :param config: is a list of 3-item tuples representing a ConfigObject
145 :param config: is a list of 3-item tuples representing a ConfigObject
146 (it is the serialized version of the config object).
146 (it is the serialized version of the config object).
147 """
147 """
148 log.debug("Creating Mercurial WSGI application")
148 log.debug("Creating Mercurial WSGI application")
149
149
150 baseui = make_hg_ui_from_config(config)
150 baseui = make_hg_ui_from_config(config)
151 update_hg_ui_from_hgrc(baseui, repo_path)
151 update_hg_ui_from_hgrc(baseui, repo_path)
152
152
153 try:
153 try:
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
155 except mercurial.error.RequirementError as e:
155 except mercurial.error.RequirementError as e:
156 raise exceptions.RequirementException(e)(e)
156 raise exceptions.RequirementException(e)(e)
157
157
158
158
159 class GitHandler(object):
159 class GitHandler(object):
160 """
160 """
161 Handler for Git operations like push/pull etc
161 Handler for Git operations like push/pull etc
162 """
162 """
163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
164 extras):
164 extras):
165 if not os.path.isdir(repo_location):
165 if not os.path.isdir(repo_location):
166 raise OSError(repo_location)
166 raise OSError(repo_location)
167 self.content_path = repo_location
167 self.content_path = repo_location
168 self.repo_name = repo_name
168 self.repo_name = repo_name
169 self.repo_location = repo_location
169 self.repo_location = repo_location
170 self.extras = extras
170 self.extras = extras
171 self.git_path = git_path
171 self.git_path = git_path
172 self.update_server_info = update_server_info
172 self.update_server_info = update_server_info
173
173
174 def __call__(self, environ, start_response):
174 def __call__(self, environ, start_response):
175 app = webob.exc.HTTPNotFound()
175 app = webob.exc.HTTPNotFound()
176 candidate_paths = (
176 candidate_paths = (
177 self.content_path, os.path.join(self.content_path, '.git'))
177 self.content_path, os.path.join(self.content_path, '.git'))
178
178
179 for content_path in candidate_paths:
179 for content_path in candidate_paths:
180 try:
180 try:
181 app = pygrack.GitRepository(
181 app = pygrack.GitRepository(
182 self.repo_name, content_path, self.git_path,
182 self.repo_name, content_path, self.git_path,
183 self.update_server_info, self.extras)
183 self.update_server_info, self.extras)
184 break
184 break
185 except OSError:
185 except OSError:
186 continue
186 continue
187
187
188 return app(environ, start_response)
188 return app(environ, start_response)
189
189
190
190
191 def create_git_wsgi_app(repo_path, repo_name, config):
191 def create_git_wsgi_app(repo_path, repo_name, config):
192 """
192 """
193 Creates a WSGI application to handle Git requests.
193 Creates a WSGI application to handle Git requests.
194
194
195 :param config: is a dictionary holding the extras.
195 :param config: is a dictionary holding the extras.
196 """
196 """
197 git_path = settings.GIT_EXECUTABLE
197 git_path = settings.GIT_EXECUTABLE
198 update_server_info = config.pop('git_update_server_info')
198 update_server_info = config.pop('git_update_server_info')
199 app = GitHandler(
199 app = GitHandler(
200 repo_path, repo_name, git_path, update_server_info, config)
200 repo_path, repo_name, git_path, update_server_info, config)
201
201
202 return app
202 return app
203
203
204
204
205 class GitLFSHandler(object):
205 class GitLFSHandler(object):
206 """
206 """
207 Handler for Git LFS operations
207 Handler for Git LFS operations
208 """
208 """
209
209
210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
211 extras):
211 extras):
212 if not os.path.isdir(repo_location):
212 if not os.path.isdir(repo_location):
213 raise OSError(repo_location)
213 raise OSError(repo_location)
214 self.content_path = repo_location
214 self.content_path = repo_location
215 self.repo_name = repo_name
215 self.repo_name = repo_name
216 self.repo_location = repo_location
216 self.repo_location = repo_location
217 self.extras = extras
217 self.extras = extras
218 self.git_path = git_path
218 self.git_path = git_path
219 self.update_server_info = update_server_info
219 self.update_server_info = update_server_info
220
220
221 def get_app(self, git_lfs_enabled, git_lfs_store_path):
221 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
223 return app
223 return app
224
224
225
225
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 git_path = settings.GIT_EXECUTABLE
227 git_path = settings.GIT_EXECUTABLE
228 update_server_info = config.pop('git_update_server_info')
228 update_server_info = config.pop('git_update_server_info')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
229 git_lfs_enabled = config.pop('git_lfs_enabled')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
230 git_lfs_store_path = config.pop('git_lfs_store_path')
231 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
231 app = GitLFSHandler(
232 app = GitLFSHandler(
232 repo_path, repo_name, git_path, update_server_info, config)
233 repo_path, repo_name, git_path, update_server_info, config)
233
234
234 return app.get_app(git_lfs_enabled, git_lfs_store_path)
235 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
@@ -1,20 +1,22 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 WIRE_ENCODING = 'UTF-8'
18 WIRE_ENCODING = 'UTF-8'
19 GIT_EXECUTABLE = 'git'
19 GIT_EXECUTABLE = 'git'
20 SVN_EXECUTABLE = 'svn'
21 SVNLOOK_EXECUTABLE = 'svnlook'
20 BINARY_DIR = ''
22 BINARY_DIR = ''
@@ -1,732 +1,775 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 # Set of svn compatible version flags.
43 # Set of svn compatible version flags.
44 # Compare with subversion/svnadmin/svnadmin.c
44 # Compare with subversion/svnadmin/svnadmin.c
45 svn_compatible_versions = {
45 svn_compatible_versions = {
46 'pre-1.4-compatible',
46 'pre-1.4-compatible',
47 'pre-1.5-compatible',
47 'pre-1.5-compatible',
48 'pre-1.6-compatible',
48 'pre-1.6-compatible',
49 'pre-1.8-compatible',
49 'pre-1.8-compatible',
50 'pre-1.9-compatible'
50 'pre-1.9-compatible'
51 }
51 }
52
52
53 svn_compatible_versions_map = {
53 svn_compatible_versions_map = {
54 'pre-1.4-compatible': '1.3',
54 'pre-1.4-compatible': '1.3',
55 'pre-1.5-compatible': '1.4',
55 'pre-1.5-compatible': '1.4',
56 'pre-1.6-compatible': '1.5',
56 'pre-1.6-compatible': '1.5',
57 'pre-1.8-compatible': '1.7',
57 'pre-1.8-compatible': '1.7',
58 'pre-1.9-compatible': '1.8',
58 'pre-1.9-compatible': '1.8',
59 }
59 }
60
60
61
61
62 def reraise_safe_exceptions(func):
62 def reraise_safe_exceptions(func):
63 """Decorator for converting svn exceptions to something neutral."""
63 """Decorator for converting svn exceptions to something neutral."""
64 def wrapper(*args, **kwargs):
64 def wrapper(*args, **kwargs):
65 try:
65 try:
66 return func(*args, **kwargs)
66 return func(*args, **kwargs)
67 except Exception as e:
67 except Exception as e:
68 if not hasattr(e, '_vcs_kind'):
68 if not hasattr(e, '_vcs_kind'):
69 log.exception("Unhandled exception in svn remote call")
69 log.exception("Unhandled exception in svn remote call")
70 raise_from_original(exceptions.UnhandledException(e))
70 raise_from_original(exceptions.UnhandledException(e))
71 raise
71 raise
72 return wrapper
72 return wrapper
73
73
74
74
75 class SubversionFactory(RepoFactory):
75 class SubversionFactory(RepoFactory):
76 repo_type = 'svn'
76 repo_type = 'svn'
77
77
78 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
80 if create:
81 fs_config = {'compatible-version': '1.9'}
81 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
82 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
85 .format(compatible_version))
86 fs_config['compatible-version'] = \
86 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
87 svn_compatible_versions_map[compatible_version]
88
88
89 log.debug('Create SVN repo with config "%s"', fs_config)
89 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
91 else:
92 repo = svn.repos.open(path)
92 repo = svn.repos.open(path)
93
93
94 log.debug('Got SVN object: %s', repo)
94 log.debug('Got SVN object: %s', repo)
95 return repo
95 return repo
96
96
97 def repo(self, wire, create=False, compatible_version=None):
97 def repo(self, wire, create=False, compatible_version=None):
98 """
98 """
99 Get a repository instance for the given path.
99 Get a repository instance for the given path.
100
100
101 Uses internally the low level beaker API since the decorators introduce
101 Uses internally the low level beaker API since the decorators introduce
102 significant overhead.
102 significant overhead.
103 """
103 """
104 region = self._cache_region
104 region = self._cache_region
105 context = wire.get('context', None)
105 context = wire.get('context', None)
106 repo_path = wire.get('path', '')
106 repo_path = wire.get('path', '')
107 context_uid = '{}'.format(context)
107 context_uid = '{}'.format(context)
108 cache = wire.get('cache', True)
108 cache = wire.get('cache', True)
109 cache_on = context and cache
109 cache_on = context and cache
110
110
111 @region.conditional_cache_on_arguments(condition=cache_on)
111 @region.conditional_cache_on_arguments(condition=cache_on)
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
112 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
113 return self._create_repo(wire, create, compatible_version)
113 return self._create_repo(wire, create, compatible_version)
114
114
115 return create_new_repo(self.repo_type, repo_path, context_uid,
115 return create_new_repo(self.repo_type, repo_path, context_uid,
116 compatible_version)
116 compatible_version)
117
117
118
118
119 NODE_TYPE_MAPPING = {
119 NODE_TYPE_MAPPING = {
120 svn.core.svn_node_file: 'file',
120 svn.core.svn_node_file: 'file',
121 svn.core.svn_node_dir: 'dir',
121 svn.core.svn_node_dir: 'dir',
122 }
122 }
123
123
124
124
125 class SvnRemote(object):
125 class SvnRemote(object):
126
126
127 def __init__(self, factory, hg_factory=None):
127 def __init__(self, factory, hg_factory=None):
128 self._factory = factory
128 self._factory = factory
129 # TODO: Remove once we do not use internal Mercurial objects anymore
129 # TODO: Remove once we do not use internal Mercurial objects anymore
130 # for subversion
130 # for subversion
131 self._hg_factory = hg_factory
131 self._hg_factory = hg_factory
132
132
133 @reraise_safe_exceptions
133 @reraise_safe_exceptions
134 def discover_svn_version(self):
134 def discover_svn_version(self):
135 try:
135 try:
136 import svn.core
136 import svn.core
137 svn_ver = svn.core.SVN_VERSION
137 svn_ver = svn.core.SVN_VERSION
138 except ImportError:
138 except ImportError:
139 svn_ver = None
139 svn_ver = None
140 return svn_ver
140 return svn_ver
141
141
142 @reraise_safe_exceptions
143 def is_empty(self, wire):
144 repo = self._factory.repo(wire)
145
146 try:
147 return self.lookup(wire, -1) == 0
148 except Exception:
149 log.exception("failed to read object_store")
150 return False
151
142 def check_url(self, url, config_items):
152 def check_url(self, url, config_items):
143 # this can throw exception if not installed, but we detect this
153 # this can throw exception if not installed, but we detect this
144 from hgsubversion import svnrepo
154 from hgsubversion import svnrepo
145
155
146 baseui = self._hg_factory._create_config(config_items)
156 baseui = self._hg_factory._create_config(config_items)
147 # uuid function get's only valid UUID from proper repo, else
157 # uuid function get's only valid UUID from proper repo, else
148 # throws exception
158 # throws exception
149 try:
159 try:
150 svnrepo.svnremoterepo(baseui, url).svn.uuid
160 svnrepo.svnremoterepo(baseui, url).svn.uuid
151 except Exception:
161 except Exception:
152 tb = traceback.format_exc()
162 tb = traceback.format_exc()
153 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
163 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
154 raise URLError(
164 raise URLError(
155 '"%s" is not a valid Subversion source url.' % (url, ))
165 '"%s" is not a valid Subversion source url.' % (url, ))
156 return True
166 return True
157
167
158 def is_path_valid_repository(self, wire, path):
168 def is_path_valid_repository(self, wire, path):
159
169
160 # NOTE(marcink): short circuit the check for SVN repo
170 # NOTE(marcink): short circuit the check for SVN repo
161 # the repos.open might be expensive to check, but we have one cheap
171 # the repos.open might be expensive to check, but we have one cheap
162 # pre condition that we can use, to check for 'format' file
172 # pre condition that we can use, to check for 'format' file
163
173
164 if not os.path.isfile(os.path.join(path, 'format')):
174 if not os.path.isfile(os.path.join(path, 'format')):
165 return False
175 return False
166
176
167 try:
177 try:
168 svn.repos.open(path)
178 svn.repos.open(path)
169 except svn.core.SubversionException:
179 except svn.core.SubversionException:
170 tb = traceback.format_exc()
180 tb = traceback.format_exc()
171 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
181 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
172 return False
182 return False
173 return True
183 return True
174
184
175 @reraise_safe_exceptions
185 @reraise_safe_exceptions
176 def verify(self, wire,):
186 def verify(self, wire,):
177 repo_path = wire['path']
187 repo_path = wire['path']
178 if not self.is_path_valid_repository(wire, repo_path):
188 if not self.is_path_valid_repository(wire, repo_path):
179 raise Exception(
189 raise Exception(
180 "Path %s is not a valid Subversion repository." % repo_path)
190 "Path %s is not a valid Subversion repository." % repo_path)
181
191
182 cmd = ['svnadmin', 'info', repo_path]
192 cmd = ['svnadmin', 'info', repo_path]
183 stdout, stderr = subprocessio.run_command(cmd)
193 stdout, stderr = subprocessio.run_command(cmd)
184 return stdout
194 return stdout
185
195
186 def lookup(self, wire, revision):
196 def lookup(self, wire, revision):
187 if revision not in [-1, None, 'HEAD']:
197 if revision not in [-1, None, 'HEAD']:
188 raise NotImplementedError
198 raise NotImplementedError
189 repo = self._factory.repo(wire)
199 repo = self._factory.repo(wire)
190 fs_ptr = svn.repos.fs(repo)
200 fs_ptr = svn.repos.fs(repo)
191 head = svn.fs.youngest_rev(fs_ptr)
201 head = svn.fs.youngest_rev(fs_ptr)
192 return head
202 return head
193
203
194 def lookup_interval(self, wire, start_ts, end_ts):
204 def lookup_interval(self, wire, start_ts, end_ts):
195 repo = self._factory.repo(wire)
205 repo = self._factory.repo(wire)
196 fsobj = svn.repos.fs(repo)
206 fsobj = svn.repos.fs(repo)
197 start_rev = None
207 start_rev = None
198 end_rev = None
208 end_rev = None
199 if start_ts:
209 if start_ts:
200 start_ts_svn = apr_time_t(start_ts)
210 start_ts_svn = apr_time_t(start_ts)
201 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
211 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
202 else:
212 else:
203 start_rev = 1
213 start_rev = 1
204 if end_ts:
214 if end_ts:
205 end_ts_svn = apr_time_t(end_ts)
215 end_ts_svn = apr_time_t(end_ts)
206 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
216 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
207 else:
217 else:
208 end_rev = svn.fs.youngest_rev(fsobj)
218 end_rev = svn.fs.youngest_rev(fsobj)
209 return start_rev, end_rev
219 return start_rev, end_rev
210
220
211 def revision_properties(self, wire, revision):
221 def revision_properties(self, wire, revision):
212 repo = self._factory.repo(wire)
222 repo = self._factory.repo(wire)
213 fs_ptr = svn.repos.fs(repo)
223 fs_ptr = svn.repos.fs(repo)
214 return svn.fs.revision_proplist(fs_ptr, revision)
224 return svn.fs.revision_proplist(fs_ptr, revision)
215
225
216 def revision_changes(self, wire, revision):
226 def revision_changes(self, wire, revision):
217
227
218 repo = self._factory.repo(wire)
228 repo = self._factory.repo(wire)
219 fsobj = svn.repos.fs(repo)
229 fsobj = svn.repos.fs(repo)
220 rev_root = svn.fs.revision_root(fsobj, revision)
230 rev_root = svn.fs.revision_root(fsobj, revision)
221
231
222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
232 editor = svn.repos.ChangeCollector(fsobj, rev_root)
223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
233 editor_ptr, editor_baton = svn.delta.make_editor(editor)
224 base_dir = ""
234 base_dir = ""
225 send_deltas = False
235 send_deltas = False
226 svn.repos.replay2(
236 svn.repos.replay2(
227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
237 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
228 editor_ptr, editor_baton, None)
238 editor_ptr, editor_baton, None)
229
239
230 added = []
240 added = []
231 changed = []
241 changed = []
232 removed = []
242 removed = []
233
243
234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
244 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
235 for path, change in editor.changes.iteritems():
245 for path, change in editor.changes.iteritems():
236 # TODO: Decide what to do with directory nodes. Subversion can add
246 # TODO: Decide what to do with directory nodes. Subversion can add
237 # empty directories.
247 # empty directories.
238
248
239 if change.item_kind == svn.core.svn_node_dir:
249 if change.item_kind == svn.core.svn_node_dir:
240 continue
250 continue
241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
251 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
242 added.append(path)
252 added.append(path)
243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
253 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
244 svn.repos.CHANGE_ACTION_REPLACE]:
254 svn.repos.CHANGE_ACTION_REPLACE]:
245 changed.append(path)
255 changed.append(path)
246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
256 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
247 removed.append(path)
257 removed.append(path)
248 else:
258 else:
249 raise NotImplementedError(
259 raise NotImplementedError(
250 "Action %s not supported on path %s" % (
260 "Action %s not supported on path %s" % (
251 change.action, path))
261 change.action, path))
252
262
253 changes = {
263 changes = {
254 'added': added,
264 'added': added,
255 'changed': changed,
265 'changed': changed,
256 'removed': removed,
266 'removed': removed,
257 }
267 }
258 return changes
268 return changes
259
269
260 def node_history(self, wire, path, revision, limit):
270 def node_history(self, wire, path, revision, limit):
261 cross_copies = False
271 cross_copies = False
262 repo = self._factory.repo(wire)
272 repo = self._factory.repo(wire)
263 fsobj = svn.repos.fs(repo)
273 fsobj = svn.repos.fs(repo)
264 rev_root = svn.fs.revision_root(fsobj, revision)
274 rev_root = svn.fs.revision_root(fsobj, revision)
265
275
266 history_revisions = []
276 history_revisions = []
267 history = svn.fs.node_history(rev_root, path)
277 history = svn.fs.node_history(rev_root, path)
268 history = svn.fs.history_prev(history, cross_copies)
278 history = svn.fs.history_prev(history, cross_copies)
269 while history:
279 while history:
270 __, node_revision = svn.fs.history_location(history)
280 __, node_revision = svn.fs.history_location(history)
271 history_revisions.append(node_revision)
281 history_revisions.append(node_revision)
272 if limit and len(history_revisions) >= limit:
282 if limit and len(history_revisions) >= limit:
273 break
283 break
274 history = svn.fs.history_prev(history, cross_copies)
284 history = svn.fs.history_prev(history, cross_copies)
275 return history_revisions
285 return history_revisions
276
286
277 def node_properties(self, wire, path, revision):
287 def node_properties(self, wire, path, revision):
278 repo = self._factory.repo(wire)
288 repo = self._factory.repo(wire)
279 fsobj = svn.repos.fs(repo)
289 fsobj = svn.repos.fs(repo)
280 rev_root = svn.fs.revision_root(fsobj, revision)
290 rev_root = svn.fs.revision_root(fsobj, revision)
281 return svn.fs.node_proplist(rev_root, path)
291 return svn.fs.node_proplist(rev_root, path)
282
292
283 def file_annotate(self, wire, path, revision):
293 def file_annotate(self, wire, path, revision):
284 abs_path = 'file://' + urllib.pathname2url(
294 abs_path = 'file://' + urllib.pathname2url(
285 vcspath.join(wire['path'], path))
295 vcspath.join(wire['path'], path))
286 file_uri = svn.core.svn_path_canonicalize(abs_path)
296 file_uri = svn.core.svn_path_canonicalize(abs_path)
287
297
288 start_rev = svn_opt_revision_value_t(0)
298 start_rev = svn_opt_revision_value_t(0)
289 peg_rev = svn_opt_revision_value_t(revision)
299 peg_rev = svn_opt_revision_value_t(revision)
290 end_rev = peg_rev
300 end_rev = peg_rev
291
301
292 annotations = []
302 annotations = []
293
303
294 def receiver(line_no, revision, author, date, line, pool):
304 def receiver(line_no, revision, author, date, line, pool):
295 annotations.append((line_no, revision, line))
305 annotations.append((line_no, revision, line))
296
306
297 # TODO: Cannot use blame5, missing typemap function in the swig code
307 # TODO: Cannot use blame5, missing typemap function in the swig code
298 try:
308 try:
299 svn.client.blame2(
309 svn.client.blame2(
300 file_uri, peg_rev, start_rev, end_rev,
310 file_uri, peg_rev, start_rev, end_rev,
301 receiver, svn.client.create_context())
311 receiver, svn.client.create_context())
302 except svn.core.SubversionException as exc:
312 except svn.core.SubversionException as exc:
303 log.exception("Error during blame operation.")
313 log.exception("Error during blame operation.")
304 raise Exception(
314 raise Exception(
305 "Blame not supported or file does not exist at path %s. "
315 "Blame not supported or file does not exist at path %s. "
306 "Error %s." % (path, exc))
316 "Error %s." % (path, exc))
307
317
308 return annotations
318 return annotations
309
319
310 def get_node_type(self, wire, path, rev=None):
320 def get_node_type(self, wire, path, rev=None):
311 repo = self._factory.repo(wire)
321 repo = self._factory.repo(wire)
312 fs_ptr = svn.repos.fs(repo)
322 fs_ptr = svn.repos.fs(repo)
313 if rev is None:
323 if rev is None:
314 rev = svn.fs.youngest_rev(fs_ptr)
324 rev = svn.fs.youngest_rev(fs_ptr)
315 root = svn.fs.revision_root(fs_ptr, rev)
325 root = svn.fs.revision_root(fs_ptr, rev)
316 node = svn.fs.check_path(root, path)
326 node = svn.fs.check_path(root, path)
317 return NODE_TYPE_MAPPING.get(node, None)
327 return NODE_TYPE_MAPPING.get(node, None)
318
328
319 def get_nodes(self, wire, path, revision=None):
329 def get_nodes(self, wire, path, revision=None):
320 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
321 fsobj = svn.repos.fs(repo)
331 fsobj = svn.repos.fs(repo)
322 if revision is None:
332 if revision is None:
323 revision = svn.fs.youngest_rev(fsobj)
333 revision = svn.fs.youngest_rev(fsobj)
324 root = svn.fs.revision_root(fsobj, revision)
334 root = svn.fs.revision_root(fsobj, revision)
325 entries = svn.fs.dir_entries(root, path)
335 entries = svn.fs.dir_entries(root, path)
326 result = []
336 result = []
327 for entry_path, entry_info in entries.iteritems():
337 for entry_path, entry_info in entries.iteritems():
328 result.append(
338 result.append(
329 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
330 return result
340 return result
331
341
332 def get_file_content(self, wire, path, rev=None):
342 def get_file_content(self, wire, path, rev=None):
333 repo = self._factory.repo(wire)
343 repo = self._factory.repo(wire)
334 fsobj = svn.repos.fs(repo)
344 fsobj = svn.repos.fs(repo)
335 if rev is None:
345 if rev is None:
336 rev = svn.fs.youngest_revision(fsobj)
346 rev = svn.fs.youngest_revision(fsobj)
337 root = svn.fs.revision_root(fsobj, rev)
347 root = svn.fs.revision_root(fsobj, rev)
338 content = svn.core.Stream(svn.fs.file_contents(root, path))
348 content = svn.core.Stream(svn.fs.file_contents(root, path))
339 return content.read()
349 return content.read()
340
350
341 def get_file_size(self, wire, path, revision=None):
351 def get_file_size(self, wire, path, revision=None):
342 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
343 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
344 if revision is None:
354 if revision is None:
345 revision = svn.fs.youngest_revision(fsobj)
355 revision = svn.fs.youngest_revision(fsobj)
346 root = svn.fs.revision_root(fsobj, revision)
356 root = svn.fs.revision_root(fsobj, revision)
347 size = svn.fs.file_length(root, path)
357 size = svn.fs.file_length(root, path)
348 return size
358 return size
349
359
350 def create_repository(self, wire, compatible_version=None):
360 def create_repository(self, wire, compatible_version=None):
351 log.info('Creating Subversion repository in path "%s"', wire['path'])
361 log.info('Creating Subversion repository in path "%s"', wire['path'])
352 self._factory.repo(wire, create=True,
362 self._factory.repo(wire, create=True,
353 compatible_version=compatible_version)
363 compatible_version=compatible_version)
354
364
355 def get_url_and_credentials(self, src_url):
365 def get_url_and_credentials(self, src_url):
356 obj = urlparse.urlparse(src_url)
366 obj = urlparse.urlparse(src_url)
357 username = obj.username or None
367 username = obj.username or None
358 password = obj.password or None
368 password = obj.password or None
359 return username, password, src_url
369 return username, password, src_url
360
370
361 def import_remote_repository(self, wire, src_url):
371 def import_remote_repository(self, wire, src_url):
362 repo_path = wire['path']
372 repo_path = wire['path']
363 if not self.is_path_valid_repository(wire, repo_path):
373 if not self.is_path_valid_repository(wire, repo_path):
364 raise Exception(
374 raise Exception(
365 "Path %s is not a valid Subversion repository." % repo_path)
375 "Path %s is not a valid Subversion repository." % repo_path)
366
376
367 username, password, src_url = self.get_url_and_credentials(src_url)
377 username, password, src_url = self.get_url_and_credentials(src_url)
368 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
378 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
369 '--trust-server-cert-failures=unknown-ca']
379 '--trust-server-cert-failures=unknown-ca']
370 if username and password:
380 if username and password:
371 rdump_cmd += ['--username', username, '--password', password]
381 rdump_cmd += ['--username', username, '--password', password]
372 rdump_cmd += [src_url]
382 rdump_cmd += [src_url]
373
383
374 rdump = subprocess.Popen(
384 rdump = subprocess.Popen(
375 rdump_cmd,
385 rdump_cmd,
376 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
386 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
377 load = subprocess.Popen(
387 load = subprocess.Popen(
378 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
388 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
379
389
380 # TODO: johbo: This can be a very long operation, might be better
390 # TODO: johbo: This can be a very long operation, might be better
381 # to track some kind of status and provide an api to check if the
391 # to track some kind of status and provide an api to check if the
382 # import is done.
392 # import is done.
383 rdump.wait()
393 rdump.wait()
384 load.wait()
394 load.wait()
385
395
386 log.debug('Return process ended with code: %s', rdump.returncode)
396 log.debug('Return process ended with code: %s', rdump.returncode)
387 if rdump.returncode != 0:
397 if rdump.returncode != 0:
388 errors = rdump.stderr.read()
398 errors = rdump.stderr.read()
389 log.error('svnrdump dump failed: statuscode %s: message: %s',
399 log.error('svnrdump dump failed: statuscode %s: message: %s',
390 rdump.returncode, errors)
400 rdump.returncode, errors)
391 reason = 'UNKNOWN'
401 reason = 'UNKNOWN'
392 if 'svnrdump: E230001:' in errors:
402 if 'svnrdump: E230001:' in errors:
393 reason = 'INVALID_CERTIFICATE'
403 reason = 'INVALID_CERTIFICATE'
394
404
395 if reason == 'UNKNOWN':
405 if reason == 'UNKNOWN':
396 reason = 'UNKNOWN:{}'.format(errors)
406 reason = 'UNKNOWN:{}'.format(errors)
397 raise Exception(
407 raise Exception(
398 'Failed to dump the remote repository from %s. Reason:%s' % (
408 'Failed to dump the remote repository from %s. Reason:%s' % (
399 src_url, reason))
409 src_url, reason))
400 if load.returncode != 0:
410 if load.returncode != 0:
401 raise Exception(
411 raise Exception(
402 'Failed to load the dump of remote repository from %s.' %
412 'Failed to load the dump of remote repository from %s.' %
403 (src_url, ))
413 (src_url, ))
404
414
405 def commit(self, wire, message, author, timestamp, updated, removed):
415 def commit(self, wire, message, author, timestamp, updated, removed):
406 assert isinstance(message, str)
416 assert isinstance(message, str)
407 assert isinstance(author, str)
417 assert isinstance(author, str)
408
418
409 repo = self._factory.repo(wire)
419 repo = self._factory.repo(wire)
410 fsobj = svn.repos.fs(repo)
420 fsobj = svn.repos.fs(repo)
411
421
412 rev = svn.fs.youngest_rev(fsobj)
422 rev = svn.fs.youngest_rev(fsobj)
413 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
423 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
414 txn_root = svn.fs.txn_root(txn)
424 txn_root = svn.fs.txn_root(txn)
415
425
416 for node in updated:
426 for node in updated:
417 TxnNodeProcessor(node, txn_root).update()
427 TxnNodeProcessor(node, txn_root).update()
418 for node in removed:
428 for node in removed:
419 TxnNodeProcessor(node, txn_root).remove()
429 TxnNodeProcessor(node, txn_root).remove()
420
430
421 commit_id = svn.repos.fs_commit_txn(repo, txn)
431 commit_id = svn.repos.fs_commit_txn(repo, txn)
422
432
423 if timestamp:
433 if timestamp:
424 apr_time = apr_time_t(timestamp)
434 apr_time = apr_time_t(timestamp)
425 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
435 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
426 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
436 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
427
437
428 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
438 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
429 return commit_id
439 return commit_id
430
440
431 def diff(self, wire, rev1, rev2, path1=None, path2=None,
441 def diff(self, wire, rev1, rev2, path1=None, path2=None,
432 ignore_whitespace=False, context=3):
442 ignore_whitespace=False, context=3):
433
443
434 wire.update(cache=False)
444 wire.update(cache=False)
435 repo = self._factory.repo(wire)
445 repo = self._factory.repo(wire)
436 diff_creator = SvnDiffer(
446 diff_creator = SvnDiffer(
437 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
447 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
438 try:
448 try:
439 return diff_creator.generate_diff()
449 return diff_creator.generate_diff()
440 except svn.core.SubversionException as e:
450 except svn.core.SubversionException as e:
441 log.exception(
451 log.exception(
442 "Error during diff operation operation. "
452 "Error during diff operation operation. "
443 "Path might not exist %s, %s" % (path1, path2))
453 "Path might not exist %s, %s" % (path1, path2))
444 return ""
454 return ""
445
455
446 @reraise_safe_exceptions
456 @reraise_safe_exceptions
447 def is_large_file(self, wire, path):
457 def is_large_file(self, wire, path):
448 return False
458 return False
449
459
450 @reraise_safe_exceptions
460 @reraise_safe_exceptions
461 def run_svn_command(self, wire, cmd, **opts):
462 path = wire.get('path', None)
463
464 if path and os.path.isdir(path):
465 opts['cwd'] = path
466
467 safe_call = False
468 if '_safe' in opts:
469 safe_call = True
470
471 svnenv = os.environ.copy()
472 svnenv.update(opts.pop('extra_env', {}))
473
474 _opts = {'env': svnenv, 'shell': False}
475
476 try:
477 _opts.update(opts)
478 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
479
480 return ''.join(p), ''.join(p.error)
481 except (EnvironmentError, OSError) as err:
482 cmd = ' '.join(cmd) # human friendly CMD
483 tb_err = ("Couldn't run svn command (%s).\n"
484 "Original error was:%s\n"
485 "Call options:%s\n"
486 % (cmd, err, _opts))
487 log.exception(tb_err)
488 if safe_call:
489 return '', err
490 else:
491 raise exceptions.VcsException()(tb_err)
492
493 @reraise_safe_exceptions
451 def install_hooks(self, wire, force=False):
494 def install_hooks(self, wire, force=False):
452 from vcsserver.hook_utils import install_svn_hooks
495 from vcsserver.hook_utils import install_svn_hooks
453 repo_path = wire['path']
496 repo_path = wire['path']
454 binary_dir = settings.BINARY_DIR
497 binary_dir = settings.BINARY_DIR
455 executable = None
498 executable = None
456 if binary_dir:
499 if binary_dir:
457 executable = os.path.join(binary_dir, 'python')
500 executable = os.path.join(binary_dir, 'python')
458 return install_svn_hooks(
501 return install_svn_hooks(
459 repo_path, executable=executable, force_create=force)
502 repo_path, executable=executable, force_create=force)
460
503
461 @reraise_safe_exceptions
504 @reraise_safe_exceptions
462 def get_hooks_info(self, wire):
505 def get_hooks_info(self, wire):
463 from vcsserver.hook_utils import (
506 from vcsserver.hook_utils import (
464 get_svn_pre_hook_version, get_svn_post_hook_version)
507 get_svn_pre_hook_version, get_svn_post_hook_version)
465 repo_path = wire['path']
508 repo_path = wire['path']
466 return {
509 return {
467 'pre_version': get_svn_pre_hook_version(repo_path),
510 'pre_version': get_svn_pre_hook_version(repo_path),
468 'post_version': get_svn_post_hook_version(repo_path),
511 'post_version': get_svn_post_hook_version(repo_path),
469 }
512 }
470
513
471
514
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # Fall back to the target path when no explicit source is given.
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        """Reject diffs between nodes of different kinds (file vs dir)."""
        both_present = (
            self.tgt_kind != svn.core.svn_node_none and
            self.src_kind != svn.core.svn_node_none)
        if both_present and self.src_kind != self.tgt_kind:
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Render the full diff text for the configured node pair."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        """Replay the tree delta and emit one node diff per changed path."""
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        """Emit the diff for a single file node (add/delete/change)."""
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        else:
            change = None
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write the git-style diff header and hunks for one node."""
        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
            buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
                'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        """Look up svn:mime-type, preferring the target side of the diff."""
        try:
            return svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            return svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)

    def _svn_readlines(self, fs_root, node_path):
        """Read a file/symlink node as a list of lines (empty for binary)."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
631
674
632
675
633
676
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        # List of (path, node_kind, action) tuples, in replay order.
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        """Record a deleted entry (kind unknown at this point)."""
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        """Record an added file."""
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        """Record a modified file."""
        self.changes.append((path, 'file', 'change'))
652
695
653
696
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for ``dir_delta2`` that grants access to every path."""
    return True
656
699
657
700
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create/overwrite the node: parents, file, content, properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        """Create every missing ancestor directory, outermost first."""
        missing = []
        parent = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(parent):
            missing.append(parent)
            parent = vcspath.dirname(parent)

        for directory in reversed(missing):
            log.debug('Creating missing directory "%s"', directory)
            svn.fs.make_dir(self.txn_root, directory)

    def _svn_path_exists(self, path):
        """True when ``path`` exists (any node kind) in the transaction."""
        return svn.fs.check_path(
            self.txn_root, path) != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        """Create an empty file node when the path is not present yet."""
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        """Replace the file content with the node's full text delta."""
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        """Apply any svn properties attached to the node dict."""
        for key, value in self.node.get('properties', {}).iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
714
757
715
758
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t

    ``apr_time_t`` is an integer count of microseconds since the epoch;
    the previous implementation returned a float (``timestamp * 1E6``),
    which is not a valid apr_time_t value. Truncate to int instead.
    """
    return int(timestamp * 1E6)
721
764
722
765
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.

    Returns an `svn_opt_revision_t` of kind ``svn_opt_revision_number``
    whose value carries ``num``.
    """
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision.value = value
    return revision
General Comments 0
You need to be logged in to leave comments. Login now