##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r702:778db25c merge stable
parent child Browse files
Show More
@@ -0,0 +1,12 b''
1 # contains not directly required libraries we want to pin the version.
2
3 atomicwrites==1.2.1
4 attrs==18.2.0
5 hupper==1.6.1
6 pathlib2==2.3.4
7 pygments==2.4.2
8 psutil==5.5.1
9 pluggy==0.11.0
10 scandir==1.10.0
11 setproctitle==1.1.10
12 venusian==1.2.0
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.16.2
2 current_version = 4.17.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.16.2
12 state = in_progress
13 version = 4.17.0
16 14
@@ -1,152 +1,154 b''
1 1 """
2 2 gunicorn config extension and hooks. Sets additional configuration that is
3 3 available post the .ini config.
4 4
5 5 - workers = ${cpu_number}
6 6 - threads = 1
7 7 - proc_name = ${gunicorn_proc_name}
8 8 - worker_class = sync
9 9 - worker_connections = 10
10 10 - max_requests = 1000
11 11 - max_requests_jitter = 30
12 12 - timeout = 21600
13 13
14 14 """
15 15
16 16 import multiprocessing
17 17 import sys
18 18 import time
19 19 import datetime
20 20 import threading
21 21 import traceback
22 22 from gunicorn.glogging import Logger
23 23
24 24
25 25 # GLOBAL
26 26 errorlog = '-'
27 27 accesslog = '-'
28 28 loglevel = 'debug'
29 29
30 30 # SECURITY
31 31
32 32 # The maximum size of HTTP request line in bytes.
33 limit_request_line = 4094
33 # 0 for unlimited
34 limit_request_line = 0
34 35
35 36 # Limit the number of HTTP headers fields in a request.
36 limit_request_fields = 1024
37 # By default this value is 100 and can’t be larger than 32768.
38 limit_request_fields = 10240
37 39
38 40 # Limit the allowed size of an HTTP request header field.
39 41 # Value is a positive number or 0.
40 42 # Setting it to 0 will allow unlimited header field sizes.
41 43 limit_request_field_size = 0
42 44
43 45
44 46 # Timeout for graceful workers restart.
45 47 # After receiving a restart signal, workers have this much time to finish
46 48 # serving requests. Workers still alive after the timeout (starting from the
47 49 # receipt of the restart signal) are force killed.
48 50 graceful_timeout = 30
49 51
50 52
51 53 # The number of seconds to wait for requests on a Keep-Alive connection.
52 54 # Generally set in the 1-5 seconds range.
53 55 keepalive = 2
54 56
55 57
56 58 # SERVER MECHANICS
57 59 # None == system temp dir
58 60 # worker_tmp_dir is recommended to be set to some tmpfs
59 61 worker_tmp_dir = None
60 62 tmp_upload_dir = None
61 63
62 64 # Custom log format
63 65 access_log_format = (
64 66 '%(t)s [%(p)-8s] GNCRN %(h)-15s rqt:%(L)s %(s)s %(b)-6s "%(m)s:%(U)s %(q)s" usr:%(u)s "%(f)s" "%(a)s"')
65 67
66 68 # self adjust workers based on CPU count
67 69 # workers = multiprocessing.cpu_count() * 2 + 1
68 70
69 71
70 72 def post_fork(server, worker):
71 73 server.log.info("[<%-10s>] WORKER spawned", worker.pid)
72 74
73 75
74 76 def pre_fork(server, worker):
75 77 pass
76 78
77 79
78 80 def pre_exec(server):
79 81 server.log.info("Forked child, re-executing.")
80 82
81 83
82 84 def on_starting(server):
83 85 server.log.info("Server is starting.")
84 86
85 87
86 88 def when_ready(server):
87 89 server.log.info("Server is ready. Spawning workers")
88 90
89 91
90 92 def on_reload(server):
91 93 pass
92 94
93 95
94 96 def worker_int(worker):
95 97 worker.log.info("[<%-10s>] worker received INT or QUIT signal", worker.pid)
96 98
97 99 # get traceback info, on worker crash
98 100 id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
99 101 code = []
100 102 for thread_id, stack in sys._current_frames().items():
101 103 code.append(
102 104 "\n# Thread: %s(%d)" % (id2name.get(thread_id, ""), thread_id))
103 105 for fname, lineno, name, line in traceback.extract_stack(stack):
104 106 code.append('File: "%s", line %d, in %s' % (fname, lineno, name))
105 107 if line:
106 108 code.append(" %s" % (line.strip()))
107 109 worker.log.debug("\n".join(code))
108 110
109 111
110 112 def worker_abort(worker):
111 113 worker.log.info("[<%-10s>] worker received SIGABRT signal", worker.pid)
112 114
113 115
114 116 def worker_exit(server, worker):
115 117 worker.log.info("[<%-10s>] worker exit", worker.pid)
116 118
117 119
118 120 def child_exit(server, worker):
119 121 worker.log.info("[<%-10s>] worker child exit", worker.pid)
120 122
121 123
122 124 def pre_request(worker, req):
123 125 worker.start_time = time.time()
124 126 worker.log.debug(
125 127 "GNCRN PRE WORKER [cnt:%s]: %s %s", worker.nr, req.method, req.path)
126 128
127 129
128 130 def post_request(worker, req, environ, resp):
129 131 total_time = time.time() - worker.start_time
130 132 worker.log.debug(
131 133 "GNCRN POST WORKER [cnt:%s]: %s %s resp: %s, Load Time: %.3fs",
132 134 worker.nr, req.method, req.path, resp.status_code, total_time)
133 135
134 136
135 137 class RhodeCodeLogger(Logger):
136 138 """
137 139 Custom Logger that allows some customization that gunicorn doesn't allow
138 140 """
139 141
140 142 datefmt = r"%Y-%m-%d %H:%M:%S"
141 143
142 144 def __init__(self, cfg):
143 145 Logger.__init__(self, cfg)
144 146
145 147 def now(self):
146 148 """ return date in RhodeCode Log format """
147 149 now = time.time()
148 150 msecs = int((now - long(now)) * 1000)
149 151 return time.strftime(self.datefmt, time.localtime(now)) + '.{0:03d}'.format(msecs)
150 152
151 153
152 154 logger_class = RhodeCodeLogger
@@ -1,3 +1,3 b''
1 1 [pip2nix]
2 requirements = ., -r ./requirements.txt
2 requirements = ., -r ./requirements.txt, -r ./requirements_pinned.txt
3 3 output = ./pkgs/python-packages.nix
@@ -1,955 +1,948 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.2.1";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
12 12 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-18.2.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
23 23 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "configobj" = super.buildPythonPackage {
52 52 name = "configobj-5.0.6";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."six"
56 56 ];
57 57 src = fetchurl {
58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
58 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
59 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.bsdOriginal ];
63 63 };
64 64 };
65 65 "cov-core" = super.buildPythonPackage {
66 66 name = "cov-core-1.15.0";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."coverage"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
73 73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.mit ];
77 77 };
78 78 };
79 79 "coverage" = super.buildPythonPackage {
80 name = "coverage-4.5.1";
80 name = "coverage-4.5.3";
81 81 doCheck = false;
82 82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
84 sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
83 url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
84 sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.asl20 ];
88 88 };
89 89 };
90 90 "decorator" = super.buildPythonPackage {
91 91 name = "decorator-4.1.2";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
95 95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
99 99 };
100 100 };
101 101 "dogpile.cache" = super.buildPythonPackage {
102 102 name = "dogpile.cache-0.7.1";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."decorator"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz";
109 109 sha256 = "0caazmrzhnfqb5yrp8myhw61ny637jj69wcngrpbvi31jlcpy6v9";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.bsdOriginal ];
113 113 };
114 114 };
115 115 "dogpile.core" = super.buildPythonPackage {
116 116 name = "dogpile.core-0.4.1";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
120 120 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.bsdOriginal ];
124 124 };
125 125 };
126 126 "dulwich" = super.buildPythonPackage {
127 127 name = "dulwich-0.13.0";
128 128 doCheck = false;
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
131 131 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.gpl2Plus ];
135 135 };
136 136 };
137 137 "enum34" = super.buildPythonPackage {
138 138 name = "enum34-1.1.6";
139 139 doCheck = false;
140 140 src = fetchurl {
141 141 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
142 142 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.bsdOriginal ];
146 146 };
147 147 };
148 148 "funcsigs" = super.buildPythonPackage {
149 149 name = "funcsigs-1.0.2";
150 150 doCheck = false;
151 151 src = fetchurl {
152 152 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
153 153 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
154 154 };
155 155 meta = {
156 156 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
157 157 };
158 158 };
159 159 "gevent" = super.buildPythonPackage {
160 160 name = "gevent-1.4.0";
161 161 doCheck = false;
162 162 propagatedBuildInputs = [
163 163 self."greenlet"
164 164 ];
165 165 src = fetchurl {
166 166 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
167 167 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
168 168 };
169 169 meta = {
170 170 license = [ pkgs.lib.licenses.mit ];
171 171 };
172 172 };
173 173 "gprof2dot" = super.buildPythonPackage {
174 174 name = "gprof2dot-2017.9.19";
175 175 doCheck = false;
176 176 src = fetchurl {
177 177 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
178 178 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
179 179 };
180 180 meta = {
181 181 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
182 182 };
183 183 };
184 184 "greenlet" = super.buildPythonPackage {
185 185 name = "greenlet-0.4.15";
186 186 doCheck = false;
187 187 src = fetchurl {
188 188 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
189 189 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
190 190 };
191 191 meta = {
192 192 license = [ pkgs.lib.licenses.mit ];
193 193 };
194 194 };
195 195 "gunicorn" = super.buildPythonPackage {
196 196 name = "gunicorn-19.9.0";
197 197 doCheck = false;
198 198 src = fetchurl {
199 199 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
200 200 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
201 201 };
202 202 meta = {
203 203 license = [ pkgs.lib.licenses.mit ];
204 204 };
205 205 };
206 206 "hg-evolve" = super.buildPythonPackage {
207 name = "hg-evolve-8.0.1";
207 name = "hg-evolve-8.5.1";
208 208 doCheck = false;
209 209 src = fetchurl {
210 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
211 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
210 url = "https://files.pythonhosted.org/packages/e3/ce/6594aa403e3464831d4daf20e45fd2e3ef553d968ac13d2c7fa791d4eedd/hg-evolve-8.5.1.tar.gz";
211 sha256 = "09avqn7c1biz97vb1zw91q6nfzydpcqv43mgpfrj7ywp0fscfgf3";
212 212 };
213 213 meta = {
214 214 license = [ { fullName = "GPLv2+"; } ];
215 215 };
216 216 };
217 217 "hgsubversion" = super.buildPythonPackage {
218 218 name = "hgsubversion-1.9.3";
219 219 doCheck = false;
220 220 propagatedBuildInputs = [
221 221 self."mercurial"
222 222 self."subvertpy"
223 223 ];
224 224 src = fetchurl {
225 225 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
226 226 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
227 227 };
228 228 meta = {
229 229 license = [ pkgs.lib.licenses.gpl1 ];
230 230 };
231 231 };
232 232 "hupper" = super.buildPythonPackage {
233 name = "hupper-1.4.2";
233 name = "hupper-1.6.1";
234 234 doCheck = false;
235 235 src = fetchurl {
236 url = "https://files.pythonhosted.org/packages/f1/75/1915dc7650b4867fa3049256e24ca8eddb5989998fcec788cf52b9812dfc/hupper-1.4.2.tar.gz";
237 sha256 = "16vb9fkiaakdpcp6pn56h3w0dwvm67bxq2k2dv4i382qhqwphdzb";
236 url = "https://files.pythonhosted.org/packages/85/d9/e005d357b11249c5d70ddf5b7adab2e4c0da4e8b0531ff146917a04fe6c0/hupper-1.6.1.tar.gz";
237 sha256 = "0d3cvkc8ssgwk54wvhbifj56ry97qi10pfzwfk8vwzzcikbfp3zy";
238 238 };
239 239 meta = {
240 240 license = [ pkgs.lib.licenses.mit ];
241 241 };
242 242 };
243 243 "ipdb" = super.buildPythonPackage {
244 name = "ipdb-0.11";
244 name = "ipdb-0.12";
245 245 doCheck = false;
246 246 propagatedBuildInputs = [
247 247 self."setuptools"
248 248 self."ipython"
249 249 ];
250 250 src = fetchurl {
251 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
252 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
251 url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
252 sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
253 253 };
254 254 meta = {
255 255 license = [ pkgs.lib.licenses.bsdOriginal ];
256 256 };
257 257 };
258 258 "ipython" = super.buildPythonPackage {
259 259 name = "ipython-5.1.0";
260 260 doCheck = false;
261 261 propagatedBuildInputs = [
262 262 self."setuptools"
263 263 self."decorator"
264 264 self."pickleshare"
265 265 self."simplegeneric"
266 266 self."traitlets"
267 267 self."prompt-toolkit"
268 268 self."pygments"
269 269 self."pexpect"
270 270 self."backports.shutil-get-terminal-size"
271 271 self."pathlib2"
272 272 self."pexpect"
273 273 ];
274 274 src = fetchurl {
275 275 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
276 276 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
277 277 };
278 278 meta = {
279 279 license = [ pkgs.lib.licenses.bsdOriginal ];
280 280 };
281 281 };
282 282 "ipython-genutils" = super.buildPythonPackage {
283 283 name = "ipython-genutils-0.2.0";
284 284 doCheck = false;
285 285 src = fetchurl {
286 286 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
287 287 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
288 288 };
289 289 meta = {
290 290 license = [ pkgs.lib.licenses.bsdOriginal ];
291 291 };
292 292 };
293 293 "mako" = super.buildPythonPackage {
294 294 name = "mako-1.0.7";
295 295 doCheck = false;
296 296 propagatedBuildInputs = [
297 297 self."markupsafe"
298 298 ];
299 299 src = fetchurl {
300 300 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
301 301 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
302 302 };
303 303 meta = {
304 304 license = [ pkgs.lib.licenses.mit ];
305 305 };
306 306 };
307 307 "markupsafe" = super.buildPythonPackage {
308 308 name = "markupsafe-1.1.0";
309 309 doCheck = false;
310 310 src = fetchurl {
311 311 url = "https://files.pythonhosted.org/packages/ac/7e/1b4c2e05809a4414ebce0892fe1e32c14ace86ca7d50c70f00979ca9b3a3/MarkupSafe-1.1.0.tar.gz";
312 312 sha256 = "1lxirjypbdd3l9jl4vliilhfnhy7c7f2vlldqg1b0i74khn375sf";
313 313 };
314 314 meta = {
315 315 license = [ pkgs.lib.licenses.bsdOriginal ];
316 316 };
317 317 };
318 318 "mercurial" = super.buildPythonPackage {
319 name = "mercurial-4.6.2";
319 name = "mercurial-4.9.1";
320 320 doCheck = false;
321 321 src = fetchurl {
322 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
323 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
322 url = "https://files.pythonhosted.org/packages/60/58/a1c52d5f5c0b755e231faf7c4f507dc51fe26d979d36346bc9d28f4f8a75/mercurial-4.9.1.tar.gz";
323 sha256 = "0iybbkd9add066729zg01kwz5hhc1s6lhp9rrnsmzq6ihyxj3p8v";
324 324 };
325 325 meta = {
326 326 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
327 327 };
328 328 };
329 329 "mock" = super.buildPythonPackage {
330 330 name = "mock-1.0.1";
331 331 doCheck = false;
332 332 src = fetchurl {
333 333 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
334 334 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
335 335 };
336 336 meta = {
337 337 license = [ pkgs.lib.licenses.bsdOriginal ];
338 338 };
339 339 };
340 340 "more-itertools" = super.buildPythonPackage {
341 341 name = "more-itertools-5.0.0";
342 342 doCheck = false;
343 343 propagatedBuildInputs = [
344 344 self."six"
345 345 ];
346 346 src = fetchurl {
347 347 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
348 348 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
349 349 };
350 350 meta = {
351 351 license = [ pkgs.lib.licenses.mit ];
352 352 };
353 353 };
354 354 "msgpack-python" = super.buildPythonPackage {
355 355 name = "msgpack-python-0.5.6";
356 356 doCheck = false;
357 357 src = fetchurl {
358 358 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
359 359 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
360 360 };
361 361 meta = {
362 362 license = [ pkgs.lib.licenses.asl20 ];
363 363 };
364 364 };
365 365 "pastedeploy" = super.buildPythonPackage {
366 366 name = "pastedeploy-2.0.1";
367 367 doCheck = false;
368 368 src = fetchurl {
369 369 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
370 370 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
371 371 };
372 372 meta = {
373 373 license = [ pkgs.lib.licenses.mit ];
374 374 };
375 375 };
376 376 "pathlib2" = super.buildPythonPackage {
377 name = "pathlib2-2.3.3";
377 name = "pathlib2-2.3.4";
378 378 doCheck = false;
379 379 propagatedBuildInputs = [
380 380 self."six"
381 381 self."scandir"
382 382 ];
383 383 src = fetchurl {
384 url = "https://files.pythonhosted.org/packages/bf/d7/a2568f4596b75d2c6e2b4094a7e64f620decc7887f69a1f2811931ea15b9/pathlib2-2.3.3.tar.gz";
385 sha256 = "0hpp92vqqgcd8h92msm9slv161b1q160igjwnkf2ag6cx0c96695";
384 url = "https://files.pythonhosted.org/packages/b5/f4/9c7cc726ece2498b6c8b62d3262aa43f59039b953fe23c9964ac5e18d40b/pathlib2-2.3.4.tar.gz";
385 sha256 = "1y0f9rkm1924zrc5dn4bwxlhgdkbml82lkcc28l5rgmr7d918q24";
386 386 };
387 387 meta = {
388 388 license = [ pkgs.lib.licenses.mit ];
389 389 };
390 390 };
391 391 "pexpect" = super.buildPythonPackage {
392 name = "pexpect-4.6.0";
392 name = "pexpect-4.7.0";
393 393 doCheck = false;
394 394 propagatedBuildInputs = [
395 395 self."ptyprocess"
396 396 ];
397 397 src = fetchurl {
398 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
399 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
398 url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
399 sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
400 400 };
401 401 meta = {
402 402 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
403 403 };
404 404 };
405 405 "pickleshare" = super.buildPythonPackage {
406 406 name = "pickleshare-0.7.5";
407 407 doCheck = false;
408 408 propagatedBuildInputs = [
409 409 self."pathlib2"
410 410 ];
411 411 src = fetchurl {
412 412 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
413 413 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
414 414 };
415 415 meta = {
416 416 license = [ pkgs.lib.licenses.mit ];
417 417 };
418 418 };
419 419 "plaster" = super.buildPythonPackage {
420 420 name = "plaster-1.0";
421 421 doCheck = false;
422 422 propagatedBuildInputs = [
423 423 self."setuptools"
424 424 ];
425 425 src = fetchurl {
426 426 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
427 427 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
428 428 };
429 429 meta = {
430 430 license = [ pkgs.lib.licenses.mit ];
431 431 };
432 432 };
433 433 "plaster-pastedeploy" = super.buildPythonPackage {
434 name = "plaster-pastedeploy-0.6";
434 name = "plaster-pastedeploy-0.7";
435 435 doCheck = false;
436 436 propagatedBuildInputs = [
437 437 self."pastedeploy"
438 438 self."plaster"
439 439 ];
440 440 src = fetchurl {
441 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
442 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
441 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
442 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
443 443 };
444 444 meta = {
445 445 license = [ pkgs.lib.licenses.mit ];
446 446 };
447 447 };
448 448 "pluggy" = super.buildPythonPackage {
449 name = "pluggy-0.8.1";
449 name = "pluggy-0.11.0";
450 450 doCheck = false;
451 451 src = fetchurl {
452 url = "https://files.pythonhosted.org/packages/38/e1/83b10c17688af7b2998fa5342fec58ecbd2a5a7499f31e606ae6640b71ac/pluggy-0.8.1.tar.gz";
453 sha256 = "05l6g42p9ilmabw0hlbiyxy6gyzjri41m5l11a8dzgvi77q35p4d";
452 url = "https://files.pythonhosted.org/packages/0d/a1/862ab336e8128fde20981d2c1aa8506693412daf5083b1911d539412676b/pluggy-0.11.0.tar.gz";
453 sha256 = "10511a54dvafw1jrk75mrhml53c7b7w4yaw7241696lc2hfvr895";
454 454 };
455 455 meta = {
456 456 license = [ pkgs.lib.licenses.mit ];
457 457 };
458 458 };
459 459 "prompt-toolkit" = super.buildPythonPackage {
460 name = "prompt-toolkit-1.0.15";
460 name = "prompt-toolkit-1.0.16";
461 461 doCheck = false;
462 462 propagatedBuildInputs = [
463 463 self."six"
464 464 self."wcwidth"
465 465 ];
466 466 src = fetchurl {
467 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
468 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
467 url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
468 sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
469 469 };
470 470 meta = {
471 471 license = [ pkgs.lib.licenses.bsdOriginal ];
472 472 };
473 473 };
474 474 "psutil" = super.buildPythonPackage {
475 name = "psutil-5.4.8";
475 name = "psutil-5.5.1";
476 476 doCheck = false;
477 477 src = fetchurl {
478 url = "https://files.pythonhosted.org/packages/e3/58/0eae6e4466e5abf779d7e2b71fac7fba5f59e00ea36ddb3ed690419ccb0f/psutil-5.4.8.tar.gz";
479 sha256 = "1hyna338sml2cl1mfb2gs89np18z27mvyhmq4ifh22x07n7mq9kf";
478 url = "https://files.pythonhosted.org/packages/c7/01/7c30b247cdc5ba29623faa5c8cf1f1bbf7e041783c340414b0ed7e067c64/psutil-5.5.1.tar.gz";
479 sha256 = "045qaqvn6k90bj5bcy259yrwcd2afgznaav3sfhphy9b8ambzkkj";
480 480 };
481 481 meta = {
482 482 license = [ pkgs.lib.licenses.bsdOriginal ];
483 483 };
484 484 };
485 485 "ptyprocess" = super.buildPythonPackage {
486 486 name = "ptyprocess-0.6.0";
487 487 doCheck = false;
488 488 src = fetchurl {
489 489 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
490 490 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
491 491 };
492 492 meta = {
493 493 license = [ ];
494 494 };
495 495 };
496 496 "py" = super.buildPythonPackage {
497 497 name = "py-1.6.0";
498 498 doCheck = false;
499 499 src = fetchurl {
500 500 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
501 501 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
502 502 };
503 503 meta = {
504 504 license = [ pkgs.lib.licenses.mit ];
505 505 };
506 506 };
507 507 "pygments" = super.buildPythonPackage {
508 name = "pygments-2.3.1";
508 name = "pygments-2.4.2";
509 509 doCheck = false;
510 510 src = fetchurl {
511 url = "https://files.pythonhosted.org/packages/64/69/413708eaf3a64a6abb8972644e0f20891a55e621c6759e2c3f3891e05d63/Pygments-2.3.1.tar.gz";
512 sha256 = "0ji87g09jph8jqcvclgb02qvxasdnr9pzvk90rl66d90yqcxmyjz";
511 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
512 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
513 513 };
514 514 meta = {
515 515 license = [ pkgs.lib.licenses.bsdOriginal ];
516 516 };
517 517 };
518 518 "pyramid" = super.buildPythonPackage {
519 name = "pyramid-1.10.1";
519 name = "pyramid-1.10.4";
520 520 doCheck = false;
521 521 propagatedBuildInputs = [
522 522 self."hupper"
523 523 self."plaster"
524 524 self."plaster-pastedeploy"
525 525 self."setuptools"
526 526 self."translationstring"
527 527 self."venusian"
528 528 self."webob"
529 529 self."zope.deprecation"
530 530 self."zope.interface"
531 531 self."repoze.lru"
532 532 ];
533 533 src = fetchurl {
534 url = "https://files.pythonhosted.org/packages/0a/3e/22e3ac9be1b70a01139adba8906ee4b8f628bb469fea3c52f6c97b73063c/pyramid-1.10.1.tar.gz";
535 sha256 = "1h5105nfh6rsrfjiyw20aavyibj36la3hajy6vh1fa77xb4y3hrp";
534 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
535 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
536 536 };
537 537 meta = {
538 538 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
539 539 };
540 540 };
541 541 "pyramid-mako" = super.buildPythonPackage {
542 542 name = "pyramid-mako-1.0.2";
543 543 doCheck = false;
544 544 propagatedBuildInputs = [
545 545 self."pyramid"
546 546 self."mako"
547 547 ];
548 548 src = fetchurl {
549 549 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
550 550 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
551 551 };
552 552 meta = {
553 553 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
554 554 };
555 555 };
556 556 "pytest" = super.buildPythonPackage {
557 557 name = "pytest-3.8.2";
558 558 doCheck = false;
559 559 propagatedBuildInputs = [
560 560 self."py"
561 561 self."six"
562 562 self."setuptools"
563 563 self."attrs"
564 564 self."more-itertools"
565 565 self."atomicwrites"
566 566 self."pluggy"
567 567 self."funcsigs"
568 568 self."pathlib2"
569 569 ];
570 570 src = fetchurl {
571 571 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
572 572 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
573 573 };
574 574 meta = {
575 575 license = [ pkgs.lib.licenses.mit ];
576 576 };
577 577 };
578 578 "pytest-cov" = super.buildPythonPackage {
579 579 name = "pytest-cov-2.6.0";
580 580 doCheck = false;
581 581 propagatedBuildInputs = [
582 582 self."pytest"
583 583 self."coverage"
584 584 ];
585 585 src = fetchurl {
586 586 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
587 587 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
588 588 };
589 589 meta = {
590 590 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
591 591 };
592 592 };
593 593 "pytest-profiling" = super.buildPythonPackage {
594 594 name = "pytest-profiling-1.3.0";
595 595 doCheck = false;
596 596 propagatedBuildInputs = [
597 597 self."six"
598 598 self."pytest"
599 599 self."gprof2dot"
600 600 ];
601 601 src = fetchurl {
602 602 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
603 603 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
604 604 };
605 605 meta = {
606 606 license = [ pkgs.lib.licenses.mit ];
607 607 };
608 608 };
609 609 "pytest-runner" = super.buildPythonPackage {
610 610 name = "pytest-runner-4.2";
611 611 doCheck = false;
612 612 src = fetchurl {
613 613 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
614 614 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
615 615 };
616 616 meta = {
617 617 license = [ pkgs.lib.licenses.mit ];
618 618 };
619 619 };
620 620 "pytest-sugar" = super.buildPythonPackage {
621 621 name = "pytest-sugar-0.9.1";
622 622 doCheck = false;
623 623 propagatedBuildInputs = [
624 624 self."pytest"
625 625 self."termcolor"
626 626 ];
627 627 src = fetchurl {
628 628 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
629 629 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
630 630 };
631 631 meta = {
632 632 license = [ pkgs.lib.licenses.bsdOriginal ];
633 633 };
634 634 };
635 635 "pytest-timeout" = super.buildPythonPackage {
636 636 name = "pytest-timeout-1.3.2";
637 637 doCheck = false;
638 638 propagatedBuildInputs = [
639 639 self."pytest"
640 640 ];
641 641 src = fetchurl {
642 642 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
643 643 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
644 644 };
645 645 meta = {
646 646 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
647 647 };
648 648 };
649 649 "repoze.lru" = super.buildPythonPackage {
650 650 name = "repoze.lru-0.7";
651 651 doCheck = false;
652 652 src = fetchurl {
653 653 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
654 654 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
655 655 };
656 656 meta = {
657 657 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
658 658 };
659 659 };
660 660 "rhodecode-vcsserver" = super.buildPythonPackage {
661 name = "rhodecode-vcsserver-4.16.2";
661 name = "rhodecode-vcsserver-4.17.0";
662 662 buildInputs = [
663 663 self."pytest"
664 664 self."py"
665 665 self."pytest-cov"
666 666 self."pytest-sugar"
667 667 self."pytest-runner"
668 668 self."pytest-profiling"
669 669 self."pytest-timeout"
670 670 self."gprof2dot"
671 671 self."mock"
672 672 self."cov-core"
673 673 self."coverage"
674 674 self."webtest"
675 675 self."beautifulsoup4"
676 676 self."configobj"
677 677 ];
678 678 doCheck = true;
679 679 propagatedBuildInputs = [
680 680 self."configobj"
681 self."atomicwrites"
682 self."attrs"
683 681 self."dogpile.cache"
684 682 self."dogpile.core"
685 683 self."decorator"
686 684 self."dulwich"
687 685 self."hgsubversion"
688 686 self."hg-evolve"
689 687 self."mako"
690 688 self."markupsafe"
691 689 self."mercurial"
692 690 self."msgpack-python"
693 691 self."pastedeploy"
694 self."psutil"
695 692 self."pyramid"
696 693 self."pyramid-mako"
697 self."pygments"
698 self."pathlib2"
699 694 self."repoze.lru"
700 695 self."simplejson"
701 696 self."subprocess32"
702 697 self."subvertpy"
703 698 self."six"
704 699 self."translationstring"
705 700 self."webob"
706 701 self."zope.deprecation"
707 702 self."zope.interface"
708 self."venusian"
709 703 self."gevent"
710 704 self."greenlet"
711 705 self."gunicorn"
712 706 self."waitress"
713 self."setproctitle"
714 707 self."ipdb"
715 708 self."ipython"
716 709 self."pytest"
717 710 self."py"
718 711 self."pytest-cov"
719 712 self."pytest-sugar"
720 713 self."pytest-runner"
721 714 self."pytest-profiling"
722 715 self."pytest-timeout"
723 716 self."gprof2dot"
724 717 self."mock"
725 718 self."cov-core"
726 719 self."coverage"
727 720 self."webtest"
728 721 self."beautifulsoup4"
729 722 ];
730 723 src = ./.;
731 724 meta = {
732 725 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
733 726 };
734 727 };
735 728 "scandir" = super.buildPythonPackage {
736 name = "scandir-1.9.0";
729 name = "scandir-1.10.0";
737 730 doCheck = false;
738 731 src = fetchurl {
739 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
740 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
732 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
733 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
741 734 };
742 735 meta = {
743 736 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
744 737 };
745 738 };
746 739 "setproctitle" = super.buildPythonPackage {
747 740 name = "setproctitle-1.1.10";
748 741 doCheck = false;
749 742 src = fetchurl {
750 743 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
751 744 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
752 745 };
753 746 meta = {
754 747 license = [ pkgs.lib.licenses.bsdOriginal ];
755 748 };
756 749 };
757 750 "setuptools" = super.buildPythonPackage {
758 name = "setuptools-40.8.0";
751 name = "setuptools-41.0.1";
759 752 doCheck = false;
760 753 src = fetchurl {
761 url = "https://files.pythonhosted.org/packages/c2/f7/c7b501b783e5a74cf1768bc174ee4fb0a8a6ee5af6afa92274ff964703e0/setuptools-40.8.0.zip";
762 sha256 = "0k9hifpgahnw2a26w3cr346iy733k6d3nwh3f7g9m13y6f8fqkkf";
754 url = "https://files.pythonhosted.org/packages/1d/64/a18a487b4391a05b9c7f938b94a16d80305bf0369c6b0b9509e86165e1d3/setuptools-41.0.1.zip";
755 sha256 = "04sns22y2hhsrwfy1mha2lgslvpjsjsz8xws7h2rh5a7ylkd28m2";
763 756 };
764 757 meta = {
765 758 license = [ pkgs.lib.licenses.mit ];
766 759 };
767 760 };
768 761 "simplegeneric" = super.buildPythonPackage {
769 762 name = "simplegeneric-0.8.1";
770 763 doCheck = false;
771 764 src = fetchurl {
772 765 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
773 766 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
774 767 };
775 768 meta = {
776 769 license = [ pkgs.lib.licenses.zpl21 ];
777 770 };
778 771 };
779 772 "simplejson" = super.buildPythonPackage {
780 773 name = "simplejson-3.16.0";
781 774 doCheck = false;
782 775 src = fetchurl {
783 776 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
784 777 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
785 778 };
786 779 meta = {
787 780 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
788 781 };
789 782 };
790 783 "six" = super.buildPythonPackage {
791 784 name = "six-1.11.0";
792 785 doCheck = false;
793 786 src = fetchurl {
794 787 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
795 788 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
796 789 };
797 790 meta = {
798 791 license = [ pkgs.lib.licenses.mit ];
799 792 };
800 793 };
801 794 "subprocess32" = super.buildPythonPackage {
802 name = "subprocess32-3.5.3";
795 name = "subprocess32-3.5.4";
803 796 doCheck = false;
804 797 src = fetchurl {
805 url = "https://files.pythonhosted.org/packages/be/2b/beeba583e9877e64db10b52a96915afc0feabf7144dcbf2a0d0ea68bf73d/subprocess32-3.5.3.tar.gz";
806 sha256 = "1hr5fan8i719hmlmz73hf8rhq74014w07d8ryg7krvvf6692kj3b";
798 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
799 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
807 800 };
808 801 meta = {
809 802 license = [ pkgs.lib.licenses.psfl ];
810 803 };
811 804 };
812 805 "subvertpy" = super.buildPythonPackage {
813 806 name = "subvertpy-0.10.1";
814 807 doCheck = false;
815 808 src = fetchurl {
816 809 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
817 810 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
818 811 };
819 812 meta = {
820 813 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
821 814 };
822 815 };
823 816 "termcolor" = super.buildPythonPackage {
824 817 name = "termcolor-1.1.0";
825 818 doCheck = false;
826 819 src = fetchurl {
827 820 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
828 821 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
829 822 };
830 823 meta = {
831 824 license = [ pkgs.lib.licenses.mit ];
832 825 };
833 826 };
834 827 "traitlets" = super.buildPythonPackage {
835 828 name = "traitlets-4.3.2";
836 829 doCheck = false;
837 830 propagatedBuildInputs = [
838 831 self."ipython-genutils"
839 832 self."six"
840 833 self."decorator"
841 834 self."enum34"
842 835 ];
843 836 src = fetchurl {
844 837 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
845 838 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
846 839 };
847 840 meta = {
848 841 license = [ pkgs.lib.licenses.bsdOriginal ];
849 842 };
850 843 };
851 844 "translationstring" = super.buildPythonPackage {
852 845 name = "translationstring-1.3";
853 846 doCheck = false;
854 847 src = fetchurl {
855 848 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
856 849 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
857 850 };
858 851 meta = {
859 852 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
860 853 };
861 854 };
862 855 "venusian" = super.buildPythonPackage {
863 856 name = "venusian-1.2.0";
864 857 doCheck = false;
865 858 src = fetchurl {
866 859 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
867 860 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
868 861 };
869 862 meta = {
870 863 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
871 864 };
872 865 };
873 866 "waitress" = super.buildPythonPackage {
874 name = "waitress-1.1.0";
867 name = "waitress-1.3.0";
875 868 doCheck = false;
876 869 src = fetchurl {
877 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
878 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
870 url = "https://files.pythonhosted.org/packages/43/50/9890471320d5ad22761ae46661cf745f487b1c8c4ec49352b99e1078b970/waitress-1.3.0.tar.gz";
871 sha256 = "09j5dzbbcxib7vdskhx39s1qsydlr4n2p2png71d7mjnr9pnwajf";
879 872 };
880 873 meta = {
881 874 license = [ pkgs.lib.licenses.zpl21 ];
882 875 };
883 876 };
884 877 "wcwidth" = super.buildPythonPackage {
885 878 name = "wcwidth-0.1.7";
886 879 doCheck = false;
887 880 src = fetchurl {
888 881 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
889 882 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
890 883 };
891 884 meta = {
892 885 license = [ pkgs.lib.licenses.mit ];
893 886 };
894 887 };
895 888 "webob" = super.buildPythonPackage {
896 name = "webob-1.8.4";
889 name = "webob-1.8.5";
897 890 doCheck = false;
898 891 src = fetchurl {
899 url = "https://files.pythonhosted.org/packages/e4/6c/99e322c3d4cc11d9060a67a9bf2f7c9c581f40988c11fffe89bb8c36bc5e/WebOb-1.8.4.tar.gz";
900 sha256 = "16cfg5y4n6sihz59vsmns2yqbfm0gfsn3l5xgz2g0pdhilaib0x4";
892 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
893 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
901 894 };
902 895 meta = {
903 896 license = [ pkgs.lib.licenses.mit ];
904 897 };
905 898 };
906 899 "webtest" = super.buildPythonPackage {
907 name = "webtest-2.0.32";
900 name = "webtest-2.0.33";
908 901 doCheck = false;
909 902 propagatedBuildInputs = [
910 903 self."six"
911 904 self."webob"
912 905 self."waitress"
913 906 self."beautifulsoup4"
914 907 ];
915 908 src = fetchurl {
916 url = "https://files.pythonhosted.org/packages/27/9f/9e74449d272ffbef4fb3012e6dbc53c0b24822d545e7a33a342f80131e59/WebTest-2.0.32.tar.gz";
917 sha256 = "0qp0nnbazzm4ibjiyqfcn6f230svk09i4g58zg2i9x1ga06h48a2";
909 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
910 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
918 911 };
919 912 meta = {
920 913 license = [ pkgs.lib.licenses.mit ];
921 914 };
922 915 };
923 916 "zope.deprecation" = super.buildPythonPackage {
924 name = "zope.deprecation-4.3.0";
917 name = "zope.deprecation-4.4.0";
925 918 doCheck = false;
926 919 propagatedBuildInputs = [
927 920 self."setuptools"
928 921 ];
929 922 src = fetchurl {
930 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
931 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
923 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
924 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
932 925 };
933 926 meta = {
934 927 license = [ pkgs.lib.licenses.zpl21 ];
935 928 };
936 929 };
937 930 "zope.interface" = super.buildPythonPackage {
938 name = "zope.interface-4.5.0";
931 name = "zope.interface-4.6.0";
939 932 doCheck = false;
940 933 propagatedBuildInputs = [
941 934 self."setuptools"
942 935 ];
943 936 src = fetchurl {
944 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
945 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
937 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
938 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
946 939 };
947 940 meta = {
948 941 license = [ pkgs.lib.licenses.zpl21 ];
949 942 };
950 943 };
951 944
952 945 ### Test requirements
953 946
954 947
955 948 }
@@ -1,49 +1,43 b''
1 1 ## dependencies
2 2
3 3 # our custom configobj
4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
5 atomicwrites==1.2.1
6 attrs==18.2.0
4 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
5
7 6 dogpile.cache==0.7.1
8 7 dogpile.core==0.4.1
9 8 decorator==4.1.2
10 9 dulwich==0.13.0
11 10 hgsubversion==1.9.3
12 hg-evolve==8.0.1
11 hg-evolve==8.5.1
13 12 mako==1.0.7
14 13 markupsafe==1.1.0
15 mercurial==4.6.2
14 mercurial==4.9.1
16 15 msgpack-python==0.5.6
17 16
18 17 pastedeploy==2.0.1
19 psutil==5.4.8
20 pyramid==1.10.1
18 pyramid==1.10.4
21 19 pyramid-mako==1.0.2
22 20
23 pygments==2.3.1
24 pathlib2==2.3.3
25 21 repoze.lru==0.7
26 22 simplejson==3.16.0
27 subprocess32==3.5.3
23 subprocess32==3.5.4
28 24 subvertpy==0.10.1
29 25
30 26 six==1.11.0
31 27 translationstring==1.3
32 webob==1.8.4
33 zope.deprecation==4.3.0
34 zope.interface==4.5.0
35 venusian==1.2.0
28 webob==1.8.5
29 zope.deprecation==4.4.0
30 zope.interface==4.6.0
36 31
37 32 ## http servers
38 33 gevent==1.4.0
39 34 greenlet==0.4.15
40 35 gunicorn==19.9.0
41 waitress==1.1.0
42 setproctitle==1.1.10
36 waitress==1.3.0
43 37
44 38 ## debug
45 ipdb==0.11.0
39 ipdb==0.12.0
46 40 ipython==5.1.0
47 41
48 42 ## test related requirements
49 43 -r requirements_test.txt
@@ -1,16 +1,16 b''
1 1 # test related requirements
2 2 pytest==3.8.2
3 3 py==1.6.0
4 4 pytest-cov==2.6.0
5 5 pytest-sugar==0.9.1
6 6 pytest-runner==4.2.0
7 7 pytest-profiling==1.3.0
8 8 pytest-timeout==1.3.2
9 9 gprof2dot==2017.9.19
10 10
11 11 mock==1.0.1
12 12 cov-core==1.15.0
13 coverage==4.5.1
13 coverage==4.5.3
14 14
15 webtest==2.0.32
15 webtest==2.0.33
16 16 beautifulsoup4==4.6.3
@@ -1,1 +1,1 b''
1 4.16.2 No newline at end of file
1 4.17.0 No newline at end of file
@@ -1,117 +1,117 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28 28
29 29
30 30 def _make_exception(kind, org_exc, *args):
31 31 """
32 32 Prepares a base `Exception` instance to be sent over the wire.
33 33
34 34 To give our caller a hint what this is about, it will attach an attribute
35 35 `_vcs_kind` to the exception.
36 36 """
37 37 exc = Exception(*args)
38 38 exc._vcs_kind = kind
39 39 exc._org_exc = org_exc
40 exc._org_exc_tb = ''
40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 41 return exc
42 42
43 43
44 44 def AbortException(org_exc=None):
45 45 def _make_exception_wrapper(*args):
46 46 return _make_exception('abort', org_exc, *args)
47 47 return _make_exception_wrapper
48 48
49 49
50 50 def ArchiveException(org_exc=None):
51 51 def _make_exception_wrapper(*args):
52 52 return _make_exception('archive', org_exc, *args)
53 53 return _make_exception_wrapper
54 54
55 55
56 56 def LookupException(org_exc=None):
57 57 def _make_exception_wrapper(*args):
58 58 return _make_exception('lookup', org_exc, *args)
59 59 return _make_exception_wrapper
60 60
61 61
62 62 def VcsException(org_exc=None):
63 63 def _make_exception_wrapper(*args):
64 64 return _make_exception('error', org_exc, *args)
65 65 return _make_exception_wrapper
66 66
67 67
68 68 def RepositoryLockedException(org_exc=None):
69 69 def _make_exception_wrapper(*args):
70 70 return _make_exception('repo_locked', org_exc, *args)
71 71 return _make_exception_wrapper
72 72
73 73
74 74 def RepositoryBranchProtectedException(org_exc=None):
75 75 def _make_exception_wrapper(*args):
76 76 return _make_exception('repo_branch_protected', org_exc, *args)
77 77 return _make_exception_wrapper
78 78
79 79
80 80 def RequirementException(org_exc=None):
81 81 def _make_exception_wrapper(*args):
82 82 return _make_exception('requirement', org_exc, *args)
83 83 return _make_exception_wrapper
84 84
85 85
86 86 def UnhandledException(org_exc=None):
87 87 def _make_exception_wrapper(*args):
88 88 return _make_exception('unhandled', org_exc, *args)
89 89 return _make_exception_wrapper
90 90
91 91
92 92 def URLError(org_exc=None):
93 93 def _make_exception_wrapper(*args):
94 94 return _make_exception('url_error', org_exc, *args)
95 95 return _make_exception_wrapper
96 96
97 97
98 98 def SubrepoMergeException(org_exc=None):
99 99 def _make_exception_wrapper(*args):
100 100 return _make_exception('subrepo_merge_error', org_exc, *args)
101 101 return _make_exception_wrapper
102 102
103 103
104 104 class HTTPRepoLocked(HTTPLocked):
105 105 """
106 106 Subclass of HTTPLocked response that allows to set the title and status
107 107 code via constructor arguments.
108 108 """
109 109 def __init__(self, title, status_code=None, **kwargs):
110 110 self.code = status_code or HTTPLocked.code
111 111 self.title = title
112 112 super(HTTPRepoLocked, self).__init__(**kwargs)
113 113
114 114
115 115 class HTTPRepoBranchProtected(HTTPForbidden):
116 116 def __init__(self, *args, **kwargs):
117 117 super(HTTPForbidden, self).__init__(*args, **kwargs)
@@ -1,742 +1,752 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import collections
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 import more_itertools
29 29 from dulwich import index, objects
30 30 from dulwich.client import HttpGitClient, LocalGitClient
31 31 from dulwich.errors import (
32 32 NotGitRepository, ChecksumMismatch, WrongObjectException,
33 33 MissingCommitError, ObjectMissing, HangupException,
34 34 UnexpectedCommandError)
35 35 from dulwich.repo import Repo as DulwichRepo, Tag
36 36 from dulwich.server import update_server_info
37 37
38 38 from vcsserver import exceptions, settings, subprocessio
39 39 from vcsserver.utils import safe_str
40 40 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
41 41 from vcsserver.hgcompat import (
42 42 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
43 43 from vcsserver.git_lfs.lib import LFSOidStore
44 44
45 45 DIR_STAT = stat.S_IFDIR
46 46 FILE_MODE = stat.S_IFMT
47 47 GIT_LINK = objects.S_IFGITLINK
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 def reraise_safe_exceptions(func):
53 53 """Converts Dulwich exceptions to something neutral."""
54 54 @wraps(func)
55 55 def wrapper(*args, **kwargs):
56 56 try:
57 57 return func(*args, **kwargs)
58 58 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
59 59 ObjectMissing) as e:
60 60 exc = exceptions.LookupException(e)
61 61 raise exc(e)
62 62 except (HangupException, UnexpectedCommandError) as e:
63 63 exc = exceptions.VcsException(e)
64 64 raise exc(e)
65 65 except Exception as e:
66 66 # NOTE(marcink): becuase of how dulwich handles some exceptions
67 67 # (KeyError on empty repos), we cannot track this and catch all
68 68 # exceptions, it's an exceptions from other handlers
69 69 #if not hasattr(e, '_vcs_kind'):
70 70 #log.exception("Unhandled exception in git remote call")
71 71 #raise_from_original(exceptions.UnhandledException)
72 72 raise
73 73 return wrapper
74 74
75 75
76 76 class Repo(DulwichRepo):
77 77 """
78 78 A wrapper for dulwich Repo class.
79 79
80 80 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
81 81 "Too many open files" error. We need to close all opened file descriptors
82 82 once the repo object is destroyed.
83 83
84 84 TODO: mikhail: please check if we need this wrapper after updating dulwich
85 85 to 0.12.0 +
86 86 """
87 87 def __del__(self):
88 88 if hasattr(self, 'object_store'):
89 89 self.close()
90 90
91 91
92 92 class GitFactory(RepoFactory):
93 93 repo_type = 'git'
94 94
95 95 def _create_repo(self, wire, create):
96 96 repo_path = str_to_dulwich(wire['path'])
97 97 return Repo(repo_path)
98 98
99 99
100 100 class GitRemote(object):
101 101
102 102 def __init__(self, factory):
103 103 self._factory = factory
104 104 self.peeled_ref_marker = '^{}'
105 105 self._bulk_methods = {
106 106 "author": self.commit_attribute,
107 107 "date": self.get_object_attrs,
108 108 "message": self.commit_attribute,
109 109 "parents": self.commit_attribute,
110 110 "_commit": self.revision,
111 111 }
112 112
113 113 def _wire_to_config(self, wire):
114 114 if 'config' in wire:
115 115 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
116 116 return {}
117 117
118 118 def _assign_ref(self, wire, ref, commit_id):
119 119 repo = self._factory.repo(wire)
120 120 repo[ref] = commit_id
121 121
122 122 def _remote_conf(self, config):
123 123 params = [
124 124 '-c', 'core.askpass=""',
125 125 ]
126 126 ssl_cert_dir = config.get('vcs_ssl_dir')
127 127 if ssl_cert_dir:
128 128 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
129 129 return params
130 130
131 131 @reraise_safe_exceptions
132 def is_empty(self, wire):
133 repo = self._factory.repo(wire)
134 try:
135 return not repo.head()
136 except Exception:
137 log.exception("failed to read object_store")
138 return True
139
140 @reraise_safe_exceptions
132 141 def add_object(self, wire, content):
133 142 repo = self._factory.repo(wire)
134 143 blob = objects.Blob()
135 144 blob.set_raw_string(content)
136 145 repo.object_store.add_object(blob)
137 146 return blob.id
138 147
139 148 @reraise_safe_exceptions
140 149 def assert_correct_path(self, wire):
141 150 path = wire.get('path')
142 151 try:
143 152 self._factory.repo(wire)
144 153 except NotGitRepository as e:
145 154 tb = traceback.format_exc()
146 155 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
147 156 return False
148 157
149 158 return True
150 159
151 160 @reraise_safe_exceptions
152 161 def bare(self, wire):
153 162 repo = self._factory.repo(wire)
154 163 return repo.bare
155 164
156 165 @reraise_safe_exceptions
157 166 def blob_as_pretty_string(self, wire, sha):
158 167 repo = self._factory.repo(wire)
159 168 return repo[sha].as_pretty_string()
160 169
161 170 @reraise_safe_exceptions
162 171 def blob_raw_length(self, wire, sha):
163 172 repo = self._factory.repo(wire)
164 173 blob = repo[sha]
165 174 return blob.raw_length()
166 175
167 176 def _parse_lfs_pointer(self, raw_content):
168 177
169 178 spec_string = 'version https://git-lfs.github.com/spec'
170 179 if raw_content and raw_content.startswith(spec_string):
171 180 pattern = re.compile(r"""
172 181 (?:\n)?
173 182 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
174 183 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
175 184 ^size[ ](?P<oid_size>[0-9]+)\n
176 185 (?:\n)?
177 186 """, re.VERBOSE | re.MULTILINE)
178 187 match = pattern.match(raw_content)
179 188 if match:
180 189 return match.groupdict()
181 190
182 191 return {}
183 192
184 193 @reraise_safe_exceptions
185 194 def is_large_file(self, wire, sha):
186 195 repo = self._factory.repo(wire)
187 196 blob = repo[sha]
188 197 return self._parse_lfs_pointer(blob.as_raw_string())
189 198
190 199 @reraise_safe_exceptions
191 200 def in_largefiles_store(self, wire, oid):
192 201 repo = self._factory.repo(wire)
193 202 conf = self._wire_to_config(wire)
194 203
195 204 store_location = conf.get('vcs_git_lfs_store_location')
196 205 if store_location:
197 206 repo_name = repo.path
198 207 store = LFSOidStore(
199 208 oid=oid, repo=repo_name, store_location=store_location)
200 209 return store.has_oid()
201 210
202 211 return False
203 212
204 213 @reraise_safe_exceptions
205 214 def store_path(self, wire, oid):
206 215 repo = self._factory.repo(wire)
207 216 conf = self._wire_to_config(wire)
208 217
209 218 store_location = conf.get('vcs_git_lfs_store_location')
210 219 if store_location:
211 220 repo_name = repo.path
212 221 store = LFSOidStore(
213 222 oid=oid, repo=repo_name, store_location=store_location)
214 223 return store.oid_path
215 224 raise ValueError('Unable to fetch oid with path {}'.format(oid))
216 225
217 226 @reraise_safe_exceptions
218 227 def bulk_request(self, wire, rev, pre_load):
219 228 result = {}
220 229 for attr in pre_load:
221 230 try:
222 231 method = self._bulk_methods[attr]
223 232 args = [wire, rev]
224 233 if attr == "date":
225 234 args.extend(["commit_time", "commit_timezone"])
226 235 elif attr in ["author", "message", "parents"]:
227 236 args.append(attr)
228 237 result[attr] = method(*args)
229 238 except KeyError as e:
230 239 raise exceptions.VcsException(e)(
231 240 "Unknown bulk attribute: %s" % attr)
232 241 return result
233 242
234 243 def _build_opener(self, url):
235 244 handlers = []
236 245 url_obj = url_parser(url)
237 246 _, authinfo = url_obj.authinfo()
238 247
239 248 if authinfo:
240 249 # create a password manager
241 250 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
242 251 passmgr.add_password(*authinfo)
243 252
244 253 handlers.extend((httpbasicauthhandler(passmgr),
245 254 httpdigestauthhandler(passmgr)))
246 255
247 256 return urllib2.build_opener(*handlers)
248 257
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """
        Verify that `url` points at a reachable smart-HTTP git repository.

        Probes `<url>/info/refs?service=git-upload-pack` with a fake git
        User-Agent and inspects the response body. Returns True on success,
        raises a wrapped URLError otherwise. Credentials in the logged URL
        are masked.
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # mask password and query string before anything is logged
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError()(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
291 300
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """
        Clone `url` into the wire repo by delegating to :meth:`pull`, then
        copy over refs matching the `valid_refs` prefixes while skipping
        refs ending with `deferred` (so-called deferred tags). Optionally
        checks out HEAD afterwards.
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # str.startswith accepts a tuple of prefixes, not a list
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
310 319
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Create a commit on `branch` from a base tree plus a set of changes.

        `updated` is a list of node dicts (keys used here: 'path',
        'node_path', 'content', 'mode') to add/replace; `removed` is a list
        of slash-separated paths to delete. `commit_data` supplies the
        commit attributes (author, message, ...) set via setattr. Returns
        the new commit id and moves refs/heads/<branch> to it.
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(
                    name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
410 419
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch from `url` into the wire repo using dulwich clients.

        A local path (no '://' and not 'default') uses LocalGitClient,
        anything else goes over HTTP with the custom auth opener. When
        `refs` is given, fetching and the returned mapping are restricted
        to those refs. With `apply_refs`, non-peeled remote refs are copied
        into the local repo; `update_after` additionally checks out HEAD.
        Returns the (possibly filtered) remote refs mapping.
        """
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(self.peeled_ref_marker):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
463 472
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None):
        """
        Mirror-style fetch via the git binary: list remote refs with
        `ls-remote`, skip duplicates / peeled refs / HEAD, then fetch the
        selection in chunks with `git fetch --force --prune`. When `refs`
        is given it filters by sha. Returns an OrderedDict of ref -> sha.
        """
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]
        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        output, __ = self.run_git_command(
            wire, ['ls-remote', url], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(self.peeled_ref_marker):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
        if fetch_refs:
            # chunk the refspecs to stay below command-line length limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
511 520
    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        """
        Mirror-push the wire repo to `url` with `git push --mirror`.
        Silently returns when the target URL fails check_url.
        NOTE(review): the `refs` parameter is currently unused here.
        """
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        repo = self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})
522 531
523 532 @reraise_safe_exceptions
524 533 def get_remote_refs(self, wire, url):
525 534 repo = Repo(url)
526 535 return repo.get_refs()
527 536
    @reraise_safe_exceptions
    def get_description(self, wire):
        """Return the description text of the wire repository."""
        repo = self._factory.repo(wire)
        return repo.get_description()
532 541
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        """
        Return commit ids reachable from `rev2` but not from `rev1`,
        computed on the repo at `path2` after cross-fetching both repos
        locally so each side has the other's objects.
        """
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs
547 556
    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        """
        Return basic metadata for object `sha`: its id, dulwich type name,
        and the commit id it resolves to (for Tag objects, the tagged
        object's id). 'idx' is a fixed placeholder here.
        """
        repo = self._factory.repo(wire)
        obj = repo.get_object(sha)
        commit_id = obj.id

        if isinstance(obj, Tag):
            # dereference annotated tags to the tagged commit
            commit_id = obj.object[1]

        return {
            'id': obj.id,
            'type': obj.type_name,
            'commit_id': commit_id,
            'idx': 0
        }
562 572
563 573 @reraise_safe_exceptions
564 574 def get_object_attrs(self, wire, sha, *attrs):
565 575 repo = self._factory.repo(wire)
566 576 obj = repo.get_object(sha)
567 577 return list(getattr(obj, a) for a in attrs)
568 578
569 579 @reraise_safe_exceptions
570 580 def get_refs(self, wire):
571 581 repo = self._factory.repo(wire)
572 582 result = {}
573 583 for ref, sha in repo.refs.as_dict().items():
574 584 peeled_sha = repo.get_peeled(ref)
575 585 result[ref] = peeled_sha
576 586 return result
577 587
    @reraise_safe_exceptions
    def get_refs_path(self, wire):
        """Return the filesystem path of the refs container."""
        repo = self._factory.repo(wire)
        return repo.refs.path
582 592
    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        """
        Return the current HEAD sha. With show_exc=False any failure
        (e.g. an empty repository) is swallowed and None is returned.
        """
        repo = self._factory.repo(wire)
        try:
            return repo.head()
        except Exception:
            if show_exc:
                raise
591 601
    @reraise_safe_exceptions
    def init(self, wire):
        """Initialize a non-bare git repository at the wire path."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)
596 606
    @reraise_safe_exceptions
    def init_bare(self, wire):
        """Initialize a bare git repository at the wire path."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)
601 611
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """
        Return {'id': ..., 'tree': ...} for revision `rev`; 'tree' is
        omitted for objects without a tree attribute (e.g. blobs/tags).
        """
        repo = self._factory.repo(wire)
        obj = repo[rev]
        obj_data = {
            'id': obj.id,
        }
        try:
            obj_data['tree'] = obj.tree
        except AttributeError:
            # not a commit-like object
            pass
        return obj_data
614 624
    @reraise_safe_exceptions
    def commit_attribute(self, wire, rev, attr):
        """Return attribute `attr` of the object stored under `rev`."""
        repo = self._factory.repo(wire)
        obj = repo[rev]
        return getattr(obj, attr)
620 630
    @reraise_safe_exceptions
    def set_refs(self, wire, key, value):
        """Create or update ref `key` to point at sha `value`."""
        repo = self._factory.repo(wire)
        repo.refs[key] = value
625 635
    @reraise_safe_exceptions
    def remove_ref(self, wire, key):
        """Delete ref `key` from the repository."""
        repo = self._factory.repo(wire)
        del repo.refs[key]
630 640
631 641 @reraise_safe_exceptions
632 642 def tree_changes(self, wire, source_id, target_id):
633 643 repo = self._factory.repo(wire)
634 644 source = repo[source_id].tree if source_id else None
635 645 target = repo[target_id].tree
636 646 result = repo.object_store.tree_changes(source, target)
637 647 return list(result)
638 648
    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        """
        Return a list of (path, mode, sha, type) tuples for every entry of
        tree `tree_id`. Submodule entries (git links) get type "link";
        other entries report the dulwich type name of the pointed object.
        """
        repo = self._factory.repo(wire)
        tree = repo[tree_id]

        result = []
        for item in tree.iteritems():
            item_sha = item.sha
            item_mode = item.mode

            if FILE_MODE(item_mode) == GIT_LINK:
                item_type = "link"
            else:
                item_type = repo[item_sha].type_name

            result.append((item.path, item_mode, item_sha, item_type))
        return result
656 666
657 667 @reraise_safe_exceptions
658 668 def update_server_info(self, wire):
659 669 repo = self._factory.repo(wire)
660 670 update_server_info(repo)
661 671
    @reraise_safe_exceptions
    def discover_git_version(self):
        """
        Return the version reported by the configured git binary, with the
        leading 'git version' prefix stripped, e.g. '2.20.1'.
        """
        stdout, _ = self.run_git_command(
            {}, ['--version'], _bare=True, _safe=True)
        prefix = 'git version'
        if stdout.startswith(prefix):
            stdout = stdout[len(prefix):]
        return stdout.strip()
670 680
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """
        Run a git command inside the wire repo and return (stdout, stderr).

        Recognized pseudo-options (consumed, not passed through):
          _bare     -- skip the default `-c core.quotepath=false` config
          _safe     -- on EnvironmentError return ('', err) instead of raising
          _copts    -- extra `-c key=val` style config args
          extra_env -- dict merged into the subprocess environment
        Remaining `opts` go to SubprocessIOChunker. GIT_DIR is removed from
        the environment so path discovery is not hijacked by the caller's env.
        """
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        if '_copts' in opts:
            _copts.extend(opts['_copts'] or [])
            del opts['_copts']

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
        _opts = {'env': gitenv, 'shell': False}

        try:
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)
720 730
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """Install RhodeCode git hooks into the repo; `force` overwrites."""
        from vcsserver.hook_utils import install_git_hooks
        repo = self._factory.repo(wire)
        return install_git_hooks(repo.path, repo.bare, force_create=force)
726 736
    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        """Return the installed pre/post hook versions for the repo."""
        from vcsserver.hook_utils import (
            get_git_pre_hook_version, get_git_post_hook_version)
        repo = self._factory.repo(wire)
        return {
            'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
            'post_version': get_git_post_hook_version(repo.path, repo.bare),
        }
736 746
737 747
def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.

    Decodes `value` using the wire encoding configured in settings.
    """
    return value.decode(settings.WIRE_ENCODING)
@@ -1,287 +1,292 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import re
19 19 import logging
20 20 from wsgiref.util import FileWrapper
21 21
22 22 import simplejson as json
23 23 from pyramid.config import Configurator
24 24 from pyramid.response import Response, FileIter
25 25 from pyramid.httpexceptions import (
26 26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 27 HTTPUnprocessableEntity)
28 28
29 29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 31 from vcsserver.utils import safe_int
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
37 37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38 38
39 39
def write_response_error(http_exception, text=None):
    """
    Build a git-lfs JSON error response from a pyramid HTTP exception
    class. `text` (if given) becomes the JSON `message` body. Returns the
    exception instance for the caller to return/raise.
    """
    content_type = GIT_LFS_CONTENT_TYPE + '+json'
    _exception = http_exception(content_type=content_type)
    _exception.content_type = content_type
    if text:
        _exception.body = json.dumps({'message': text})
    log.debug('LFS: writing response of type %s to client with text:%s',
              http_exception, text)
    return _exception
49 49
50 50
class AuthHeaderRequired(object):
    """
    Decorator to check if request has proper auth-header; responds with
    HTTP 403 when the Authorization header is missing. The header value
    itself is not validated here.
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        # fargs[0] is the cython-compat wrapper slot; fargs[1] is the request
        request = fargs[1]
        auth = request.authorization
        if not auth:
            return write_response_error(HTTPForbidden)
        return func(*fargs[1:], **fkwargs)
65 65
66 66
67 67 # views
68 68
def lfs_objects(request):
    """Reject calls to the deprecated LFS v1 objects API with 501."""
    # indicate not supported, V1 API
    log.warning('LFS: v1 api not supported, reporting it back to client')
    return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 73
74 74
@AuthHeaderRequired()
def lfs_objects_batch(request):
    """
    The client sends the following information to the Batch endpoint to transfer some objects:

    operation - Should be download or upload.
    transfers - An optional Array of String identifiers for transfer
        adapters that the client has configured. If omitted, the basic
        transfer adapter MUST be assumed by the server.
    objects - An Array of objects to download.
    oid - String OID of the LFS object.
    size - Integer byte size of the LFS object. Must be at least zero.
    """
    request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
    auth = request.authorization
    repo = request.matchdict.get('repo')
    data = request.json
    operation = data.get('operation')
    # scheme (http/https) configured at app creation, used to build hrefs
    http_scheme = request.registry.git_lfs_http_scheme

    if operation not in ('download', 'upload'):
        log.debug('LFS: unsupported operation:%s', operation)
        return write_response_error(
            HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)

    if 'objects' not in data:
        log.debug('LFS: missing objects data')
        return write_response_error(
            HTTPBadRequest, 'missing objects data')

    log.debug('LFS: handling operation of type: %s', operation)

    objects = []
    for o in data['objects']:
        try:
            oid = o['oid']
            obj_size = o['size']
        except KeyError:
            log.exception('LFS, failed to extract data')
            return write_response_error(
                HTTPBadRequest, 'unsupported data in objects')

        obj_data = {'oid': oid}

        # force the configured scheme so hrefs are correct behind a proxy
        obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
                                     _scheme=http_scheme)
        obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
                                            _scheme=http_scheme)
        store = LFSOidStore(
            oid, repo, store_location=request.registry.git_lfs_store_path)
        handler = OidHandler(
            store, repo, auth, oid, obj_size, obj_data,
            obj_href, obj_verify_href)

        # this verifies also OIDs
        actions, errors = handler.exec_operation(operation)
        if errors:
            log.warning('LFS: got following errors: %s', errors)
            obj_data['errors'] = errors

        if actions:
            obj_data['actions'] = actions

        obj_data['size'] = obj_size
        obj_data['authenticated'] = True
        objects.append(obj_data)

    result = {'objects': objects, 'transfer': 'basic'}
    log.debug('LFS Response %s', safe_result(result))

    return result
142 146
143 147
def lfs_objects_oid_upload(request):
    """
    Stream an uploaded LFS object body into the oid store, reading the
    WSGI input in 64kb chunks so large files never fully buffer in memory.
    """
    request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')
    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    engine = store.get_engine(mode='wb')
    log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)

    body = request.environ['wsgi.input']

    with engine as f:
        blksize = 64 * 1024  # 64kb
        while True:
            # read in chunks as stream comes in from Gunicorn
            # this is a specific Gunicorn support function.
            # might work differently on waitress
            chunk = body.read(blksize)
            if not chunk:
                break
            f.write(chunk)

    return {'upload': 'ok'}
167 171
168 172
def lfs_objects_oid_download(request):
    """
    Serve a stored LFS object as an octet-stream via FileIter, or 404
    when the oid is not present in the store.
    """
    repo = request.matchdict.get('repo')
    oid = request.matchdict.get('oid')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)

    # TODO(marcink): support range header ?
    # Range: bytes=0-, `bytes=(\d+)\-.*`

    f = open(store.oid_path, 'rb')
    response = Response(
        content_type='application/octet-stream', app_iter=FileIter(f))
    response.headers.add('X-RC-LFS-Response-Oid', str(oid))
    return response
188 192
189 193
def lfs_objects_verify(request):
    """
    Verify that an uploaded LFS object exists in the store and that its
    stored size matches the size claimed by the client.
    """
    request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
    repo = request.matchdict.get('repo')

    data = request.json
    oid = data.get('oid')
    size = safe_int(data.get('size'))

    # NOTE(review): `size == 0` is falsy and rejected here, although the
    # batch API documents size "at least zero" — confirm intended behavior
    if not (oid and size):
        return write_response_error(
            HTTPBadRequest, 'missing oid and size in request data')

    store = LFSOidStore(
        oid, repo, store_location=request.registry.git_lfs_store_path)
    if not store.has_oid():
        log.debug('LFS: oid %s does not exists in store', oid)
        return write_response_error(
            HTTPNotFound, 'oid `%s` does not exists in store' % oid)

    store_size = store.size_oid()
    if store_size != size:
        msg = 'requested file size mismatch store size:%s requested:%s' % (
            store_size, size)
        return write_response_error(
            HTTPUnprocessableEntity, msg)

    return {'message': {'size': 'ok', 'in_store': 'ok'}}
217 221
218 222
def lfs_objects_lock(request):
    """Reject LFS locking API calls: locking is not supported."""
    return write_response_error(
        HTTPNotImplemented, 'GIT LFS locking api not supported')
222 226
223 227
def not_found(request):
    """Generic 404 handler for unmatched LFS API paths."""
    return write_response_error(
        HTTPNotFound, 'request path not found')
227 231
228 232
def lfs_disabled(request):
    """Catch-all handler used when LFS support is disabled for the repo."""
    return write_response_error(
        HTTPNotImplemented, 'GIT LFS disabled for this repo')
232 236
233 237
def git_lfs_app(config):
    """
    Register all git-lfs routes and views on the pyramid configurator:
    deprecated v1 API, locking API (both 501), batch API, oid
    upload/download, verification, and a JSON 404 fallback.
    """

    # v1 API deprecation endpoint
    config.add_route('lfs_objects',
                     '/{repo:.*?[^/]}/info/lfs/objects')
    config.add_view(lfs_objects, route_name='lfs_objects',
                    request_method='POST', renderer='json')

    # locking API
    config.add_route('lfs_objects_lock',
                     '/{repo:.*?[^/]}/info/lfs/locks')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
                    request_method=('POST', 'GET'), renderer='json')

    config.add_route('lfs_objects_lock_verify',
                     '/{repo:.*?[^/]}/info/lfs/locks/verify')
    config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
                    request_method=('POST', 'GET'), renderer='json')

    # batch API
    config.add_route('lfs_objects_batch',
                     '/{repo:.*?[^/]}/info/lfs/objects/batch')
    config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
                    request_method='POST', renderer='json')

    # oid upload/download API
    config.add_route('lfs_objects_oid',
                     '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
    config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
                    request_method='PUT', renderer='json')
    config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
                    request_method='GET', renderer='json')

    # verification API
    config.add_route('lfs_objects_verify',
                     '/{repo:.*?[^/]}/info/lfs/verify')
    config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
                    request_method='POST', renderer='json')

    # not found handler for API
    config.add_notfound_view(not_found, renderer='json')
275 279
276 280
def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
    """
    Build the git-lfs WSGI application.

    When enabled, the store path and the URL scheme (http/https, used for
    generated hrefs) are stashed on the registry; when disabled, every
    request is answered by the lfs_disabled handler.
    """
    config = Configurator()
    if git_lfs_enabled:
        config.include(git_lfs_app)
        config.registry.git_lfs_store_path = git_lfs_store_path
        config.registry.git_lfs_http_scheme = git_lfs_http_scheme
    else:
        # not found handler for API, reporting disabled LFS support
        config.add_notfound_view(lfs_disabled, renderer='json')

    app = config.make_wsgi_app()
    return app
@@ -1,239 +1,272 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from webtest.app import TestApp as WebObTestApp
21 21 import simplejson as json
22 22
23 23 from vcsserver.git_lfs.app import create_app
24 24
25 25
26 26 @pytest.fixture(scope='function')
27 27 def git_lfs_app(tmpdir):
28 28 custom_app = WebObTestApp(create_app(
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir)))
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
30 git_lfs_http_scheme='http'))
31 custom_app._store = str(tmpdir)
32 return custom_app
33
34
35 @pytest.fixture(scope='function')
36 def git_lfs_https_app(tmpdir):
37 custom_app = WebObTestApp(create_app(
38 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
39 git_lfs_http_scheme='https'))
30 40 custom_app._store = str(tmpdir)
31 41 return custom_app
32 42
33 43
34 44 @pytest.fixture()
35 45 def http_auth():
36 46 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
37 47
38 48
39 49 class TestLFSApplication(object):
40 50
41 51 def test_app_wrong_path(self, git_lfs_app):
42 52 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
43 53
44 54 def test_app_deprecated_endpoint(self, git_lfs_app):
45 55 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
46 56 assert response.status_code == 501
47 57 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
48 58
49 59 def test_app_lock_verify_api_not_available(self, git_lfs_app):
50 60 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
51 61 assert response.status_code == 501
52 62 assert json.loads(response.text) == {
53 63 u'message': u'GIT LFS locking api not supported'}
54 64
55 65 def test_app_lock_api_not_available(self, git_lfs_app):
56 66 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
57 67 assert response.status_code == 501
58 68 assert json.loads(response.text) == {
59 69 u'message': u'GIT LFS locking api not supported'}
60 70
61 def test_app_batch_api_missing_auth(self, git_lfs_app,):
71 def test_app_batch_api_missing_auth(self, git_lfs_app):
62 72 git_lfs_app.post_json(
63 73 '/repo/info/lfs/objects/batch', params={}, status=403)
64 74
65 75 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
66 76 response = git_lfs_app.post_json(
67 77 '/repo/info/lfs/objects/batch', params={}, status=400,
68 78 extra_environ=http_auth)
69 79 assert json.loads(response.text) == {
70 80 u'message': u'unsupported operation mode: `None`'}
71 81
72 82 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
73 83 response = git_lfs_app.post_json(
74 84 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
75 85 status=400, extra_environ=http_auth)
76 86 assert json.loads(response.text) == {
77 87 u'message': u'missing objects data'}
78 88
79 89 def test_app_batch_api_unsupported_data_in_objects(
80 90 self, git_lfs_app, http_auth):
81 91 params = {'operation': 'download',
82 92 'objects': [{}]}
83 93 response = git_lfs_app.post_json(
84 94 '/repo/info/lfs/objects/batch', params=params, status=400,
85 95 extra_environ=http_auth)
86 96 assert json.loads(response.text) == {
87 97 u'message': u'unsupported data in objects'}
88 98
89 99 def test_app_batch_api_download_missing_object(
90 100 self, git_lfs_app, http_auth):
91 101 params = {'operation': 'download',
92 102 'objects': [{'oid': '123', 'size': '1024'}]}
93 103 response = git_lfs_app.post_json(
94 104 '/repo/info/lfs/objects/batch', params=params,
95 105 extra_environ=http_auth)
96 106
97 107 expected_objects = [
98 108 {u'authenticated': True,
99 109 u'errors': {u'error': {
100 110 u'code': 404,
101 111 u'message': u'object: 123 does not exist in store'}},
102 112 u'oid': u'123',
103 113 u'size': u'1024'}
104 114 ]
105 115 assert json.loads(response.text) == {
106 116 'objects': expected_objects, 'transfer': 'basic'}
107 117
108 118 def test_app_batch_api_download(self, git_lfs_app, http_auth):
109 119 oid = '456'
110 120 oid_path = os.path.join(git_lfs_app._store, oid)
111 121 if not os.path.isdir(os.path.dirname(oid_path)):
112 122 os.makedirs(os.path.dirname(oid_path))
113 123 with open(oid_path, 'wb') as f:
114 124 f.write('OID_CONTENT')
115 125
116 126 params = {'operation': 'download',
117 127 'objects': [{'oid': oid, 'size': '1024'}]}
118 128 response = git_lfs_app.post_json(
119 129 '/repo/info/lfs/objects/batch', params=params,
120 130 extra_environ=http_auth)
121 131
122 132 expected_objects = [
123 133 {u'authenticated': True,
124 134 u'actions': {
125 135 u'download': {
126 136 u'header': {u'Authorization': u'Basic XXXXX'},
127 137 u'href': u'http://localhost/repo/info/lfs/objects/456'},
128 138 },
129 139 u'oid': u'456',
130 140 u'size': u'1024'}
131 141 ]
132 142 assert json.loads(response.text) == {
133 143 'objects': expected_objects, 'transfer': 'basic'}
134 144
135 145 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
136 146 params = {'operation': 'upload',
137 147 'objects': [{'oid': '123', 'size': '1024'}]}
138 148 response = git_lfs_app.post_json(
139 149 '/repo/info/lfs/objects/batch', params=params,
140 150 extra_environ=http_auth)
141 151 expected_objects = [
142 152 {u'authenticated': True,
143 153 u'actions': {
144 154 u'upload': {
145 155 u'header': {u'Authorization': u'Basic XXXXX',
146 156 u'Transfer-Encoding': u'chunked'},
147 157 u'href': u'http://localhost/repo/info/lfs/objects/123'},
148 158 u'verify': {
149 159 u'header': {u'Authorization': u'Basic XXXXX'},
150 160 u'href': u'http://localhost/repo/info/lfs/verify'}
151 161 },
152 162 u'oid': u'123',
153 163 u'size': u'1024'}
154 164 ]
155 165 assert json.loads(response.text) == {
156 166 'objects': expected_objects, 'transfer': 'basic'}
157 167
168 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
169 params = {'operation': 'upload',
170 'objects': [{'oid': '123', 'size': '1024'}]}
171 response = git_lfs_https_app.post_json(
172 '/repo/info/lfs/objects/batch', params=params,
173 extra_environ=http_auth)
174 expected_objects = [
175 {u'authenticated': True,
176 u'actions': {
177 u'upload': {
178 u'header': {u'Authorization': u'Basic XXXXX',
179 u'Transfer-Encoding': u'chunked'},
180 u'href': u'https://localhost/repo/info/lfs/objects/123'},
181 u'verify': {
182 u'header': {u'Authorization': u'Basic XXXXX'},
183 u'href': u'https://localhost/repo/info/lfs/verify'}
184 },
185 u'oid': u'123',
186 u'size': u'1024'}
187 ]
188 assert json.loads(response.text) == {
189 'objects': expected_objects, 'transfer': 'basic'}
190
158 191 def test_app_verify_api_missing_data(self, git_lfs_app):
159 params = {'oid': 'missing',}
192 params = {'oid': 'missing'}
160 193 response = git_lfs_app.post_json(
161 194 '/repo/info/lfs/verify', params=params,
162 195 status=400)
163 196
164 197 assert json.loads(response.text) == {
165 198 u'message': u'missing oid and size in request data'}
166 199
167 200 def test_app_verify_api_missing_obj(self, git_lfs_app):
168 201 params = {'oid': 'missing', 'size': '1024'}
169 202 response = git_lfs_app.post_json(
170 203 '/repo/info/lfs/verify', params=params,
171 204 status=404)
172 205
173 206 assert json.loads(response.text) == {
174 207 u'message': u'oid `missing` does not exists in store'}
175 208
176 209 def test_app_verify_api_size_mismatch(self, git_lfs_app):
177 210 oid = 'existing'
178 211 oid_path = os.path.join(git_lfs_app._store, oid)
179 212 if not os.path.isdir(os.path.dirname(oid_path)):
180 213 os.makedirs(os.path.dirname(oid_path))
181 214 with open(oid_path, 'wb') as f:
182 215 f.write('OID_CONTENT')
183 216
184 217 params = {'oid': oid, 'size': '1024'}
185 218 response = git_lfs_app.post_json(
186 219 '/repo/info/lfs/verify', params=params, status=422)
187 220
188 221 assert json.loads(response.text) == {
189 222 u'message': u'requested file size mismatch '
190 223 u'store size:11 requested:1024'}
191 224
192 225 def test_app_verify_api(self, git_lfs_app):
193 226 oid = 'existing'
194 227 oid_path = os.path.join(git_lfs_app._store, oid)
195 228 if not os.path.isdir(os.path.dirname(oid_path)):
196 229 os.makedirs(os.path.dirname(oid_path))
197 230 with open(oid_path, 'wb') as f:
198 231 f.write('OID_CONTENT')
199 232
200 233 params = {'oid': oid, 'size': 11}
201 234 response = git_lfs_app.post_json(
202 235 '/repo/info/lfs/verify', params=params)
203 236
204 237 assert json.loads(response.text) == {
205 238 u'message': {u'size': u'ok', u'in_store': u'ok'}}
206 239
207 240 def test_app_download_api_oid_not_existing(self, git_lfs_app):
208 241 oid = 'missing'
209 242
210 243 response = git_lfs_app.get(
211 244 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
212 245
213 246 assert json.loads(response.text) == {
214 247 u'message': u'requested file with oid `missing` not found in store'}
215 248
216 249 def test_app_download_api(self, git_lfs_app):
217 250 oid = 'existing'
218 251 oid_path = os.path.join(git_lfs_app._store, oid)
219 252 if not os.path.isdir(os.path.dirname(oid_path)):
220 253 os.makedirs(os.path.dirname(oid_path))
221 254 with open(oid_path, 'wb') as f:
222 255 f.write('OID_CONTENT')
223 256
224 257 response = git_lfs_app.get(
225 258 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
226 259 assert response
227 260
228 261 def test_app_upload(self, git_lfs_app):
229 262 oid = 'uploaded'
230 263
231 264 response = git_lfs_app.put(
232 265 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
233 266
234 267 assert json.loads(response.text) == {u'upload': u'ok'}
235 268
236 269 # verify that we actually wrote that OID
237 270 oid_path = os.path.join(git_lfs_app._store, oid)
238 271 assert os.path.isfile(oid_path)
239 272 assert 'CONTENT' == open(oid_path).read()
@@ -1,803 +1,856 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 import traceback
23 24
24 25 from hgext import largefiles, rebase
25 26 from hgext.strip import strip as hgext_strip
26 27 from mercurial import commands
27 28 from mercurial import unionrepo
28 29 from mercurial import verify
29 30
30 31 import vcsserver
31 32 from vcsserver import exceptions
32 33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 34 from vcsserver.hgcompat import (
34 archival, bin, clone, config as hgconfig, diffopts, hex,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 39 RepoLookupError, InterventionRequired, RequirementError)
39 40
40 41 log = logging.getLogger(__name__)
41 42
42 43
43 44 def make_ui_from_config(repo_config):
44 baseui = ui.ui()
45
46 class LoggingUI(ui.ui):
47 def status(self, *msg, **opts):
48 log.info(' '.join(msg).rstrip('\n'))
49 super(LoggingUI, self).status(*msg, **opts)
50
51 def warn(self, *msg, **opts):
52 log.warn(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).warn(*msg, **opts)
54
55 def error(self, *msg, **opts):
56 log.error(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).error(*msg, **opts)
58
59 def note(self, *msg, **opts):
60 log.info(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).note(*msg, **opts)
62
63 def debug(self, *msg, **opts):
64 log.debug(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).debug(*msg, **opts)
66
67 baseui = LoggingUI()
45 68
46 69 # clean the baseui object
47 70 baseui._ocfg = hgconfig.config()
48 71 baseui._ucfg = hgconfig.config()
49 72 baseui._tcfg = hgconfig.config()
50 73
51 74 for section, option, value in repo_config:
52 75 baseui.setconfig(section, option, value)
53 76
54 77 # make our hgweb quiet so it doesn't print output
55 78 baseui.setconfig('ui', 'quiet', 'true')
56 79
57 80 baseui.setconfig('ui', 'paginate', 'never')
81 # for better Error reporting of Mercurial
82 baseui.setconfig('ui', 'message-output', 'stderr')
83
58 84 # force mercurial to only use 1 thread, otherwise it may try to set a
59 85 # signal in a non-main thread, thus generating a ValueError.
60 86 baseui.setconfig('worker', 'numcpus', 1)
61 87
62 88 # If there is no config for the largefiles extension, we explicitly disable
63 89 # it here. This overrides settings from repositories hgrc file. Recent
64 90 # mercurial versions enable largefiles in hgrc on clone from largefile
65 91 # repo.
66 92 if not baseui.hasconfig('extensions', 'largefiles'):
67 93 log.debug('Explicitly disable largefiles extension for repo.')
68 94 baseui.setconfig('extensions', 'largefiles', '!')
69 95
70 96 return baseui
71 97
72 98
73 99 def reraise_safe_exceptions(func):
74 100 """Decorator for converting mercurial exceptions to something neutral."""
75 101 def wrapper(*args, **kwargs):
76 102 try:
77 103 return func(*args, **kwargs)
78 104 except (Abort, InterventionRequired) as e:
79 105 raise_from_original(exceptions.AbortException(e))
80 106 except RepoLookupError as e:
81 107 raise_from_original(exceptions.LookupException(e))
82 108 except RequirementError as e:
83 109 raise_from_original(exceptions.RequirementException(e))
84 110 except RepoError as e:
85 111 raise_from_original(exceptions.VcsException(e))
86 112 except LookupError as e:
87 113 raise_from_original(exceptions.LookupException(e))
88 114 except Exception as e:
89 115 if not hasattr(e, '_vcs_kind'):
90 116 log.exception("Unhandled exception in hg remote call")
91 117 raise_from_original(exceptions.UnhandledException(e))
92 118
93 119 raise
94 120 return wrapper
95 121
96 122
97 123 class MercurialFactory(RepoFactory):
98 124 repo_type = 'hg'
99 125
100 126 def _create_config(self, config, hooks=True):
101 127 if not hooks:
102 128 hooks_to_clean = frozenset((
103 129 'changegroup.repo_size', 'preoutgoing.pre_pull',
104 130 'outgoing.pull_logger', 'prechangegroup.pre_push'))
105 131 new_config = []
106 132 for section, option, value in config:
107 133 if section == 'hooks' and option in hooks_to_clean:
108 134 continue
109 135 new_config.append((section, option, value))
110 136 config = new_config
111 137
112 138 baseui = make_ui_from_config(config)
113 139 return baseui
114 140
115 141 def _create_repo(self, wire, create):
116 142 baseui = self._create_config(wire["config"])
117 return localrepository(baseui, wire["path"], create)
143 return instance(baseui, wire["path"], create)
118 144
119 145
120 146 class HgRemote(object):
121 147
122 148 def __init__(self, factory):
123 149 self._factory = factory
124 150
125 151 self._bulk_methods = {
126 152 "affected_files": self.ctx_files,
127 153 "author": self.ctx_user,
128 154 "branch": self.ctx_branch,
129 155 "children": self.ctx_children,
130 156 "date": self.ctx_date,
131 157 "message": self.ctx_description,
132 158 "parents": self.ctx_parents,
133 159 "status": self.ctx_status,
134 160 "obsolete": self.ctx_obsolete,
135 161 "phase": self.ctx_phase,
136 162 "hidden": self.ctx_hidden,
137 163 "_file_paths": self.ctx_list,
138 164 }
139 165
166 def _get_ctx(self, repo, ref):
167 return get_ctx(repo, ref)
168
140 169 @reraise_safe_exceptions
141 170 def discover_hg_version(self):
142 171 from mercurial import util
143 172 return util.version()
144 173
145 174 @reraise_safe_exceptions
175 def is_empty(self, wire):
176 repo = self._factory.repo(wire)
177
178 try:
179 return len(repo) == 0
180 except Exception:
181 log.exception("failed to read object_store")
182 return False
183
184 @reraise_safe_exceptions
146 185 def archive_repo(self, archive_path, mtime, file_info, kind):
147 186 if kind == "tgz":
148 187 archiver = archival.tarit(archive_path, mtime, "gz")
149 188 elif kind == "tbz2":
150 189 archiver = archival.tarit(archive_path, mtime, "bz2")
151 190 elif kind == 'zip':
152 191 archiver = archival.zipit(archive_path, mtime)
153 192 else:
154 193 raise exceptions.ArchiveException()(
155 194 'Remote does not support: "%s".' % kind)
156 195
157 196 for f_path, f_mode, f_is_link, f_content in file_info:
158 197 archiver.addfile(f_path, f_mode, f_is_link, f_content)
159 198 archiver.done()
160 199
161 200 @reraise_safe_exceptions
162 201 def bookmarks(self, wire):
163 202 repo = self._factory.repo(wire)
164 203 return dict(repo._bookmarks)
165 204
166 205 @reraise_safe_exceptions
167 206 def branches(self, wire, normal, closed):
168 207 repo = self._factory.repo(wire)
169 208 iter_branches = repo.branchmap().iterbranches()
170 209 bt = {}
171 210 for branch_name, _heads, tip, is_closed in iter_branches:
172 211 if normal and not is_closed:
173 212 bt[branch_name] = tip
174 213 if closed and is_closed:
175 214 bt[branch_name] = tip
176 215
177 216 return bt
178 217
179 218 @reraise_safe_exceptions
180 219 def bulk_request(self, wire, rev, pre_load):
181 220 result = {}
182 221 for attr in pre_load:
183 222 try:
184 223 method = self._bulk_methods[attr]
185 224 result[attr] = method(wire, rev)
186 225 except KeyError as e:
187 226 raise exceptions.VcsException(e)(
188 227 'Unknown bulk attribute: "%s"' % attr)
189 228 return result
190 229
191 230 @reraise_safe_exceptions
192 231 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
193 232 baseui = self._factory._create_config(wire["config"], hooks=hooks)
194 233 clone(baseui, source, dest, noupdate=not update_after_clone)
195 234
196 235 @reraise_safe_exceptions
197 236 def commitctx(
198 237 self, wire, message, parents, commit_time, commit_timezone,
199 238 user, files, extra, removed, updated):
200 239
201 def _filectxfn(_repo, memctx, path):
240 repo = self._factory.repo(wire)
241 baseui = self._factory._create_config(wire['config'])
242 publishing = baseui.configbool('phases', 'publish')
243 if publishing:
244 new_commit = 'public'
245 else:
246 new_commit = 'draft'
247
248 def _filectxfn(_repo, ctx, path):
202 249 """
203 250 Marks given path as added/changed/removed in a given _repo. This is
204 251 for internal mercurial commit function.
205 252 """
206 253
207 254 # check if this path is removed
208 255 if path in removed:
209 256 # returning None is a way to mark node for removal
210 257 return None
211 258
212 259 # check if this path is added
213 260 for node in updated:
214 261 if node['path'] == path:
215 262 return memfilectx(
216 263 _repo,
217 changectx=memctx,
264 changectx=ctx,
218 265 path=node['path'],
219 266 data=node['content'],
220 267 islink=False,
221 268 isexec=bool(node['mode'] & stat.S_IXUSR),
222 269 copied=False)
223 270
224 271 raise exceptions.AbortException()(
225 272 "Given path haven't been marked as added, "
226 273 "changed or removed (%s)" % path)
227 274
228 repo = self._factory.repo(wire)
275 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
229 276
230 277 commit_ctx = memctx(
231 278 repo=repo,
232 279 parents=parents,
233 280 text=message,
234 281 files=files,
235 282 filectxfn=_filectxfn,
236 283 user=user,
237 284 date=(commit_time, commit_timezone),
238 285 extra=extra)
239 286
240 287 n = repo.commitctx(commit_ctx)
241 288 new_id = hex(n)
242 289
243 290 return new_id
244 291
245 292 @reraise_safe_exceptions
246 293 def ctx_branch(self, wire, revision):
247 294 repo = self._factory.repo(wire)
248 ctx = repo[revision]
295 ctx = self._get_ctx(repo, revision)
249 296 return ctx.branch()
250 297
251 298 @reraise_safe_exceptions
252 299 def ctx_children(self, wire, revision):
253 300 repo = self._factory.repo(wire)
254 ctx = repo[revision]
301 ctx = self._get_ctx(repo, revision)
255 302 return [child.rev() for child in ctx.children()]
256 303
257 304 @reraise_safe_exceptions
258 305 def ctx_date(self, wire, revision):
259 306 repo = self._factory.repo(wire)
260 ctx = repo[revision]
307 ctx = self._get_ctx(repo, revision)
261 308 return ctx.date()
262 309
263 310 @reraise_safe_exceptions
264 311 def ctx_description(self, wire, revision):
265 312 repo = self._factory.repo(wire)
266 ctx = repo[revision]
313 ctx = self._get_ctx(repo, revision)
267 314 return ctx.description()
268 315
269 316 @reraise_safe_exceptions
270 def ctx_diff(
271 self, wire, revision, git=True, ignore_whitespace=True, context=3):
272 repo = self._factory.repo(wire)
273 ctx = repo[revision]
274 result = ctx.diff(
275 git=git, ignore_whitespace=ignore_whitespace, context=context)
276 return list(result)
277
278 @reraise_safe_exceptions
279 317 def ctx_files(self, wire, revision):
280 318 repo = self._factory.repo(wire)
281 ctx = repo[revision]
319 ctx = self._get_ctx(repo, revision)
282 320 return ctx.files()
283 321
284 322 @reraise_safe_exceptions
285 323 def ctx_list(self, path, revision):
286 324 repo = self._factory.repo(path)
287 ctx = repo[revision]
325 ctx = self._get_ctx(repo, revision)
288 326 return list(ctx)
289 327
290 328 @reraise_safe_exceptions
291 329 def ctx_parents(self, wire, revision):
292 330 repo = self._factory.repo(wire)
293 ctx = repo[revision]
331 ctx = self._get_ctx(repo, revision)
294 332 return [parent.rev() for parent in ctx.parents()]
295 333
296 334 @reraise_safe_exceptions
297 335 def ctx_phase(self, wire, revision):
298 336 repo = self._factory.repo(wire)
299 ctx = repo[revision]
337 ctx = self._get_ctx(repo, revision)
300 338 # public=0, draft=1, secret=3
301 339 return ctx.phase()
302 340
303 341 @reraise_safe_exceptions
304 342 def ctx_obsolete(self, wire, revision):
305 343 repo = self._factory.repo(wire)
306 ctx = repo[revision]
344 ctx = self._get_ctx(repo, revision)
307 345 return ctx.obsolete()
308 346
309 347 @reraise_safe_exceptions
310 348 def ctx_hidden(self, wire, revision):
311 349 repo = self._factory.repo(wire)
312 ctx = repo[revision]
350 ctx = self._get_ctx(repo, revision)
313 351 return ctx.hidden()
314 352
315 353 @reraise_safe_exceptions
316 354 def ctx_substate(self, wire, revision):
317 355 repo = self._factory.repo(wire)
318 ctx = repo[revision]
356 ctx = self._get_ctx(repo, revision)
319 357 return ctx.substate
320 358
321 359 @reraise_safe_exceptions
322 360 def ctx_status(self, wire, revision):
323 361 repo = self._factory.repo(wire)
324 ctx = repo[revision]
362 ctx = self._get_ctx(repo, revision)
325 363 status = repo[ctx.p1().node()].status(other=ctx.node())
326 364 # object of status (odd, custom named tuple in mercurial) is not
327 365 # correctly serializable, we make it a list, as the underling
328 366 # API expects this to be a list
329 367 return list(status)
330 368
331 369 @reraise_safe_exceptions
332 370 def ctx_user(self, wire, revision):
333 371 repo = self._factory.repo(wire)
334 ctx = repo[revision]
372 ctx = self._get_ctx(repo, revision)
335 373 return ctx.user()
336 374
337 375 @reraise_safe_exceptions
338 376 def check_url(self, url, config):
339 377 _proto = None
340 378 if '+' in url[:url.find('://')]:
341 379 _proto = url[0:url.find('+')]
342 380 url = url[url.find('+') + 1:]
343 381 handlers = []
344 382 url_obj = url_parser(url)
345 383 test_uri, authinfo = url_obj.authinfo()
346 384 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
347 385 url_obj.query = obfuscate_qs(url_obj.query)
348 386
349 387 cleaned_uri = str(url_obj)
350 388 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
351 389
352 390 if authinfo:
353 391 # create a password manager
354 392 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
355 393 passmgr.add_password(*authinfo)
356 394
357 395 handlers.extend((httpbasicauthhandler(passmgr),
358 396 httpdigestauthhandler(passmgr)))
359 397
360 398 o = urllib2.build_opener(*handlers)
361 399 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
362 400 ('Accept', 'application/mercurial-0.1')]
363 401
364 402 q = {"cmd": 'between'}
365 403 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
366 404 qs = '?%s' % urllib.urlencode(q)
367 405 cu = "%s%s" % (test_uri, qs)
368 406 req = urllib2.Request(cu, None, {})
369 407
370 408 try:
371 409 log.debug("Trying to open URL %s", cleaned_uri)
372 410 resp = o.open(req)
373 411 if resp.code != 200:
374 412 raise exceptions.URLError()('Return Code is not 200')
375 413 except Exception as e:
376 414 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
377 415 # means it cannot be cloned
378 416 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
379 417
380 418 # now check if it's a proper hg repo, but don't do it for svn
381 419 try:
382 420 if _proto == 'svn':
383 421 pass
384 422 else:
385 423 # check for pure hg repos
386 424 log.debug(
387 425 "Verifying if URL is a Mercurial repository: %s",
388 426 cleaned_uri)
389 427 ui = make_ui_from_config(config)
390 428 peer_checker = makepeer(ui, url)
391 429 peer_checker.lookup('tip')
392 430 except Exception as e:
393 431 log.warning("URL is not a valid Mercurial repository: %s",
394 432 cleaned_uri)
395 433 raise exceptions.URLError(e)(
396 434 "url [%s] does not look like an hg repo org_exc: %s"
397 435 % (cleaned_uri, e))
398 436
399 437 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
400 438 return True
401 439
402 440 @reraise_safe_exceptions
403 441 def diff(
404 442 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
405 443 context):
406 444 repo = self._factory.repo(wire)
407 445
408 446 if file_filter:
409 447 match_filter = match(file_filter[0], '', [file_filter[1]])
410 448 else:
411 449 match_filter = file_filter
412 450 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
413 451
414 452 try:
415 453 return "".join(patch.diff(
416 454 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
417 455 except RepoLookupError as e:
418 456 raise exceptions.LookupException(e)()
419 457
420 458 @reraise_safe_exceptions
421 459 def node_history(self, wire, revision, path, limit):
422 460 repo = self._factory.repo(wire)
423 461
424 ctx = repo[revision]
462 ctx = self._get_ctx(repo, revision)
425 463 fctx = ctx.filectx(path)
426 464
427 465 def history_iter():
428 466 limit_rev = fctx.rev()
429 467 for obj in reversed(list(fctx.filelog())):
430 468 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
471 continue
472
431 473 if limit_rev >= obj.rev():
432 474 yield obj
433 475
434 476 history = []
435 477 for cnt, obj in enumerate(history_iter()):
436 478 if limit and cnt >= limit:
437 479 break
438 480 history.append(hex(obj.node()))
439 481
440 482 return [x for x in history]
441 483
442 484 @reraise_safe_exceptions
443 485 def node_history_untill(self, wire, revision, path, limit):
444 486 repo = self._factory.repo(wire)
445 ctx = repo[revision]
487 ctx = self._get_ctx(repo, revision)
446 488 fctx = ctx.filectx(path)
447 489
448 490 file_log = list(fctx.filelog())
449 491 if limit:
450 492 # Limit to the last n items
451 493 file_log = file_log[-limit:]
452 494
453 495 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
454 496
455 497 @reraise_safe_exceptions
456 498 def fctx_annotate(self, wire, revision, path):
457 499 repo = self._factory.repo(wire)
458 ctx = repo[revision]
500 ctx = self._get_ctx(repo, revision)
459 501 fctx = ctx.filectx(path)
460 502
461 503 result = []
462 504 for i, annotate_obj in enumerate(fctx.annotate(), 1):
463 505 ln_no = i
464 506 sha = hex(annotate_obj.fctx.node())
465 507 content = annotate_obj.text
466 508 result.append((ln_no, sha, content))
467 509 return result
468 510
469 511 @reraise_safe_exceptions
470 512 def fctx_data(self, wire, revision, path):
471 513 repo = self._factory.repo(wire)
472 ctx = repo[revision]
514 ctx = self._get_ctx(repo, revision)
473 515 fctx = ctx.filectx(path)
474 516 return fctx.data()
475 517
476 518 @reraise_safe_exceptions
477 519 def fctx_flags(self, wire, revision, path):
478 520 repo = self._factory.repo(wire)
479 ctx = repo[revision]
521 ctx = self._get_ctx(repo, revision)
480 522 fctx = ctx.filectx(path)
481 523 return fctx.flags()
482 524
483 525 @reraise_safe_exceptions
484 526 def fctx_size(self, wire, revision, path):
485 527 repo = self._factory.repo(wire)
486 ctx = repo[revision]
528 ctx = self._get_ctx(repo, revision)
487 529 fctx = ctx.filectx(path)
488 530 return fctx.size()
489 531
490 532 @reraise_safe_exceptions
491 533 def get_all_commit_ids(self, wire, name):
492 534 repo = self._factory.repo(wire)
493 revs = repo.filtered(name).changelog.index
494 return map(lambda x: hex(x[7]), revs)[:-1]
535 repo = repo.filtered(name)
536 revs = map(lambda x: hex(x[7]), repo.changelog.index)
537 return revs
495 538
496 539 @reraise_safe_exceptions
497 540 def get_config_value(self, wire, section, name, untrusted=False):
498 541 repo = self._factory.repo(wire)
499 542 return repo.ui.config(section, name, untrusted=untrusted)
500 543
501 544 @reraise_safe_exceptions
502 545 def get_config_bool(self, wire, section, name, untrusted=False):
503 546 repo = self._factory.repo(wire)
504 547 return repo.ui.configbool(section, name, untrusted=untrusted)
505 548
506 549 @reraise_safe_exceptions
507 550 def get_config_list(self, wire, section, name, untrusted=False):
508 551 repo = self._factory.repo(wire)
509 552 return repo.ui.configlist(section, name, untrusted=untrusted)
510 553
511 554 @reraise_safe_exceptions
512 555 def is_large_file(self, wire, path):
513 556 return largefiles.lfutil.isstandin(path)
514 557
515 558 @reraise_safe_exceptions
516 559 def in_largefiles_store(self, wire, sha):
517 560 repo = self._factory.repo(wire)
518 561 return largefiles.lfutil.instore(repo, sha)
519 562
520 563 @reraise_safe_exceptions
521 564 def in_user_cache(self, wire, sha):
522 565 repo = self._factory.repo(wire)
523 566 return largefiles.lfutil.inusercache(repo.ui, sha)
524 567
525 568 @reraise_safe_exceptions
526 569 def store_path(self, wire, sha):
527 570 repo = self._factory.repo(wire)
528 571 return largefiles.lfutil.storepath(repo, sha)
529 572
530 573 @reraise_safe_exceptions
531 574 def link(self, wire, sha, path):
532 575 repo = self._factory.repo(wire)
533 576 largefiles.lfutil.link(
534 577 largefiles.lfutil.usercachepath(repo.ui, sha), path)
535 578
536 579 @reraise_safe_exceptions
537 580 def localrepository(self, wire, create=False):
538 581 self._factory.repo(wire, create=create)
539 582
540 583 @reraise_safe_exceptions
541 584 def lookup(self, wire, revision, both):
542 585
543 586 repo = self._factory.repo(wire)
544 587
545 588 if isinstance(revision, int):
546 589 # NOTE(marcink):
547 # since Mercurial doesn't support indexes properly
590 # since Mercurial doesn't support negative indexes properly
548 591 # we need to shift accordingly by one to get proper index, e.g
549 592 # repo[-1] => repo[-2]
550 593 # repo[0] => repo[-1]
551 # repo[1] => repo[2] we also never call repo[0] because
552 # it's actually second commit
553 594 if revision <= 0:
554 595 revision = revision + -1
555 else:
556 revision = revision + 1
557
558 596 try:
559 ctx = repo[revision]
560 except RepoLookupError as e:
597 ctx = self._get_ctx(repo, revision)
598 except (TypeError, RepoLookupError) as e:
599 e._org_exc_tb = traceback.format_exc()
561 600 raise exceptions.LookupException(e)(revision)
562 601 except LookupError as e:
602 e._org_exc_tb = traceback.format_exc()
563 603 raise exceptions.LookupException(e)(e.name)
564 604
565 605 if not both:
566 606 return ctx.hex()
567 607
568 608 ctx = repo[ctx.hex()]
569 609 return ctx.hex(), ctx.rev()
570 610
571 611 @reraise_safe_exceptions
572 612 def pull(self, wire, url, commit_ids=None):
573 613 repo = self._factory.repo(wire)
574 614 # Disable any prompts for this repo
575 615 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
576 616
577 617 remote = peer(repo, {}, url)
578 618 # Disable any prompts for this remote
579 619 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
580 620
581 621 if commit_ids:
582 622 commit_ids = [bin(commit_id) for commit_id in commit_ids]
583 623
584 624 return exchange.pull(
585 625 repo, remote, heads=commit_ids, force=None).cgresult
586 626
587 627 @reraise_safe_exceptions
588 628 def sync_push(self, wire, url):
589 629 if not self.check_url(url, wire['config']):
590 630 return
591 631
592 632 repo = self._factory.repo(wire)
593 633
594 634 # Disable any prompts for this repo
595 635 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
596 636
597 637 bookmarks = dict(repo._bookmarks).keys()
598 638 remote = peer(repo, {}, url)
599 639 # Disable any prompts for this remote
600 640 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
601 641
602 642 return exchange.push(
603 643 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
604 644
605 645 @reraise_safe_exceptions
606 646 def revision(self, wire, rev):
607 647 repo = self._factory.repo(wire)
608 ctx = repo[rev]
648 ctx = self._get_ctx(repo, rev)
609 649 return ctx.rev()
610 650
611 651 @reraise_safe_exceptions
612 652 def rev_range(self, wire, filter):
613 653 repo = self._factory.repo(wire)
614 654 revisions = [rev for rev in revrange(repo, filter)]
615 655 return revisions
616 656
617 657 @reraise_safe_exceptions
618 658 def rev_range_hash(self, wire, node):
619 659 repo = self._factory.repo(wire)
620 660
621 661 def get_revs(repo, rev_opt):
622 662 if rev_opt:
623 663 revs = revrange(repo, rev_opt)
624 664 if len(revs) == 0:
625 665 return (nullrev, nullrev)
626 666 return max(revs), min(revs)
627 667 else:
628 668 return len(repo) - 1, 0
629 669
630 670 stop, start = get_revs(repo, [node + ':'])
631 671 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
632 672 return revs
633 673
634 674 @reraise_safe_exceptions
635 675 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
636 676 other_path = kwargs.pop('other_path', None)
637 677
638 678 # case when we want to compare two independent repositories
639 679 if other_path and other_path != wire["path"]:
640 680 baseui = self._factory._create_config(wire["config"])
641 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
681 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
642 682 else:
643 683 repo = self._factory.repo(wire)
644 684 return list(repo.revs(rev_spec, *args))
645 685
646 686 @reraise_safe_exceptions
647 687 def strip(self, wire, revision, update, backup):
648 688 repo = self._factory.repo(wire)
649 ctx = repo[revision]
689 ctx = self._get_ctx(repo, revision)
650 690 hgext_strip(
651 691 repo.baseui, repo, ctx.node(), update=update, backup=backup)
652 692
653 693 @reraise_safe_exceptions
654 694 def verify(self, wire,):
655 695 repo = self._factory.repo(wire)
656 696 baseui = self._factory._create_config(wire['config'])
657 697 baseui.setconfig('ui', 'quiet', 'false')
658 698 output = io.BytesIO()
659 699
660 700 def write(data, **unused_kwargs):
661 701 output.write(data)
662 702 baseui.write = write
663 703
664 704 repo.ui = baseui
665 705 verify.verify(repo)
666 706 return output.getvalue()
667 707
668 708 @reraise_safe_exceptions
669 709 def tag(self, wire, name, revision, message, local, user,
670 710 tag_time, tag_timezone):
671 711 repo = self._factory.repo(wire)
672 ctx = repo[revision]
712 ctx = self._get_ctx(repo, revision)
673 713 node = ctx.node()
674 714
675 715 date = (tag_time, tag_timezone)
676 716 try:
677 717 hg_tag.tag(repo, name, node, message, local, user, date)
678 718 except Abort as e:
679 719 log.exception("Tag operation aborted")
680 720 # Exception can contain unicode which we convert
681 721 raise exceptions.AbortException(e)(repr(e))
682 722
683 723 @reraise_safe_exceptions
684 724 def tags(self, wire):
685 725 repo = self._factory.repo(wire)
686 726 return repo.tags()
687 727
688 728 @reraise_safe_exceptions
689 729 def update(self, wire, node=None, clean=False):
690 730 repo = self._factory.repo(wire)
691 731 baseui = self._factory._create_config(wire['config'])
692 732 commands.update(baseui, repo, node=node, clean=clean)
693 733
694 734 @reraise_safe_exceptions
695 735 def identify(self, wire):
696 736 repo = self._factory.repo(wire)
697 737 baseui = self._factory._create_config(wire['config'])
698 738 output = io.BytesIO()
699 739 baseui.write = output.write
700 740 # This is required to get a full node id
701 741 baseui.debugflag = True
702 742 commands.identify(baseui, repo, id=True)
703 743
704 744 return output.getvalue()
705 745
706 746 @reraise_safe_exceptions
707 747 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
708 748 hooks=True):
709 749 repo = self._factory.repo(wire)
710 750 baseui = self._factory._create_config(wire['config'], hooks=hooks)
711 751
712 752 # Mercurial internally has a lot of logic that checks ONLY if
713 753 # option is defined, we just pass those if they are defined then
714 754 opts = {}
715 755 if bookmark:
716 756 opts['bookmark'] = bookmark
717 757 if branch:
718 758 opts['branch'] = branch
719 759 if revision:
720 760 opts['rev'] = revision
721 761
722 762 commands.pull(baseui, repo, source, **opts)
723 763
724 764 @reraise_safe_exceptions
725 765 def heads(self, wire, branch=None):
726 766 repo = self._factory.repo(wire)
727 767 baseui = self._factory._create_config(wire['config'])
728 768 output = io.BytesIO()
729 769
730 770 def write(data, **unused_kwargs):
731 771 output.write(data)
732 772
733 773 baseui.write = write
734 774 if branch:
735 775 args = [branch]
736 776 else:
737 777 args = []
738 778 commands.heads(baseui, repo, template='{node} ', *args)
739 779
740 780 return output.getvalue()
741 781
742 782 @reraise_safe_exceptions
743 783 def ancestor(self, wire, revision1, revision2):
744 784 repo = self._factory.repo(wire)
745 785 changelog = repo.changelog
746 786 lookup = repo.lookup
747 787 a = changelog.ancestor(lookup(revision1), lookup(revision2))
748 788 return hex(a)
749 789
750 790 @reraise_safe_exceptions
751 791 def push(self, wire, revisions, dest_path, hooks=True,
752 792 push_branches=False):
753 793 repo = self._factory.repo(wire)
754 794 baseui = self._factory._create_config(wire['config'], hooks=hooks)
755 795 commands.push(baseui, repo, dest=dest_path, rev=revisions,
756 796 new_branch=push_branches)
757 797
758 798 @reraise_safe_exceptions
759 799 def merge(self, wire, revision):
760 800 repo = self._factory.repo(wire)
761 801 baseui = self._factory._create_config(wire['config'])
762 802 repo.ui.setconfig('ui', 'merge', 'internal:dump')
763 803
764 804 # In case of sub repositories are used mercurial prompts the user in
765 805 # case of merge conflicts or different sub repository sources. By
766 806 # setting the interactive flag to `False` mercurial doesn't prompt the
767 807 # used but instead uses a default value.
768 808 repo.ui.setconfig('ui', 'interactive', False)
809 commands.merge(baseui, repo, rev=revision)
769 810
770 commands.merge(baseui, repo, rev=revision)
811 @reraise_safe_exceptions
812 def merge_state(self, wire):
813 repo = self._factory.repo(wire)
814 repo.ui.setconfig('ui', 'merge', 'internal:dump')
815
816 # In case of sub repositories are used mercurial prompts the user in
817 # case of merge conflicts or different sub repository sources. By
818 # setting the interactive flag to `False` mercurial doesn't prompt the
819 # used but instead uses a default value.
820 repo.ui.setconfig('ui', 'interactive', False)
821 ms = hg_merge.mergestate(repo)
822 return [x for x in ms.unresolved()]
771 823
772 824 @reraise_safe_exceptions
773 825 def commit(self, wire, message, username, close_branch=False):
774 826 repo = self._factory.repo(wire)
775 827 baseui = self._factory._create_config(wire['config'])
776 828 repo.ui.setconfig('ui', 'username', username)
777 829 commands.commit(baseui, repo, message=message, close_branch=close_branch)
778 830
831
779 832 @reraise_safe_exceptions
780 833 def rebase(self, wire, source=None, dest=None, abort=False):
781 834 repo = self._factory.repo(wire)
782 835 baseui = self._factory._create_config(wire['config'])
783 836 repo.ui.setconfig('ui', 'merge', 'internal:dump')
784 837 rebase.rebase(
785 838 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
786 839
787 840 @reraise_safe_exceptions
788 841 def bookmark(self, wire, bookmark, revision=None):
789 842 repo = self._factory.repo(wire)
790 843 baseui = self._factory._create_config(wire['config'])
791 844 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
792 845
793 846 @reraise_safe_exceptions
794 847 def install_hooks(self, wire, force=False):
795 848 # we don't need any special hooks for Mercurial
796 849 pass
797 850
798 851 @reraise_safe_exceptions
799 852 def get_hooks_info(self, wire):
800 853 return {
801 854 'pre_version': vcsserver.__version__,
802 855 'post_version': vcsserver.__version__,
803 856 }
@@ -1,63 +1,74 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 from mercurial import demandimport
24 24 # patch demandimport, due to bug in mercurial when it always triggers
25 25 # demandimport.enable()
26 26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import tags as hg_tag
40 40
41 41 from mercurial.commands import clone, nullid, pull
42 42 from mercurial.context import memctx, memfilectx
43 43 from mercurial.error import (
44 44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 RequirementError)
45 RequirementError, ProgrammingError)
46 46 from mercurial.hgweb import hgweb_mod
47 from mercurial.localrepo import localrepository
47 from mercurial.localrepo import instance
48 48 from mercurial.match import match
49 49 from mercurial.mdiff import diffopts
50 50 from mercurial.node import bin, hex
51 51 from mercurial.encoding import tolocal
52 52 from mercurial.discovery import findcommonoutgoing
53 53 from mercurial.hg import peer
54 54 from mercurial.httppeer import makepeer
55 55 from mercurial.util import url as hg_url
56 from mercurial.scmutil import revrange
56 from mercurial.scmutil import revrange, revsymbol
57 57 from mercurial.node import nullrev
58 58 from mercurial import exchange
59 59 from hgext import largefiles
60 60
61 61 # those authnadlers are patched for python 2.6.5 bug an
62 62 # infinit looping when given invalid resources
63 63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
64
65
66 def get_ctx(repo, ref):
67 try:
68 ctx = repo[ref]
69 except ProgrammingError:
70 # we're unable to find the rev using a regular lookup, we fallback
71 # to slower, but backward compat revsymbol usage
72 ctx = revsymbol(repo, ref)
73
74 return ctx
@@ -1,203 +1,205 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import re
21 21 import os
22 22 import sys
23 23 import datetime
24 24 import logging
25 25 import pkg_resources
26 26
27 27 import vcsserver
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
32 32 def get_git_hooks_path(repo_path, bare):
33 33 hooks_path = os.path.join(repo_path, 'hooks')
34 34 if not bare:
35 35 hooks_path = os.path.join(repo_path, '.git', 'hooks')
36 36
37 37 return hooks_path
38 38
39 39
40 40 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
41 41 """
42 42 Creates a RhodeCode hook inside a git repository
43 43
44 44 :param repo_path: path to repository
45 45 :param executable: binary executable to put in the hooks
46 46 :param force_create: Create even if same name hook exists
47 47 """
48 48 executable = executable or sys.executable
49 49 hooks_path = get_git_hooks_path(repo_path, bare)
50 50
51 51 if not os.path.isdir(hooks_path):
52 52 os.makedirs(hooks_path, mode=0o777)
53 53
54 54 tmpl_post = pkg_resources.resource_string(
55 55 'vcsserver', '/'.join(
56 56 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
57 57 tmpl_pre = pkg_resources.resource_string(
58 58 'vcsserver', '/'.join(
59 59 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
60 60
61 61 path = '' # not used for now
62 62 timestamp = datetime.datetime.utcnow().isoformat()
63 63
64 64 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
65 65 log.debug('Installing git hook in repo %s', repo_path)
66 66 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
67 67 _rhodecode_hook = check_rhodecode_hook(_hook_file)
68 68
69 69 if _rhodecode_hook or force_create:
70 70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
71 71 try:
72 72 with open(_hook_file, 'wb') as f:
73 73 template = template.replace(
74 74 '_TMPL_', vcsserver.__version__)
75 75 template = template.replace('_DATE_', timestamp)
76 76 template = template.replace('_ENV_', executable)
77 77 template = template.replace('_PATH_', path)
78 78 f.write(template)
79 79 os.chmod(_hook_file, 0o755)
80 80 except IOError:
81 81 log.exception('error writing hook file %s', _hook_file)
82 82 else:
83 83 log.debug('skipping writing hook file')
84 84
85 85 return True
86 86
87 87
88 88 def get_svn_hooks_path(repo_path):
89 89 hooks_path = os.path.join(repo_path, 'hooks')
90 90
91 91 return hooks_path
92 92
93 93
94 94 def install_svn_hooks(repo_path, executable=None, force_create=False):
95 95 """
96 96 Creates RhodeCode hooks inside a svn repository
97 97
98 98 :param repo_path: path to repository
99 99 :param executable: binary executable to put in the hooks
100 100 :param force_create: Create even if same name hook exists
101 101 """
102 102 executable = executable or sys.executable
103 103 hooks_path = get_svn_hooks_path(repo_path)
104 104 if not os.path.isdir(hooks_path):
105 105 os.makedirs(hooks_path, mode=0o777)
106 106
107 107 tmpl_post = pkg_resources.resource_string(
108 108 'vcsserver', '/'.join(
109 109 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
110 110 tmpl_pre = pkg_resources.resource_string(
111 111 'vcsserver', '/'.join(
112 112 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
113 113
114 114 path = '' # not used for now
115 115 timestamp = datetime.datetime.utcnow().isoformat()
116 116
117 117 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
118 118 log.debug('Installing svn hook in repo %s', repo_path)
119 119 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
120 120 _rhodecode_hook = check_rhodecode_hook(_hook_file)
121 121
122 122 if _rhodecode_hook or force_create:
123 123 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
124 124
125 125 try:
126 126 with open(_hook_file, 'wb') as f:
127 127 template = template.replace(
128 128 '_TMPL_', vcsserver.__version__)
129 129 template = template.replace('_DATE_', timestamp)
130 130 template = template.replace('_ENV_', executable)
131 131 template = template.replace('_PATH_', path)
132 132
133 133 f.write(template)
134 134 os.chmod(_hook_file, 0o755)
135 135 except IOError:
136 136 log.exception('error writing hook file %s', _hook_file)
137 137 else:
138 138 log.debug('skipping writing hook file')
139 139
140 140 return True
141 141
142 142
143 143 def get_version_from_hook(hook_path):
144 144 version = ''
145 145 hook_content = read_hook_content(hook_path)
146 146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
147 147 if matches:
148 148 try:
149 149 version = matches.groups()[0]
150 150 log.debug('got version %s from hooks.', version)
151 151 except Exception:
152 152 log.exception("Exception while reading the hook version.")
153 153 return version.replace("'", "")
154 154
155 155
156 156 def check_rhodecode_hook(hook_path):
157 157 """
158 158 Check if the hook was created by RhodeCode
159 159 """
160 160 if not os.path.exists(hook_path):
161 161 return True
162 162
163 163 log.debug('hook exists, checking if it is from RhodeCode')
164 164
165 165 version = get_version_from_hook(hook_path)
166 166 if version:
167 167 return True
168 168
169 169 return False
170 170
171 171
172 172 def read_hook_content(hook_path):
173 content = ''
174 if os.path.isfile(hook_path):
173 175 with open(hook_path, 'rb') as f:
174 176 content = f.read()
175 177 return content
176 178
177 179
178 180 def get_git_pre_hook_version(repo_path, bare):
179 181 hooks_path = get_git_hooks_path(repo_path, bare)
180 182 _hook_file = os.path.join(hooks_path, 'pre-receive')
181 183 version = get_version_from_hook(_hook_file)
182 184 return version
183 185
184 186
185 187 def get_git_post_hook_version(repo_path, bare):
186 188 hooks_path = get_git_hooks_path(repo_path, bare)
187 189 _hook_file = os.path.join(hooks_path, 'post-receive')
188 190 version = get_version_from_hook(_hook_file)
189 191 return version
190 192
191 193
192 194 def get_svn_pre_hook_version(repo_path):
193 195 hooks_path = get_svn_hooks_path(repo_path)
194 196 _hook_file = os.path.join(hooks_path, 'pre-commit')
195 197 version = get_version_from_hook(_hook_file)
196 198 return version
197 199
198 200
199 201 def get_svn_post_hook_version(repo_path):
200 202 hooks_path = get_svn_hooks_path(repo_path)
201 203 _hook_file = os.path.join(hooks_path, 'post-commit')
202 204 version = get_version_from_hook(_hook_file)
203 205 return version
@@ -1,710 +1,711 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 from vcsserver.hgcompat import get_ctx
36 37
37 38 log = logging.getLogger(__name__)
38 39
39 40
40 41 class HooksHttpClient(object):
41 42 connection = None
42 43
43 44 def __init__(self, hooks_uri):
44 45 self.hooks_uri = hooks_uri
45 46
46 47 def __call__(self, method, extras):
47 48 connection = HTTPConnection(self.hooks_uri)
48 49 body = self._serialize(method, extras)
49 50 try:
50 51 connection.request('POST', '/', body)
51 52 except Exception:
52 53 log.error('Connection failed on %s', connection)
53 54 raise
54 55 response = connection.getresponse()
55 56
56 57 response_data = response.read()
57 58
58 59 try:
59 60 return json.loads(response_data)
60 61 except Exception:
61 62 log.exception('Failed to decode hook response json data. '
62 63 'response_code:%s, raw_data:%s',
63 64 response.status, response_data)
64 65 raise
65 66
66 67 def _serialize(self, hook_name, extras):
67 68 data = {
68 69 'method': hook_name,
69 70 'extras': extras
70 71 }
71 72 return json.dumps(data)
72 73
73 74
74 75 class HooksDummyClient(object):
75 76 def __init__(self, hooks_module):
76 77 self._hooks_module = importlib.import_module(hooks_module)
77 78
78 79 def __call__(self, hook_name, extras):
79 80 with self._hooks_module.Hooks() as hooks:
80 81 return getattr(hooks, hook_name)(extras)
81 82
82 83
83 84 class RemoteMessageWriter(object):
84 85 """Writer base class."""
85 86 def write(self, message):
86 87 raise NotImplementedError()
87 88
88 89
89 90 class HgMessageWriter(RemoteMessageWriter):
90 91 """Writer that knows how to send messages to mercurial clients."""
91 92
92 93 def __init__(self, ui):
93 94 self.ui = ui
94 95
95 96 def write(self, message):
96 97 # TODO: Check why the quiet flag is set by default.
97 98 old = self.ui.quiet
98 99 self.ui.quiet = False
99 100 self.ui.status(message.encode('utf-8'))
100 101 self.ui.quiet = old
101 102
102 103
103 104 class GitMessageWriter(RemoteMessageWriter):
104 105 """Writer that knows how to send messages to git clients."""
105 106
106 107 def __init__(self, stdout=None):
107 108 self.stdout = stdout or sys.stdout
108 109
109 110 def write(self, message):
110 111 self.stdout.write(message.encode('utf-8'))
111 112
112 113
113 114 class SvnMessageWriter(RemoteMessageWriter):
114 115 """Writer that knows how to send messages to svn clients."""
115 116
116 117 def __init__(self, stderr=None):
117 118 # SVN needs data sent to stderr for back-to-client messaging
118 119 self.stderr = stderr or sys.stderr
119 120
120 121 def write(self, message):
121 122 self.stderr.write(message.encode('utf-8'))
122 123
123 124
124 125 def _handle_exception(result):
125 126 exception_class = result.get('exception')
126 127 exception_traceback = result.get('exception_traceback')
127 128
128 129 if exception_traceback:
129 130 log.error('Got traceback from remote call:%s', exception_traceback)
130 131
131 132 if exception_class == 'HTTPLockedRC':
132 133 raise exceptions.RepositoryLockedException()(*result['exception_args'])
133 134 elif exception_class == 'HTTPBranchProtected':
134 135 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
135 136 elif exception_class == 'RepositoryError':
136 137 raise exceptions.VcsException()(*result['exception_args'])
137 138 elif exception_class:
138 139 raise Exception('Got remote exception "%s" with args "%s"' %
139 140 (exception_class, result['exception_args']))
140 141
141 142
142 143 def _get_hooks_client(extras):
143 144 if 'hooks_uri' in extras:
144 145 protocol = extras.get('hooks_protocol')
145 146 return HooksHttpClient(extras['hooks_uri'])
146 147 else:
147 148 return HooksDummyClient(extras['hooks_module'])
148 149
149 150
150 151 def _call_hook(hook_name, extras, writer):
151 152 hooks_client = _get_hooks_client(extras)
152 153 log.debug('Hooks, using client:%s', hooks_client)
153 154 result = hooks_client(hook_name, extras)
154 155 log.debug('Hooks got result: %s', result)
155 156
156 157 _handle_exception(result)
157 158 writer.write(result['output'])
158 159
159 160 return result['status']
160 161
161 162
162 163 def _extras_from_ui(ui):
163 164 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
164 165 if not hook_data:
165 166 # maybe it's inside environ ?
166 167 env_hook_data = os.environ.get('RC_SCM_DATA')
167 168 if env_hook_data:
168 169 hook_data = env_hook_data
169 170
170 171 extras = {}
171 172 if hook_data:
172 173 extras = json.loads(hook_data)
173 174 return extras
174 175
175 176
176 177 def _rev_range_hash(repo, node, check_heads=False):
177 178
178 179 commits = []
179 180 revs = []
180 start = repo[node].rev()
181 start = get_ctx(repo, node).rev()
181 182 end = len(repo)
182 183 for rev in range(start, end):
183 184 revs.append(rev)
184 ctx = repo[rev]
185 ctx = get_ctx(repo, rev)
185 186 commit_id = mercurial.node.hex(ctx.node())
186 187 branch = ctx.branch()
187 188 commits.append((commit_id, branch))
188 189
189 190 parent_heads = []
190 191 if check_heads:
191 192 parent_heads = _check_heads(repo, start, end, revs)
192 193 return commits, parent_heads
193 194
194 195
195 196 def _check_heads(repo, start, end, commits):
196 197 changelog = repo.changelog
197 198 parents = set()
198 199
199 200 for new_rev in commits:
200 201 for p in changelog.parentrevs(new_rev):
201 202 if p == mercurial.node.nullrev:
202 203 continue
203 204 if p < start:
204 205 parents.add(p)
205 206
206 207 for p in parents:
207 branch = repo[p].branch()
208 branch = get_ctx(repo, p).branch()
208 209 # The heads descending from that parent, on the same branch
209 210 parent_heads = set([p])
210 211 reachable = set([p])
211 212 for x in xrange(p + 1, end):
212 if repo[x].branch() != branch:
213 if get_ctx(repo, x).branch() != branch:
213 214 continue
214 215 for pp in changelog.parentrevs(x):
215 216 if pp in reachable:
216 217 reachable.add(x)
217 218 parent_heads.discard(pp)
218 219 parent_heads.add(x)
219 220 # More than one head? Suggest merging
220 221 if len(parent_heads) > 1:
221 222 return list(parent_heads)
222 223
223 224 return []
224 225
225 226
226 227 def _get_git_env():
227 228 env = {}
228 229 for k, v in os.environ.items():
229 230 if k.startswith('GIT'):
230 231 env[k] = v
231 232
232 233 # serialized version
233 234 return [(k, v) for k, v in env.items()]
234 235
235 236
236 237 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
237 238 env = {}
238 239 for k, v in os.environ.items():
239 240 if k.startswith('HG'):
240 241 env[k] = v
241 242
242 243 env['HG_NODE'] = old_rev
243 244 env['HG_NODE_LAST'] = new_rev
244 245 env['HG_TXNID'] = txnid
245 246 env['HG_PENDING'] = repo_path
246 247
247 248 return [(k, v) for k, v in env.items()]
248 249
249 250
250 251 def repo_size(ui, repo, **kwargs):
251 252 extras = _extras_from_ui(ui)
252 253 return _call_hook('repo_size', extras, HgMessageWriter(ui))
253 254
254 255
255 256 def pre_pull(ui, repo, **kwargs):
256 257 extras = _extras_from_ui(ui)
257 258 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
258 259
259 260
260 261 def pre_pull_ssh(ui, repo, **kwargs):
261 262 extras = _extras_from_ui(ui)
262 263 if extras and extras.get('SSH'):
263 264 return pre_pull(ui, repo, **kwargs)
264 265 return 0
265 266
266 267
267 268 def post_pull(ui, repo, **kwargs):
268 269 extras = _extras_from_ui(ui)
269 270 return _call_hook('post_pull', extras, HgMessageWriter(ui))
270 271
271 272
272 273 def post_pull_ssh(ui, repo, **kwargs):
273 274 extras = _extras_from_ui(ui)
274 275 if extras and extras.get('SSH'):
275 276 return post_pull(ui, repo, **kwargs)
276 277 return 0
277 278
278 279
279 280 def pre_push(ui, repo, node=None, **kwargs):
280 281 """
281 282 Mercurial pre_push hook
282 283 """
283 284 extras = _extras_from_ui(ui)
284 285 detect_force_push = extras.get('detect_force_push')
285 286
286 287 rev_data = []
287 288 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
288 289 branches = collections.defaultdict(list)
289 290 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
290 291 for commit_id, branch in commits:
291 292 branches[branch].append(commit_id)
292 293
293 294 for branch, commits in branches.items():
294 295 old_rev = kwargs.get('node_last') or commits[0]
295 296 rev_data.append({
296 297 'total_commits': len(commits),
297 298 'old_rev': old_rev,
298 299 'new_rev': commits[-1],
299 300 'ref': '',
300 301 'type': 'branch',
301 302 'name': branch,
302 303 })
303 304
304 305 for push_ref in rev_data:
305 306 push_ref['multiple_heads'] = _heads
306 307
307 308 repo_path = os.path.join(
308 309 extras.get('repo_store', ''), extras.get('repository', ''))
309 310 push_ref['hg_env'] = _get_hg_env(
310 311 old_rev=push_ref['old_rev'],
311 312 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
312 313 repo_path=repo_path)
313 314
314 315 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
315 316 extras['commit_ids'] = rev_data
316 317
317 318 return _call_hook('pre_push', extras, HgMessageWriter(ui))
318 319
319 320
320 321 def pre_push_ssh(ui, repo, node=None, **kwargs):
321 322 extras = _extras_from_ui(ui)
322 323 if extras.get('SSH'):
323 324 return pre_push(ui, repo, node, **kwargs)
324 325
325 326 return 0
326 327
327 328
328 329 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
329 330 """
330 331 Mercurial pre_push hook for SSH
331 332 """
332 333 extras = _extras_from_ui(ui)
333 334 if extras.get('SSH'):
334 335 permission = extras['SSH_PERMISSIONS']
335 336
336 337 if 'repository.write' == permission or 'repository.admin' == permission:
337 338 return 0
338 339
339 340 # non-zero ret code
340 341 return 1
341 342
342 343 return 0
343 344
344 345
345 346 def post_push(ui, repo, node, **kwargs):
346 347 """
347 348 Mercurial post_push hook
348 349 """
349 350 extras = _extras_from_ui(ui)
350 351
351 352 commit_ids = []
352 353 branches = []
353 354 bookmarks = []
354 355 tags = []
355 356
356 357 commits, _heads = _rev_range_hash(repo, node)
357 358 for commit_id, branch in commits:
358 359 commit_ids.append(commit_id)
359 360 if branch not in branches:
360 361 branches.append(branch)
361 362
362 363 if hasattr(ui, '_rc_pushkey_branches'):
363 364 bookmarks = ui._rc_pushkey_branches
364 365
365 366 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
366 367 extras['commit_ids'] = commit_ids
367 368 extras['new_refs'] = {
368 369 'branches': branches,
369 370 'bookmarks': bookmarks,
370 371 'tags': tags
371 372 }
372 373
373 374 return _call_hook('post_push', extras, HgMessageWriter(ui))
374 375
375 376
376 377 def post_push_ssh(ui, repo, node, **kwargs):
377 378 """
378 379 Mercurial post_push hook for SSH
379 380 """
380 381 if _extras_from_ui(ui).get('SSH'):
381 382 return post_push(ui, repo, node, **kwargs)
382 383 return 0
383 384
384 385
385 386 def key_push(ui, repo, **kwargs):
386 387 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
387 388 # store new bookmarks in our UI object propagated later to post_push
388 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
389 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
389 390 return
390 391
391 392
392 393 # backward compat
393 394 log_pull_action = post_pull
394 395
395 396 # backward compat
396 397 log_push_action = post_push
397 398
398 399
399 400 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
400 401 """
401 402 Old hook name: keep here for backward compatibility.
402 403
403 404 This is only required when the installed git hooks are not upgraded.
404 405 """
405 406 pass
406 407
407 408
408 409 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
409 410 """
410 411 Old hook name: keep here for backward compatibility.
411 412
412 413 This is only required when the installed git hooks are not upgraded.
413 414 """
414 415 pass
415 416
416 417
417 418 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
418 419
419 420
420 421 def git_pre_pull(extras):
421 422 """
422 423 Pre pull hook.
423 424
424 425 :param extras: dictionary containing the keys defined in simplevcs
425 426 :type extras: dict
426 427
427 428 :return: status code of the hook. 0 for success.
428 429 :rtype: int
429 430 """
430 431 if 'pull' not in extras['hooks']:
431 432 return HookResponse(0, '')
432 433
433 434 stdout = io.BytesIO()
434 435 try:
435 436 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
436 437 except Exception as error:
437 438 status = 128
438 439 stdout.write('ERROR: %s\n' % str(error))
439 440
440 441 return HookResponse(status, stdout.getvalue())
441 442
442 443
443 444 def git_post_pull(extras):
444 445 """
445 446 Post pull hook.
446 447
447 448 :param extras: dictionary containing the keys defined in simplevcs
448 449 :type extras: dict
449 450
450 451 :return: status code of the hook. 0 for success.
451 452 :rtype: int
452 453 """
453 454 if 'pull' not in extras['hooks']:
454 455 return HookResponse(0, '')
455 456
456 457 stdout = io.BytesIO()
457 458 try:
458 459 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
459 460 except Exception as error:
460 461 status = 128
461 462 stdout.write('ERROR: %s\n' % error)
462 463
463 464 return HookResponse(status, stdout.getvalue())
464 465
465 466
466 467 def _parse_git_ref_lines(revision_lines):
467 468 rev_data = []
468 469 for revision_line in revision_lines or []:
469 470 old_rev, new_rev, ref = revision_line.strip().split(' ')
470 471 ref_data = ref.split('/', 2)
471 472 if ref_data[1] in ('tags', 'heads'):
472 473 rev_data.append({
473 474 # NOTE(marcink):
474 475 # we're unable to tell total_commits for git at this point
475 476 # but we set the variable for consistency with GIT
476 477 'total_commits': -1,
477 478 'old_rev': old_rev,
478 479 'new_rev': new_rev,
479 480 'ref': ref,
480 481 'type': ref_data[1],
481 482 'name': ref_data[2],
482 483 })
483 484 return rev_data
484 485
485 486
486 487 def git_pre_receive(unused_repo_path, revision_lines, env):
487 488 """
488 489 Pre push hook.
489 490
490 491 :param extras: dictionary containing the keys defined in simplevcs
491 492 :type extras: dict
492 493
493 494 :return: status code of the hook. 0 for success.
494 495 :rtype: int
495 496 """
496 497 extras = json.loads(env['RC_SCM_DATA'])
497 498 rev_data = _parse_git_ref_lines(revision_lines)
498 499 if 'push' not in extras['hooks']:
499 500 return 0
500 501 empty_commit_id = '0' * 40
501 502
502 503 detect_force_push = extras.get('detect_force_push')
503 504
504 505 for push_ref in rev_data:
505 506 # store our git-env which holds the temp store
506 507 push_ref['git_env'] = _get_git_env()
507 508 push_ref['pruned_sha'] = ''
508 509 if not detect_force_push:
509 510 # don't check for forced-push when we don't need to
510 511 continue
511 512
512 513 type_ = push_ref['type']
513 514 new_branch = push_ref['old_rev'] == empty_commit_id
514 515 delete_branch = push_ref['new_rev'] == empty_commit_id
515 516 if type_ == 'heads' and not (new_branch or delete_branch):
516 517 old_rev = push_ref['old_rev']
517 518 new_rev = push_ref['new_rev']
518 519 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
519 520 stdout, stderr = subprocessio.run_command(
520 521 cmd, env=os.environ.copy())
521 522 # means we're having some non-reachable objects, this forced push was used
522 523 if stdout:
523 524 push_ref['pruned_sha'] = stdout.splitlines()
524 525
525 526 extras['hook_type'] = 'pre_receive'
526 527 extras['commit_ids'] = rev_data
527 528 return _call_hook('pre_push', extras, GitMessageWriter())
528 529
529 530
530 531 def git_post_receive(unused_repo_path, revision_lines, env):
531 532 """
532 533 Post push hook.
533 534
534 535 :param extras: dictionary containing the keys defined in simplevcs
535 536 :type extras: dict
536 537
537 538 :return: status code of the hook. 0 for success.
538 539 :rtype: int
539 540 """
540 541 extras = json.loads(env['RC_SCM_DATA'])
541 542 if 'push' not in extras['hooks']:
542 543 return 0
543 544
544 545 rev_data = _parse_git_ref_lines(revision_lines)
545 546
546 547 git_revs = []
547 548
548 549 # N.B.(skreft): it is ok to just call git, as git before calling a
549 550 # subcommand sets the PATH environment variable so that it point to the
550 551 # correct version of the git executable.
551 552 empty_commit_id = '0' * 40
552 553 branches = []
553 554 tags = []
554 555 for push_ref in rev_data:
555 556 type_ = push_ref['type']
556 557
557 558 if type_ == 'heads':
558 559 if push_ref['old_rev'] == empty_commit_id:
559 560 # starting new branch case
560 561 if push_ref['name'] not in branches:
561 562 branches.append(push_ref['name'])
562 563
563 564 # Fix up head revision if needed
564 565 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
565 566 try:
566 567 subprocessio.run_command(cmd, env=os.environ.copy())
567 568 except Exception:
568 569 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
569 570 'refs/heads/%s' % push_ref['name']]
570 571 print("Setting default branch to %s" % push_ref['name'])
571 572 subprocessio.run_command(cmd, env=os.environ.copy())
572 573
573 574 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
574 575 '--format=%(refname)', 'refs/heads/*']
575 576 stdout, stderr = subprocessio.run_command(
576 577 cmd, env=os.environ.copy())
577 578 heads = stdout
578 579 heads = heads.replace(push_ref['ref'], '')
579 580 heads = ' '.join(head for head
580 581 in heads.splitlines() if head) or '.'
581 582 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
582 583 '--pretty=format:%H', '--', push_ref['new_rev'],
583 584 '--not', heads]
584 585 stdout, stderr = subprocessio.run_command(
585 586 cmd, env=os.environ.copy())
586 587 git_revs.extend(stdout.splitlines())
587 588 elif push_ref['new_rev'] == empty_commit_id:
588 589 # delete branch case
589 590 git_revs.append('delete_branch=>%s' % push_ref['name'])
590 591 else:
591 592 if push_ref['name'] not in branches:
592 593 branches.append(push_ref['name'])
593 594
594 595 cmd = [settings.GIT_EXECUTABLE, 'log',
595 596 '{old_rev}..{new_rev}'.format(**push_ref),
596 597 '--reverse', '--pretty=format:%H']
597 598 stdout, stderr = subprocessio.run_command(
598 599 cmd, env=os.environ.copy())
599 600 git_revs.extend(stdout.splitlines())
600 601 elif type_ == 'tags':
601 602 if push_ref['name'] not in tags:
602 603 tags.append(push_ref['name'])
603 604 git_revs.append('tag=>%s' % push_ref['name'])
604 605
605 606 extras['hook_type'] = 'post_receive'
606 607 extras['commit_ids'] = git_revs
607 608 extras['new_refs'] = {
608 609 'branches': branches,
609 610 'bookmarks': [],
610 611 'tags': tags,
611 612 }
612 613
613 614 if 'repo_size' in extras['hooks']:
614 615 try:
615 616 _call_hook('repo_size', extras, GitMessageWriter())
616 617 except:
617 618 pass
618 619
619 620 return _call_hook('post_push', extras, GitMessageWriter())
620 621
621 622
622 623 def _get_extras_from_txn_id(path, txn_id):
623 624 extras = {}
624 625 try:
625 cmd = ['svnlook', 'pget',
626 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
626 627 '-t', txn_id,
627 628 '--revprop', path, 'rc-scm-extras']
628 629 stdout, stderr = subprocessio.run_command(
629 630 cmd, env=os.environ.copy())
630 631 extras = json.loads(base64.urlsafe_b64decode(stdout))
631 632 except Exception:
632 633 log.exception('Failed to extract extras info from txn_id')
633 634
634 635 return extras
635 636
636 637
637 638 def _get_extras_from_commit_id(commit_id, path):
638 639 extras = {}
639 640 try:
640 cmd = ['svnlook', 'pget',
641 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
641 642 '-r', commit_id,
642 643 '--revprop', path, 'rc-scm-extras']
643 644 stdout, stderr = subprocessio.run_command(
644 645 cmd, env=os.environ.copy())
645 646 extras = json.loads(base64.urlsafe_b64decode(stdout))
646 647 except Exception:
647 648 log.exception('Failed to extract extras info from commit_id')
648 649
649 650 return extras
650 651
651 652
652 653 def svn_pre_commit(repo_path, commit_data, env):
653 654 path, txn_id = commit_data
654 655 branches = []
655 656 tags = []
656 657
657 658 if env.get('RC_SCM_DATA'):
658 659 extras = json.loads(env['RC_SCM_DATA'])
659 660 else:
660 661 # fallback method to read from TXN-ID stored data
661 662 extras = _get_extras_from_txn_id(path, txn_id)
662 663 if not extras:
663 664 return 0
664 665
665 666 extras['hook_type'] = 'pre_commit'
666 extras['commit_ids'] = []
667 extras['commit_ids'] = [txn_id]
667 668 extras['txn_id'] = txn_id
668 669 extras['new_refs'] = {
669 670 'total_commits': 1,
670 671 'branches': branches,
671 672 'bookmarks': [],
672 673 'tags': tags,
673 674 }
674 675
675 676 return _call_hook('pre_push', extras, SvnMessageWriter())
676 677
677 678
678 679 def svn_post_commit(repo_path, commit_data, env):
679 680 """
680 681 commit_data is path, rev, txn_id
681 682 """
682 683 path, commit_id, txn_id = commit_data
683 684 branches = []
684 685 tags = []
685 686
686 687 if env.get('RC_SCM_DATA'):
687 688 extras = json.loads(env['RC_SCM_DATA'])
688 689 else:
689 690 # fallback method to read from TXN-ID stored data
690 691 extras = _get_extras_from_commit_id(commit_id, path)
691 692 if not extras:
692 693 return 0
693 694
694 695 extras['hook_type'] = 'post_commit'
695 696 extras['commit_ids'] = [commit_id]
696 697 extras['txn_id'] = txn_id
697 698 extras['new_refs'] = {
698 699 'branches': branches,
699 700 'bookmarks': [],
700 701 'tags': tags,
701 702 'total_commits': 1,
702 703 }
703 704
704 705 if 'repo_size' in extras['hooks']:
705 706 try:
706 707 _call_hook('repo_size', extras, SvnMessageWriter())
707 708 except Exception:
708 709 pass
709 710
710 711 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,151 +1,169 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import os
22 22 import time
23 23 import datetime
24 24 import msgpack
25 25 import logging
26 26 import traceback
27 27 import tempfile
28 28
29 from pyramid import compat
29 30
30 31 log = logging.getLogger(__name__)
31 32
32 33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
33 34 global_prefix = 'vcsserver'
34 35 exc_store_dir_name = 'rc_exception_store_v1'
35 36
36 37
37 38 def exc_serialize(exc_id, tb, exc_type):
38 39
39 40 data = {
40 41 'version': 'v1',
41 42 'exc_id': exc_id,
42 43 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
43 44 'exc_timestamp': repr(time.time()),
44 45 'exc_message': tb,
45 46 'exc_type': exc_type,
46 47 }
47 48 return msgpack.packb(data), data
48 49
49 50
50 51 def exc_unserialize(tb):
51 52 return msgpack.unpackb(tb)
52 53
53 54
54 55 def get_exc_store():
55 56 """
56 57 Get and create exception store if it's not existing
57 58 """
58 59 import vcsserver as app
59 60
60 61 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
61 62 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
62 63
63 64 _exc_store_path = os.path.abspath(_exc_store_path)
64 65 if not os.path.isdir(_exc_store_path):
65 66 os.makedirs(_exc_store_path)
66 67 log.debug('Initializing exceptions store at %s', _exc_store_path)
67 68 return _exc_store_path
68 69
69 70
70 71 def _store_exception(exc_id, exc_info, prefix):
71 72 exc_type, exc_value, exc_traceback = exc_info
73
72 74 tb = ''.join(traceback.format_exception(
73 75 exc_type, exc_value, exc_traceback, None))
74 76
77 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
78
79 if detailed_tb:
80 if isinstance(detailed_tb, compat.string_types):
81 remote_tb = [detailed_tb]
82
83 tb += (
84 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
85 '{}\n'
86 '+++ END SOURCE EXCEPTION +++\n'
87 ''.format('\n'.join(remote_tb))
88 )
89
90 # Avoid that remote_tb also appears in the frame
91 del remote_tb
92
75 93 exc_type_name = exc_type.__name__
76 94 exc_store_path = get_exc_store()
77 95 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
78 96 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
79 97 if not os.path.isdir(exc_store_path):
80 98 os.makedirs(exc_store_path)
81 99 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
82 100 with open(stored_exc_path, 'wb') as f:
83 101 f.write(exc_data)
84 102 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
85 103
86 104
87 105 def store_exception(exc_id, exc_info, prefix=global_prefix):
88 106 """
89 107 Example usage::
90 108
91 109 exc_info = sys.exc_info()
92 110 store_exception(id(exc_info), exc_info)
93 111 """
94 112
95 113 try:
96 114 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
97 115 except Exception:
98 116 log.exception('Failed to store exception `%s` information', exc_id)
99 117 # there's no way this can fail, it will crash server badly if it does.
100 118 pass
101 119
102 120
103 121 def _find_exc_file(exc_id, prefix=global_prefix):
104 122 exc_store_path = get_exc_store()
105 123 if prefix:
106 124 exc_id = '{}_{}'.format(exc_id, prefix)
107 125 else:
108 126 # search without a prefix
109 127 exc_id = '{}'.format(exc_id)
110 128
111 129 # we need to search the store for such start pattern as above
112 130 for fname in os.listdir(exc_store_path):
113 131 if fname.startswith(exc_id):
114 132 exc_id = os.path.join(exc_store_path, fname)
115 133 break
116 134 continue
117 135 else:
118 136 exc_id = None
119 137
120 138 return exc_id
121 139
122 140
123 141 def _read_exception(exc_id, prefix):
124 142 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
125 143 if exc_id_file_path:
126 144 with open(exc_id_file_path, 'rb') as f:
127 145 return exc_unserialize(f.read())
128 146 else:
129 147 log.debug('Exception File `%s` not found', exc_id_file_path)
130 148 return None
131 149
132 150
133 151 def read_exception(exc_id, prefix=global_prefix):
134 152 try:
135 153 return _read_exception(exc_id=exc_id, prefix=prefix)
136 154 except Exception:
137 155 log.exception('Failed to read exception `%s` information', exc_id)
138 156 # there's no way this can fail, it will crash server badly if it does.
139 157 return None
140 158
141 159
142 160 def delete_exception(exc_id, prefix=global_prefix):
143 161 try:
144 162 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
145 163 if exc_id_file_path:
146 164 os.remove(exc_id_file_path)
147 165
148 166 except Exception:
149 167 log.exception('Failed to remove exception `%s` information', exc_id)
150 168 # there's no way this can fail, it will crash server badly if it does.
151 169 pass
@@ -1,234 +1,235 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import itertools
21 21
22 22 import mercurial
23 23 import mercurial.error
24 24 import mercurial.wireprotoserver
25 25 import mercurial.hgweb.common
26 26 import mercurial.hgweb.hgweb_mod
27 27 import webob.exc
28 28
29 29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 30
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 # propagated from mercurial documentation
36 36 HG_UI_SECTIONS = [
37 37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 40 ]
41 41
42 42
43 43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 44 """Extension of hgweb that simplifies some functions."""
45 45
46 46 def _get_view(self, repo):
47 47 """Views are not supported."""
48 48 return repo
49 49
50 50 def loadsubweb(self):
51 51 """The result is only used in the templater method which is not used."""
52 52 return None
53 53
54 54 def run(self):
55 55 """Unused function so raise an exception if accidentally called."""
56 56 raise NotImplementedError
57 57
58 58 def templater(self, req):
59 59 """Function used in an unreachable code path.
60 60
61 61 This code is unreachable because we guarantee that the HTTP request,
62 62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 63 never going to get a user-visible url.
64 64 """
65 65 raise NotImplementedError
66 66
67 67 def archivelist(self, nodeid):
68 68 """Unused function so raise an exception if accidentally called."""
69 69 raise NotImplementedError
70 70
71 71 def __call__(self, environ, start_response):
72 72 """Run the WSGI application.
73 73
74 74 This may be called by multiple threads.
75 75 """
76 76 from mercurial.hgweb import request as requestmod
77 77 req = requestmod.parserequestfromenv(environ)
78 78 res = requestmod.wsgiresponse(req, start_response)
79 79 gen = self.run_wsgi(req, res)
80 80
81 81 first_chunk = None
82 82
83 83 try:
84 84 data = gen.next()
85 85
86 86 def first_chunk():
87 87 yield data
88 88 except StopIteration:
89 89 pass
90 90
91 91 if first_chunk:
92 92 return itertools.chain(first_chunk(), gen)
93 93 return gen
94 94
95 95 def _runwsgi(self, req, res, repo):
96 96
97 97 cmd = req.qsparams.get('cmd', '')
98 98 if not mercurial.wireprotoserver.iscmd(cmd):
99 99 # NOTE(marcink): for unsupported commands, we return bad request
100 100 # internally from HG
101 101 from mercurial.hgweb.common import statusmessage
102 102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 103 res.setbodybytes('')
104 104 return res.sendresponse()
105 105
106 106 return super(HgWeb, self)._runwsgi(req, res, repo)
107 107
108 108
109 109 def make_hg_ui_from_config(repo_config):
110 110 baseui = mercurial.ui.ui()
111 111
112 112 # clean the baseui object
113 113 baseui._ocfg = mercurial.config.config()
114 114 baseui._ucfg = mercurial.config.config()
115 115 baseui._tcfg = mercurial.config.config()
116 116
117 117 for section, option, value in repo_config:
118 118 baseui.setconfig(section, option, value)
119 119
120 120 # make our hgweb quiet so it doesn't print output
121 121 baseui.setconfig('ui', 'quiet', 'true')
122 122
123 123 return baseui
124 124
125 125
126 126 def update_hg_ui_from_hgrc(baseui, repo_path):
127 127 path = os.path.join(repo_path, '.hg', 'hgrc')
128 128
129 129 if not os.path.isfile(path):
130 130 log.debug('hgrc file is not present at %s, skipping...', path)
131 131 return
132 132 log.debug('reading hgrc from %s', path)
133 133 cfg = mercurial.config.config()
134 134 cfg.read(path)
135 135 for section in HG_UI_SECTIONS:
136 136 for k, v in cfg.items(section):
137 137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
138 138 baseui.setconfig(section, k, v)
139 139
140 140
141 141 def create_hg_wsgi_app(repo_path, repo_name, config):
142 142 """
143 143 Prepares a WSGI application to handle Mercurial requests.
144 144
145 145 :param config: is a list of 3-item tuples representing a ConfigObject
146 146 (it is the serialized version of the config object).
147 147 """
148 148 log.debug("Creating Mercurial WSGI application")
149 149
150 150 baseui = make_hg_ui_from_config(config)
151 151 update_hg_ui_from_hgrc(baseui, repo_path)
152 152
153 153 try:
154 154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
155 155 except mercurial.error.RequirementError as e:
156 156 raise exceptions.RequirementException(e)(e)
157 157
158 158
159 159 class GitHandler(object):
160 160 """
161 161 Handler for Git operations like push/pull etc
162 162 """
163 163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
164 164 extras):
165 165 if not os.path.isdir(repo_location):
166 166 raise OSError(repo_location)
167 167 self.content_path = repo_location
168 168 self.repo_name = repo_name
169 169 self.repo_location = repo_location
170 170 self.extras = extras
171 171 self.git_path = git_path
172 172 self.update_server_info = update_server_info
173 173
174 174 def __call__(self, environ, start_response):
175 175 app = webob.exc.HTTPNotFound()
176 176 candidate_paths = (
177 177 self.content_path, os.path.join(self.content_path, '.git'))
178 178
179 179 for content_path in candidate_paths:
180 180 try:
181 181 app = pygrack.GitRepository(
182 182 self.repo_name, content_path, self.git_path,
183 183 self.update_server_info, self.extras)
184 184 break
185 185 except OSError:
186 186 continue
187 187
188 188 return app(environ, start_response)
189 189
190 190
191 191 def create_git_wsgi_app(repo_path, repo_name, config):
192 192 """
193 193 Creates a WSGI application to handle Git requests.
194 194
195 195 :param config: is a dictionary holding the extras.
196 196 """
197 197 git_path = settings.GIT_EXECUTABLE
198 198 update_server_info = config.pop('git_update_server_info')
199 199 app = GitHandler(
200 200 repo_path, repo_name, git_path, update_server_info, config)
201 201
202 202 return app
203 203
204 204
205 205 class GitLFSHandler(object):
206 206 """
207 207 Handler for Git LFS operations
208 208 """
209 209
210 210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
211 211 extras):
212 212 if not os.path.isdir(repo_location):
213 213 raise OSError(repo_location)
214 214 self.content_path = repo_location
215 215 self.repo_name = repo_name
216 216 self.repo_location = repo_location
217 217 self.extras = extras
218 218 self.git_path = git_path
219 219 self.update_server_info = update_server_info
220 220
221 def get_app(self, git_lfs_enabled, git_lfs_store_path):
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
221 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
223 223 return app
224 224
225 225
226 226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 227 git_path = settings.GIT_EXECUTABLE
228 228 update_server_info = config.pop('git_update_server_info')
229 229 git_lfs_enabled = config.pop('git_lfs_enabled')
230 230 git_lfs_store_path = config.pop('git_lfs_store_path')
231 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
231 232 app = GitLFSHandler(
232 233 repo_path, repo_name, git_path, update_server_info, config)
233 234
234 return app.get_app(git_lfs_enabled, git_lfs_store_path)
235 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
@@ -1,20 +1,22 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 WIRE_ENCODING = 'UTF-8'
19 19 GIT_EXECUTABLE = 'git'
20 SVN_EXECUTABLE = 'svn'
21 SVNLOOK_EXECUTABLE = 'svnlook'
20 22 BINARY_DIR = ''
@@ -1,732 +1,775 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
from __future__ import absolute_import

import functools
import logging
import os
import posixpath as vcspath
import StringIO
import subprocess
import traceback
import urllib
import urlparse
from urllib2 import URLError

import svn.client
import svn.core
import svn.delta
import svn.diff
import svn.fs
import svn.repos

from vcsserver import svn_diff, exceptions, subprocessio, settings
from vcsserver.base import RepoFactory, raise_from_original
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
# Set of svn compatible version flags.
# Compare with subversion/svnadmin/svnadmin.c
svn_compatible_versions = {
    'pre-1.4-compatible',
    'pre-1.5-compatible',
    'pre-1.6-compatible',
    'pre-1.8-compatible',
    'pre-1.9-compatible'
}

# Maps each compatibility flag to the filesystem format version that is
# passed to svn.repos.create via the 'compatible-version' fs config key.
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}
60 60
61 61
def reraise_safe_exceptions(func):
    """
    Decorator converting svn (SWIG) exceptions into neutral, wire-safe ones.

    Exceptions already marked by the vcsserver exception machinery (they
    carry a ``_vcs_kind`` attribute) are re-raised unchanged; anything else
    is logged and wrapped in ``exceptions.UnhandledException`` while the
    original traceback is preserved via ``raise_from_original``.
    """
    # functools.wraps keeps __name__/__doc__ of the decorated method intact,
    # which matters for logging and RPC method discovery.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in svn remote call")
                raise_from_original(exceptions.UnhandledException(e))
            raise
    return wrapper
73 73
74 74
class SubversionFactory(RepoFactory):
    """Factory producing (optionally cached) low-level SVN repo objects."""

    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        """
        Open - or, when ``create`` is True, create - the repository at
        ``wire['path']``.

        :param compatible_version: one of the ``svn_compatible_versions``
            flags; translated to an fs 'compatible-version' setting.
        :raises Exception: when an unknown compatibility flag is given.
        """
        path = svn.core.svn_path_canonicalize(wire['path'])
        if create:
            # default to the newest supported filesystem format
            fs_config = {'compatible-version': '1.9'}
            if compatible_version:
                if compatible_version not in svn_compatible_versions:
                    raise Exception('Unknown SVN compatible version "{}"'
                                    .format(compatible_version))
                fs_config['compatible-version'] = \
                    svn_compatible_versions_map[compatible_version]

            log.debug('Create SVN repo with config "%s"', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)
        else:
            repo = svn.repos.open(path)

        log.debug('Got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.

        Uses internally the low level beaker API since the decorators introduce
        significant overhead.
        """
        region = self._cache_region
        context = wire.get('context', None)
        repo_path = wire.get('path', '')
        context_uid = '{}'.format(context)
        cache = wire.get('cache', True)
        # only cache when the caller supplied a context and did not opt out
        cache_on = context and cache

        @region.conditional_cache_on_arguments(condition=cache_on)
        def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
            # the underscore-prefixed args only exist to form the cache key
            return self._create_repo(wire, create, compatible_version)

        return create_new_repo(self.repo_type, repo_path, context_uid,
                               compatible_version)
117 117
118 118
# Translates svn node-kind constants to the string names used on the wire.
# Kinds not listed here (none/unknown) map to None via .get() at call sites.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
123 123
124 124
class SvnRemote(object):
    """
    Remote interface exposing Subversion repository operations to the
    vcsserver RPC layer. Each public method receives a ``wire`` dict
    describing the repository (at least ``path``, plus caching hints like
    ``context``/``cache``) which is resolved via ``self._factory``.
    """

    def __init__(self, factory, hg_factory=None):
        # factory: SubversionFactory producing (cached) low-level repo objects
        self._factory = factory
        # TODO: Remove once we do not use internal Mercurial objects anymore
        # for subversion
        self._hg_factory = hg_factory

    @reraise_safe_exceptions
    def discover_svn_version(self):
        """Return the version string of the SVN bindings, or None if absent."""
        try:
            import svn.core
            svn_ver = svn.core.SVN_VERSION
        except ImportError:
            svn_ver = None
        return svn_ver

    @reraise_safe_exceptions
    def is_empty(self, wire):
        """Return True when the repository has no commits (youngest rev 0)."""
        # NOTE(review): the result of repo() is unused here; lookup() below
        # opens the repository again - confirm whether this call is only kept
        # to warm the factory cache.
        repo = self._factory.repo(wire)

        try:
            return self.lookup(wire, -1) == 0
        except Exception:
            # best-effort: on read failure report "not empty" rather than raise
            log.exception("failed to read object_store")
            return False

    def check_url(self, url, config_items):
        """
        Validate that ``url`` points to a reachable Subversion repository.

        :raises urllib2.URLError: when no valid repository UUID is returned.
        """
        # this can throw exception if not installed, but we detect this
        from hgsubversion import svnrepo

        baseui = self._hg_factory._create_config(config_items)
        # uuid function gets only a valid UUID from a proper repo, else
        # throws exception
        try:
            svnrepo.svnremoterepo(baseui, url).svn.uuid
        except Exception:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
            raise URLError(
                '"%s" is not a valid Subversion source url.' % (url, ))
        return True

    def is_path_valid_repository(self, wire, path):
        """Cheaply check whether ``path`` holds an SVN repository."""

        # NOTE(marcink): short circuit the check for SVN repo
        # the repos.open might be expensive to check, but we have one cheap
        # pre condition that we can use, to check for 'format' file

        if not os.path.isfile(os.path.join(path, 'format')):
            return False

        try:
            svn.repos.open(path)
        except svn.core.SubversionException:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
            return False
        return True

    @reraise_safe_exceptions
    def verify(self, wire,):
        """Run ``svnadmin info`` on the repository and return its stdout."""
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        cmd = ['svnadmin', 'info', repo_path]
        stdout, stderr = subprocessio.run_command(cmd)
        return stdout

    def lookup(self, wire, revision):
        """Resolve ``revision`` (only -1/None/'HEAD' supported) to a number."""
        if revision not in [-1, None, 'HEAD']:
            raise NotImplementedError
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        head = svn.fs.youngest_rev(fs_ptr)
        return head

    def lookup_interval(self, wire, start_ts, end_ts):
        """Map a (start_ts, end_ts) timestamp window to (start_rev, end_rev)."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        start_rev = None
        end_rev = None
        if start_ts:
            start_ts_svn = apr_time_t(start_ts)
            # +1: dated_revision yields the last rev at/before the timestamp
            start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
        else:
            start_rev = 1
        if end_ts:
            end_ts_svn = apr_time_t(end_ts)
            end_rev = svn.repos.dated_revision(repo, end_ts_svn)
        else:
            end_rev = svn.fs.youngest_rev(fsobj)
        return start_rev, end_rev

    def revision_properties(self, wire, revision):
        """Return the revision property dict (svn:log, svn:author, ...)."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        return svn.fs.revision_proplist(fs_ptr, revision)

    def revision_changes(self, wire, revision):
        """
        Return {'added': [...], 'changed': [...], 'removed': [...]} file
        paths touched by ``revision``. Directory nodes are skipped.
        """

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        # replay the revision through a ChangeCollector to gather its changes
        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.iteritems():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.

            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action in [svn.repos.CHANGE_ACTION_ADD]:
                added.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
                                   svn.repos.CHANGE_ACTION_REPLACE]:
                changed.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action %s not supported on path %s" % (
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes

    def node_history(self, wire, path, revision, limit):
        """
        Return up to ``limit`` revision numbers in which ``path`` changed,
        newest first; copy history is not followed.
        """
        cross_copies = False
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        history_revisions = []
        history = svn.fs.node_history(rev_root, path)
        history = svn.fs.history_prev(history, cross_copies)
        while history:
            __, node_revision = svn.fs.history_location(history)
            history_revisions.append(node_revision)
            if limit and len(history_revisions) >= limit:
                break
            history = svn.fs.history_prev(history, cross_copies)
        return history_revisions

    def node_properties(self, wire, path, revision):
        """Return the node property dict of ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)
        return svn.fs.node_proplist(rev_root, path)

    def file_annotate(self, wire, path, revision):
        """Blame ``path`` at ``revision``; returns (line_no, rev, line) tuples."""
        abs_path = 'file://' + urllib.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                "Blame not supported or file does not exist at path %s. "
                "Error %s." % (path, exc))

        return annotations

    def get_node_type(self, wire, path, rev=None):
        """Return 'file'/'dir' for ``path`` (None for missing/other kinds)."""
        repo = self._factory.repo(wire)
        fs_ptr = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_rev(fs_ptr)
        root = svn.fs.revision_root(fs_ptr, rev)
        node = svn.fs.check_path(root, path)
        return NODE_TYPE_MAPPING.get(node, None)

    def get_nodes(self, wire, path, revision=None):
        """List (name, type) entries of directory ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_rev(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        entries = svn.fs.dir_entries(root, path)
        result = []
        for entry_path, entry_info in entries.iteritems():
            result.append(
                (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
        return result

    def get_file_content(self, wire, path, rev=None):
        """Return the raw content of ``path`` at ``rev`` (HEAD by default)."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if rev is None:
            rev = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, rev)
        content = svn.core.Stream(svn.fs.file_contents(root, path))
        return content.read()

    def get_file_size(self, wire, path, revision=None):
        """Return the size in bytes of ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        if revision is None:
            revision = svn.fs.youngest_revision(fsobj)
        root = svn.fs.revision_root(fsobj, revision)
        size = svn.fs.file_length(root, path)
        return size

    def create_repository(self, wire, compatible_version=None):
        """Create a new repository at ``wire['path']``."""
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)

    def get_url_and_credentials(self, src_url):
        """Extract credentials embedded in ``src_url``; returns (user, pw, url)."""
        obj = urlparse.urlparse(src_url)
        username = obj.username or None
        password = obj.password or None
        return username, password, src_url

    def import_remote_repository(self, wire, src_url):
        """
        Stream a remote repository dump (svnrdump) into the local repo
        (svnadmin load). Blocks until both subprocesses have finished.
        """
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        username, password, src_url = self.get_url_and_credentials(src_url)
        rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
                     '--trust-server-cert-failures=unknown-ca']
        if username and password:
            rdump_cmd += ['--username', username, '--password', password]
        rdump_cmd += [src_url]

        # pipe svnrdump's stdout straight into svnadmin load
        rdump = subprocess.Popen(
            rdump_cmd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        log.debug('Return process ended with code: %s', rdump.returncode)
        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'

            if reason == 'UNKNOWN':
                reason = 'UNKNOWN:{}'.format(errors)
            raise Exception(
                'Failed to dump the remote repository from %s. Reason:%s' % (
                    src_url, reason))
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))

    def commit(self, wire, message, author, timestamp, updated, removed):
        """
        Commit ``updated``/``removed`` node dicts in a single transaction and
        return the new revision number.
        """
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # backdate svn:date to the caller-supplied timestamp
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id

    def diff(self, wire, rev1, rev2, path1=None, path2=None,
             ignore_whitespace=False, context=3):
        """
        Return a git-style unified diff between two revisions (optionally
        limited to given paths); empty string when the paths do not exist.
        """

        wire.update(cache=False)
        repo = self._factory.repo(wire)
        diff_creator = SvnDiffer(
            repo, rev1, path1, rev2, path2, ignore_whitespace, context)
        try:
            return diff_creator.generate_diff()
        except svn.core.SubversionException as e:
            log.exception(
                "Error during diff operation operation. "
                "Path might not exist %s, %s" % (path1, path2))
            return ""

    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        # SVN has no LFS concept; nothing is ever a "large file" here.
        return False

    @reraise_safe_exceptions
    def run_svn_command(self, wire, cmd, **opts):
        """
        Execute an svn binary command, returning (stdout, stderr).

        With ``_safe`` in ``opts``, a failure to spawn the process returns
        ('', err) instead of raising.
        """
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        safe_call = False
        if '_safe' in opts:
            # NOTE(review): '_safe' is read but never popped, so it is
            # forwarded to SubprocessIOChunker below - confirm the chunker
            # tolerates/ignores unknown kwargs.
            safe_call = True

        svnenv = os.environ.copy()
        svnenv.update(opts.pop('extra_env', {}))

        _opts = {'env': svnenv, 'shell': False}

        try:
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run svn command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)

    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """Install/refresh the RhodeCode SVN hooks for the repository."""
        from vcsserver.hook_utils import install_svn_hooks
        repo_path = wire['path']
        binary_dir = settings.BINARY_DIR
        executable = None
        if binary_dir:
            executable = os.path.join(binary_dir, 'python')
        return install_svn_hooks(
            repo_path, executable=executable, force_create=force)

    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        """Return the installed pre/post hook versions for the repository."""
        from vcsserver.hook_utils import (
            get_svn_pre_hook_version, get_svn_post_hook_version)
        repo_path = wire['path']
        return {
            'pre_version': get_svn_pre_hook_version(repo_path),
            'post_version': get_svn_post_hook_version(repo_path),
        }
470 513
471 514
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # set per node while generating; when True, text diffing is skipped
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # default src path to tgt path so single-path diffs compare in place
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # both sides must exist as the same node kind (or not exist at all)
        if (self.tgt_kind != svn.core.svn_node_none and
            self.src_kind != svn.core.svn_node_none and
            self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the full unified diff as a string."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # collect all changed entries between src and tgt trees, then emit a
        # per-node diff for each of them
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # derive the change kind from which side of the diff exists
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write one node's git-style diff (headers + hunks) into ``buf``."""

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        # anything with a non-text svn:mime-type is treated as binary
        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
            buf.write("Index: %s\n" % (tgt_path, ))
            buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # prefer the target side's svn:mime-type; fall back to the source
        # side when the node does not exist in the target revision
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Return the node's content split into lines (empty for binaries,
        directories and missing nodes)."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
631 674
632 675
633 676
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        # list of (path, node_kind, action) tuples collected during delta replay
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
652 695
653 696
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for svn.repos APIs that grants access to every path."""
    return True
656 699
657 700
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # node: dict with at least 'path' and, for updates, 'content' plus
        # optional 'properties'
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create/overwrite the node and apply its content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # walk upwards collecting missing dirs, then create them top-down
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        # send the whole new content as a single text delta
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
714 757
715 758
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds) into the APR timestamp type
    apr_time_t, which counts microseconds.
    """
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
721 764
722 765
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure and return it
    wrapped in a `svn_opt_revision_t` marked as a revision-number kind.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num
    opt_revision = svn.core.svn_opt_revision_t()
    opt_revision.kind = svn.core.svn_opt_revision_number
    opt_revision.value = rev_value
    return opt_revision
General Comments 0
You need to be logged in to leave comments. Login now