##// END OF EJS Templates
lfs: log information about Internal Server Errors reported in the Batch API...
Matt Harbison -
r37708:726c4102 default
parent child Browse files
Show More
@@ -1,291 +1,307 b''
1 1 # wireprotolfsserver.py - lfs protocol server side implementation
2 2 #
3 3 # Copyright 2018 Matt Harbison <matt_harbison@yahoo.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import datetime
11 11 import errno
12 12 import json
13 import traceback
13 14
14 15 from mercurial.hgweb import (
15 16 common as hgwebcommon,
16 17 )
17 18
18 19 from mercurial import (
19 20 pycompat,
20 21 )
21 22
22 23 HTTP_OK = hgwebcommon.HTTP_OK
23 24 HTTP_CREATED = hgwebcommon.HTTP_CREATED
24 25 HTTP_BAD_REQUEST = hgwebcommon.HTTP_BAD_REQUEST
25 26 HTTP_NOT_FOUND = hgwebcommon.HTTP_NOT_FOUND
26 27
def handlewsgirequest(orig, rctx, req, res, checkperm):
    """Wrap wireprotoserver.handlewsgirequest() to possibly process an LFS
    request if it is left unprocessed by the wrapped method.
    """
    if orig(rctx, req, res, checkperm):
        return True

    # LFS serving is experimental and opt-in; bail out early when it is
    # disabled or when there is no sub-path to dispatch on.
    if not rctx.repo.ui.configbool('experimental', 'lfs.serve'):
        return False
    if not req.dispatchpath:
        return False

    path = req.dispatchpath

    try:
        if path == b'.git/info/lfs/objects/batch':
            checkperm(rctx, req, 'pull')
            return _processbatchrequest(rctx.repo, req, res)
        # TODO: reserve and use a path in the proposed http wireprotocol /api/
        # namespace?
        if path.startswith(b'.hg/lfs/objects'):
            return _processbasictransfer(
                rctx.repo, req, res,
                lambda perm: checkperm(rctx, req, perm))
        return False
    except hgwebcommon.ErrorResponse as exc:
        # XXX: copied from the handler surrounding wireprotoserver._callhttp()
        # in the wrapped function.  Should this be moved back to hgweb to
        # be a common handler?
        for header, value in exc.headers:
            res.headers[header] = value
        msg = pycompat.bytestr(exc)
        res.status = hgwebcommon.statusmessage(exc.code, msg)
        res.setbodybytes(b'0\n%s\n' % msg)
        return True
60 61
def _sethttperror(res, code, message=None):
    """Configure ``res`` as a plain-text HTTP error with an empty body.

    ``code`` is the numeric HTTP status; ``message`` optionally replaces the
    default reason phrase.
    """
    res.setbodybytes(b'')
    res.headers[b'Content-Type'] = b'text/plain; charset=utf-8'
    res.status = hgwebcommon.statusmessage(code, message=message)
65 66
def _logexception(req):
    """Write information about the current exception to wsgi.errors."""
    # Capture the active traceback first, before any other call can clobber
    # the exception state.
    trace = pycompat.sysbytes(traceback.format_exc())

    # Reconstruct the request URI from the (possibly empty) application
    # prefix and the dispatch path.
    uri = (req.apppath if req.apppath else b'') + b'/' + req.dispatchpath

    req.rawenv[r'wsgi.errors'].write(
        b"Exception happened while processing request '%s':\n%s"
        % (uri, trace))
def _processbatchrequest(repo, req, res):
    """Handle a request for the Batch API, which is the gateway to granting file
    access.

    https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
    """

    # Mercurial client request:
    #
    #   HOST: localhost:$HGPORT
    #   ACCEPT: application/vnd.git-lfs+json
    #   ACCEPT-ENCODING: identity
    #   USER-AGENT: git-lfs/2.3.4 (Mercurial 4.5.2+1114-f48b9754f04c+20180316)
    #   Content-Length: 125
    #   Content-Type: application/vnd.git-lfs+json
    #
    #   {
    #     "objects": [{
    #       "oid": "31cf...8e5b",
    #       "size": 12
    #     }],
    #     "operation": "upload"
    #   }

    jsontype = b'application/vnd.git-lfs+json'
    validrequest = (req.method == b'POST'
                    and req.headers[b'Content-Type'] == jsontype
                    and req.headers[b'Accept'] == jsontype)
    if not validrequest:
        # TODO: figure out what the proper handling for a bad request to the
        #       Batch API is.
        _sethttperror(res, HTTP_BAD_REQUEST, b'Invalid Batch API request')
        return True

    # XXX: specify an encoding?
    lfsreq = json.loads(req.bodyfh.read())

    # If no transfer handlers are explicitly requested, 'basic' is assumed.
    if 'basic' not in lfsreq.get('transfers', ['basic']):
        _sethttperror(res, HTTP_BAD_REQUEST,
                      b'Only the basic LFS transfer handler is supported')
        return True

    operation = lfsreq.get('operation')
    if operation not in ('upload', 'download'):
        _sethttperror(res, HTTP_BAD_REQUEST,
                      b'Unsupported LFS transfer operation: %s' % operation)
        return True

    localstore = repo.svfs.lfslocalblobstore

    rsp = {
        'transfer': 'basic',
        'objects': list(_batchresponseobjects(req, lfsreq.get('objects', []),
                                              operation, localstore)),
    }

    res.status = hgwebcommon.statusmessage(HTTP_OK)
    res.headers[b'Content-Type'] = b'application/vnd.git-lfs+json'
    res.setbodybytes(pycompat.bytestr(json.dumps(rsp)))

    return True
130 144
131 145 def _batchresponseobjects(req, objects, action, store):
132 146 """Yield one dictionary of attributes for the Batch API response for each
133 147 object in the list.
134 148
135 149 req: The parsedrequest for the Batch API request
136 150 objects: The list of objects in the Batch API object request list
137 151 action: 'upload' or 'download'
138 152 store: The local blob store for servicing requests"""
139 153
140 154 # Successful lfs-test-server response to solict an upload:
141 155 # {
142 156 # u'objects': [{
143 157 # u'size': 12,
144 158 # u'oid': u'31cf...8e5b',
145 159 # u'actions': {
146 160 # u'upload': {
147 161 # u'href': u'http://localhost:$HGPORT/objects/31cf...8e5b',
148 162 # u'expires_at': u'0001-01-01T00:00:00Z',
149 163 # u'header': {
150 164 # u'Accept': u'application/vnd.git-lfs'
151 165 # }
152 166 # }
153 167 # }
154 168 # }]
155 169 # }
156 170
157 171 # TODO: Sort out the expires_at/expires_in/authenticated keys.
158 172
159 173 for obj in objects:
160 174 # Convert unicode to ASCII to create a filesystem path
161 175 oid = obj.get('oid').encode('ascii')
162 176 rsp = {
163 177 'oid': oid,
164 178 'size': obj.get('size'), # XXX: should this check the local size?
165 179 #'authenticated': True,
166 180 }
167 181
168 182 exists = True
169 183 verifies = False
170 184
171 185 # Verify an existing file on the upload request, so that the client is
172 186 # solicited to re-upload if it corrupt locally. Download requests are
173 187 # also verified, so the error can be flagged in the Batch API response.
174 188 # (Maybe we can use this to short circuit the download for `hg verify`,
175 189 # IFF the client can assert that the remote end is an hg server.)
176 190 # Otherwise, it's potentially overkill on download, since it is also
177 191 # verified as the file is streamed to the caller.
178 192 try:
179 193 verifies = store.verify(oid)
180 194 except IOError as inst:
181 195 if inst.errno != errno.ENOENT:
196 _logexception(req)
197
182 198 rsp['error'] = {
183 199 'code': 500,
184 200 'message': inst.strerror or 'Internal Server Server'
185 201 }
186 202 yield rsp
187 203 continue
188 204
189 205 exists = False
190 206
191 207 # Items are always listed for downloads. They are dropped for uploads
192 208 # IFF they already exist locally.
193 209 if action == 'download':
194 210 if not exists:
195 211 rsp['error'] = {
196 212 'code': 404,
197 213 'message': "The object does not exist"
198 214 }
199 215 yield rsp
200 216 continue
201 217
202 218 elif not verifies:
203 219 rsp['error'] = {
204 220 'code': 422, # XXX: is this the right code?
205 221 'message': "The object is corrupt"
206 222 }
207 223 yield rsp
208 224 continue
209 225
210 226 elif verifies:
211 227 yield rsp # Skip 'actions': already uploaded
212 228 continue
213 229
214 230 expiresat = datetime.datetime.now() + datetime.timedelta(minutes=10)
215 231
216 232 rsp['actions'] = {
217 233 '%s' % action: {
218 234 'href': '%s%s/.hg/lfs/objects/%s'
219 235 % (req.baseurl, req.apppath, oid),
220 236 # datetime.isoformat() doesn't include the 'Z' suffix
221 237 "expires_at": expiresat.strftime('%Y-%m-%dT%H:%M:%SZ'),
222 238 'header': {
223 239 # The spec doesn't mention the Accept header here, but avoid
224 240 # a gratuitous deviation from lfs-test-server in the test
225 241 # output.
226 242 'Accept': 'application/vnd.git-lfs'
227 243 }
228 244 }
229 245 }
230 246
231 247 yield rsp
232 248
def _processbasictransfer(repo, req, res, checkperm):
    """Handle a single file upload (PUT) or download (GET) action for the Basic
    Transfer Adapter.

    After determining if the request is for an upload or download, the access
    must be checked by calling ``checkperm()`` with either 'pull' or 'upload'
    before accessing the files.

    https://github.com/git-lfs/git-lfs/blob/master/docs/api/basic-transfers.md
    """

    verb = req.method
    oid = req.dispatchparts[-1]
    blobstore = repo.svfs.lfslocalblobstore

    # A valid transfer URI has exactly 4 dispatch parts.
    if len(req.dispatchparts) != 4:
        _sethttperror(res, HTTP_NOT_FOUND)
        return True

    if verb == b'PUT':
        checkperm('upload')

        # TODO: verify Content-Type?

        existed = blobstore.has(oid)

        # TODO: how to handle timeouts?  The body proxy handles limiting to
        #       Content-Length, but what happens if a client sends less than it
        #       says it will?

        # TODO: download() will abort if the checksum fails.  It should raise
        #       something checksum specific that can be caught here, and turned
        #       into an http code.
        blobstore.download(oid, req.bodyfh)

        res.status = hgwebcommon.statusmessage(
            HTTP_OK if existed else HTTP_CREATED)

        # There's no payload here, but this is the header that lfs-test-server
        # sends back.  This eliminates some gratuitous test output
        # conditionals.
        res.headers[b'Content-Type'] = b'text/plain; charset=utf-8'
        res.setbodybytes(b'')

        return True

    if verb == b'GET':
        checkperm('pull')

        res.status = hgwebcommon.statusmessage(HTTP_OK)
        res.headers[b'Content-Type'] = b'application/octet-stream'

        # TODO: figure out how to send back the file in chunks, instead of
        #       reading the whole thing.
        res.setbodybytes(blobstore.read(oid))

        return True

    _sethttperror(res, HTTP_BAD_REQUEST,
                  message=b'Unsupported LFS transfer method: %s' % verb)
    return True
@@ -1,334 +1,346 b''
1 1 #require serve no-reposimplestore
2 2
3 3 $ cat >> $HGRCPATH <<EOF
4 4 > [extensions]
5 5 > lfs=
6 6 > [lfs]
7 7 > track=all()
8 8 > [web]
9 9 > push_ssl = False
10 10 > allow-push = *
11 11 > EOF
12 12
13 13 Serving LFS files can experimentally be turned off. The long term solution is
14 14 to support the 'verify' action in both client and server, so that the server can
15 15 tell the client to store files elsewhere.
16 16
17 17 $ hg init server
18 18 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
19 19 > --config experimental.lfs.serve=False -R server serve -d \
20 20 > -p $HGPORT --pid-file=hg.pid -A $TESTTMP/access.log -E $TESTTMP/errors.log
21 21 $ cat hg.pid >> $DAEMON_PIDS
22 22
23 23 Uploads fail...
24 24
25 25 $ hg init client
26 26 $ echo 'this-is-an-lfs-file' > client/lfs.bin
27 27 $ hg -R client ci -Am 'initial commit'
28 28 adding lfs.bin
29 29 $ hg -R client push http://localhost:$HGPORT
30 30 pushing to http://localhost:$HGPORT/
31 31 searching for changes
32 32 abort: LFS HTTP error: HTTP Error 400: no such method: .git (action=upload)!
33 33 [255]
34 34
35 35 ... so do a local push to make the data available. Remove the blob from the
36 36 default cache, so it attempts to download.
37 37 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
38 38 > --config "lfs.url=null://" \
39 39 > -R client push -q server
40 40 $ mv `hg config lfs.usercache` $TESTTMP/servercache
41 41
42 42 Downloads fail...
43 43
44 44 $ hg clone http://localhost:$HGPORT httpclone
45 45 requesting all changes
46 46 adding changesets
47 47 adding manifests
48 48 adding file changes
49 49 added 1 changesets with 1 changes to 1 files
50 50 new changesets 525251863cad
51 51 updating to branch default
52 52 abort: LFS HTTP error: HTTP Error 400: no such method: .git (action=download)!
53 53 [255]
54 54
55 55 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
56 56
57 57 $ cat $TESTTMP/access.log $TESTTMP/errors.log
58 58 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
59 59 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D525251863cad618e55d483555f3d00a2ca99597e x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
60 60 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
61 61 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
62 62 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
63 63 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
64 64 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
65 65 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
66 66 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
67 67
68 68 Blob URIs are correct when --prefix is used
69 69
70 70 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
71 71 $ hg --config "lfs.usercache=$TESTTMP/servercache" -R server serve -d \
72 72 > -p $HGPORT --pid-file=hg.pid --prefix=subdir/mount/point \
73 73 > -A $TESTTMP/access.log -E $TESTTMP/errors.log
74 74 $ cat hg.pid >> $DAEMON_PIDS
75 75
76 76 $ hg --config lfs.url=http://localhost:$HGPORT/subdir/mount/point/.git/info/lfs \
77 77 > clone --debug http://localhost:$HGPORT/subdir/mount/point cloned2
78 78 using http://localhost:$HGPORT/subdir/mount/point
79 79 sending capabilities command
80 80 query 1; heads
81 81 sending batch command
82 82 requesting all changes
83 83 sending getbundle command
84 84 bundle2-input-bundle: with-transaction
85 85 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
86 86 adding changesets
87 87 add changeset 525251863cad
88 88 adding manifests
89 89 adding file changes
90 90 adding lfs.bin revisions
91 91 added 1 changesets with 1 changes to 1 files
92 92 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
93 93 bundle2-input-part: total payload size 648
94 94 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
95 95 bundle2-input-part: "phase-heads" supported
96 96 bundle2-input-part: total payload size 24
97 97 bundle2-input-part: "cache:rev-branch-cache" supported
98 98 bundle2-input-part: total payload size 39
99 99 bundle2-input-bundle: 3 parts total
100 100 checking for updated bookmarks
101 101 updating the branch cache
102 102 new changesets 525251863cad
103 103 updating to branch default
104 104 resolving manifests
105 105 branchmerge: False, force: False, partial: False
106 106 ancestor: 000000000000, local: 000000000000+, remote: 525251863cad
107 107 Status: 200
108 108 Content-Length: 371
109 109 Content-Type: application/vnd.git-lfs+json
110 110 Date: $HTTP_DATE$
111 111 Server: testing stub value
112 112 {
113 113 "objects": [
114 114 {
115 115 "actions": {
116 116 "download": {
117 117 "expires_at": "$ISO_8601_DATE_TIME$"
118 118 "header": {
119 119 "Accept": "application/vnd.git-lfs"
120 120 }
121 121 "href": "http://localhost:$HGPORT/subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
122 122 }
123 123 }
124 124 "oid": "f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e"
125 125 "size": 20
126 126 }
127 127 ]
128 128 "transfer": "basic"
129 129 }
130 130 lfs: downloading f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e (20 bytes)
131 131 Status: 200
132 132 Content-Length: 20
133 133 Content-Type: application/octet-stream
134 134 Date: $HTTP_DATE$
135 135 Server: testing stub value
136 136 lfs: adding f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e to the usercache
137 137 lfs: processed: f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e
138 138 lfs.bin: remote created -> g
139 139 getting lfs.bin
140 140 lfs: found f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e in the local lfs store
141 141 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
142 142
143 143 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
144 144
145 145 $ cat $TESTTMP/access.log $TESTTMP/errors.log
146 146 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=capabilities HTTP/1.1" 200 - (glob)
147 147 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
148 148 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
149 149 $LOCALIP - - [$LOGDATE$] "POST /subdir/mount/point/.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
150 150 $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e HTTP/1.1" 200 - (glob)
151 151
152 152 $ cat >> $TESTTMP/lfsstoreerror.py <<EOF
153 153 > import errno
154 154 > from hgext.lfs import blobstore
155 155 >
156 156 > _numverifies = 0
157 157 > _readerr = True
158 158 >
159 159 > def reposetup(ui, repo):
160 160 > # Nothing to do with a remote repo
161 161 > if not repo.local():
162 162 > return
163 163 >
164 164 > store = repo.svfs.lfslocalblobstore
165 165 > class badstore(store.__class__):
166 166 > def download(self, oid, src):
167 167 > '''Called in the server to handle reading from the client in a
168 168 > PUT request.'''
169 169 > origread = src.read
170 170 > def _badread(nbytes):
171 171 > # Simulate bad data/checksum failure from the client
172 172 > return b'0' * len(origread(nbytes))
173 173 > src.read = _badread
174 174 > super(badstore, self).download(oid, src)
175 175 >
176 176 > def _read(self, vfs, oid, verify):
177 177 > '''Called in the server to read data for a GET request, and then
178 178 > calls self._verify() on it before returning.'''
179 179 > global _readerr
180 180 > # One time simulation of a read error
181 181 > if _readerr:
182 182 > _readerr = False
183 183 > raise IOError(errno.EIO, '%s: I/O error' % oid)
184 184 > # Simulate corrupt content on client download
185 185 > blobstore._verify(oid, 'dummy content')
186 186 >
187 187 > def verify(self, oid):
188 188 > '''Called in the server to populate the Batch API response,
189 189 > letting the client re-upload if the file is corrupt.'''
190 190 > # Fail verify in Batch API for one clone command and one push
191 191 > # command with an IOError. Then let it through to access other
192 192 > # functions. Checksum failure is tested elsewhere.
193 193 > global _numverifies
194 194 > _numverifies += 1
195 195 > if _numverifies <= 2:
196 196 > raise IOError(errno.EIO, '%s: I/O error' % oid)
197 197 > return super(badstore, self).verify(oid)
198 198 >
199 199 > store.__class__ = badstore
200 200 > EOF
201 201
202 202 $ rm -rf `hg config lfs.usercache`
203 203 $ rm -f $TESTTMP/access.log $TESTTMP/errors.log
204 204 $ hg --config "lfs.usercache=$TESTTMP/servercache" \
205 205 > --config extensions.lfsstoreerror=$TESTTMP/lfsstoreerror.py \
206 206 > -R server serve -d \
207 207 > -p $HGPORT1 --pid-file=hg.pid -A $TESTTMP/access.log -E $TESTTMP/errors.log
208 208 $ cat hg.pid >> $DAEMON_PIDS
209 209
210 210 Test an I/O error in localstore.verify() (Batch API) with GET
211 211
212 212 $ hg clone http://localhost:$HGPORT1 httpclone2
213 213 requesting all changes
214 214 adding changesets
215 215 adding manifests
216 216 adding file changes
217 217 added 1 changesets with 1 changes to 1 files
218 218 new changesets 525251863cad
219 219 updating to branch default
220 220 abort: LFS server error for "lfs.bin": Internal server error!
221 221 [255]
222 222
223 223 Test an I/O error in localstore.verify() (Batch API) with PUT
224 224
225 225 $ echo foo > client/lfs.bin
226 226 $ hg -R client ci -m 'mod lfs'
227 227 $ hg -R client push http://localhost:$HGPORT1
228 228 pushing to http://localhost:$HGPORT1/
229 229 searching for changes
230 230 abort: LFS server error for "unknown": Internal server error!
231 231 [255]
232 232 TODO: figure out how to associate the file name in the error above
233 233
234 234 Test a bad checksum sent by the client in the transfer API
235 235
236 236 $ hg -R client push http://localhost:$HGPORT1
237 237 pushing to http://localhost:$HGPORT1/
238 238 searching for changes
239 239 abort: HTTP error: HTTP Error 500: Internal Server Error (oid=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c, action=upload)!
240 240 [255]
241 241
242 242 $ echo 'test lfs file' > server/lfs3.bin
243 243 $ hg --config experimental.lfs.disableusercache=True \
244 244 > -R server ci -Aqm 'another lfs file'
245 245 $ hg -R client pull -q http://localhost:$HGPORT1
246 246
247 247 Test an I/O error during the processing of the GET request
248 248
249 249 $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
250 250 > -R client update -r tip
251 251 abort: HTTP error: HTTP Error 500: Internal Server Error (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
252 252 [255]
253 253
254 254 Test a checksum failure during the processing of the GET request
255 255
256 256 $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
257 257 > -R client update -r tip
258 258 abort: HTTP error: HTTP Error 500: Internal Server Error (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
259 259 [255]
260 260
261 261 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
262 262
263 263 $ cat $TESTTMP/access.log
264 264 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
265 265 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
266 266 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
267 267 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
268 268 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
269 269 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
270 270 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
271 271 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
272 272 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
273 273 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
274 274 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
275 275 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
276 276 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
277 277 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
278 278 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
279 279 $LOCALIP - - [$LOGDATE$] "GET /?cmd=branchmap HTTP/1.1" 200 - x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
280 280 $LOCALIP - - [$LOGDATE$] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
281 281 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
282 282 $LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c HTTP/1.1" 500 - (glob)
283 283 $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
284 284 $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
285 285 $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Arev-branch-cache%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
286 286 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
287 287 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob)
288 288 $LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
289 289 $LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob)
290 290
291 291 $ grep -v ' File "' $TESTTMP/errors.log
292 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
293 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
294 $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob)
295 $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, '%s: I/O error' % oid) (glob)
296 $LOCALIP - - [$ERRDATE$] HG error: IOError: [Errno 5] f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e: I/O error (glob)
297 $LOCALIP - - [$ERRDATE$] HG error: (glob)
298 $LOCALIP - - [$ERRDATE$] HG error: Exception happened while processing request '/.git/info/lfs/objects/batch': (glob)
299 $LOCALIP - - [$ERRDATE$] HG error: Traceback (most recent call last): (glob)
300 $LOCALIP - - [$ERRDATE$] HG error: verifies = store.verify(oid) (glob)
301 $LOCALIP - - [$ERRDATE$] HG error: raise IOError(errno.EIO, '%s: I/O error' % oid) (glob)
302 $LOCALIP - - [$ERRDATE$] HG error: IOError: [Errno 5] b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c: I/O error (glob)
303 $LOCALIP - - [$ERRDATE$] HG error: (glob)
292 304 $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c': (glob)
293 305 Traceback (most recent call last):
294 306 self.do_write()
295 307 self.do_hgweb()
296 308 for chunk in self.server.application(env, self._start_response):
297 309 for r in self._runwsgi(req, res, repo):
298 310 rctx, req, res, self.check_perm)
299 311 return func(*(args + a), **kw)
300 312 lambda perm:
301 313 localstore.download(oid, req.bodyfh)
302 314 super(badstore, self).download(oid, src)
303 315 raise error.Abort(_('corrupt remote lfs object: %s') % oid)
304 316 Abort: corrupt remote lfs object: b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c
305 317
306 318 $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
307 319 Traceback (most recent call last):
308 320 self.do_write()
309 321 self.do_hgweb()
310 322 for chunk in self.server.application(env, self._start_response):
311 323 for r in self._runwsgi(req, res, repo):
312 324 rctx, req, res, self.check_perm)
313 325 return func(*(args + a), **kw)
314 326 lambda perm:
315 327 res.setbodybytes(localstore.read(oid))
316 328 blob = self._read(self.vfs, oid, verify)
317 329 raise IOError(errno.EIO, '%s: I/O error' % oid)
318 330 IOError: [Errno 5] 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d: I/O error
319 331
320 332 $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
321 333 Traceback (most recent call last):
322 334 self.do_write()
323 335 self.do_hgweb()
324 336 for chunk in self.server.application(env, self._start_response):
325 337 for r in self._runwsgi(req, res, repo):
326 338 rctx, req, res, self.check_perm)
327 339 return func(*(args + a), **kw)
328 340 lambda perm:
329 341 res.setbodybytes(localstore.read(oid))
330 342 blob = self._read(self.vfs, oid, verify)
331 343 blobstore._verify(oid, 'dummy content')
332 344 hint=_('run hg verify'))
333 345 Abort: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d
334 346
General Comments 0
You need to be logged in to leave comments. Login now