##// END OF EJS Templates
lfs: fix interpolation of int and %s in an exception case...
Matt Harbison -
r50421:192949b6 stable
parent child Browse files
Show More
@@ -1,769 +1,769 b''
1 1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import contextlib
10 10 import errno
11 11 import hashlib
12 12 import json
13 13 import os
14 14 import re
15 15 import socket
16 16
17 17 from mercurial.i18n import _
18 18 from mercurial.pycompat import getattr
19 19 from mercurial.node import hex
20 20
21 21 from mercurial import (
22 22 encoding,
23 23 error,
24 24 httpconnection as httpconnectionmod,
25 25 pathutil,
26 26 pycompat,
27 27 url as urlmod,
28 28 util,
29 29 vfs as vfsmod,
30 30 worker,
31 31 )
32 32
33 33 from mercurial.utils import (
34 34 stringutil,
35 35 urlutil,
36 36 )
37 37
38 38 from ..largefiles import lfutil
39 39
# An oid is the SHA-256 digest spelled as 64 lowercase hex characters;
# anything else is rejected by lfsvfs.join().
_lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
42 42
43 43
class lfsvfs(vfsmod.vfs):
    """A vfs keyed by blob oid, sharding blobs into two-level directories
    (``ab/cdef...`` for oid ``abcdef...``), like Git's object store."""

    def join(self, path):
        """split the path at first two characters, like: XX/XXXXX..."""
        if not _lfsre.match(path):
            raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
        return super(lfsvfs, self).join(path[0:2], path[2:])

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, [], oids) tuple for blobs under path

        Oids only exist in the root of this vfs, so dirpath is always ''.
        """
        root = os.path.normpath(self.base)
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        oids = []

        for dirpath, dirs, files in os.walk(
            self.reljoin(self.base, path or b''), onerror=onerror
        ):
            # Make dirpath relative to the vfs root: a valid shard
            # directory relativizes to exactly two characters.
            dirpath = dirpath[prefixlen:]

            # Silently skip unexpected files and directories
            if len(dirpath) == 2:
                # Reassemble shard dir + filename into the full oid and keep
                # only names that look like SHA-256 hex digests.
                oids.extend(
                    [dirpath + f for f in files if _lfsre.match(dirpath + f)]
                )

        yield (b'', [], oids)
74 74
75 75
class nullvfs(lfsvfs):
    """A blob store that never persists anything.

    Substituted for the usercache when
    ``experimental.lfs.disableusercache`` is set.
    """

    def __init__(self):
        # Deliberately skip lfsvfs.__init__: there is no backing directory.
        pass

    def exists(self, oid):
        # Nothing is ever stored, so nothing ever exists.
        return False

    def read(self, oid):
        # store.read() calls into here if the blob doesn't exist in its
        # self.vfs. Raise the same error as a normal vfs when asked to read a
        # file that doesn't exist. The only difference is the full file path
        # isn't available in the error.
        msg = pycompat.sysstr(b'%s: No such file or directory' % oid)
        raise IOError(errno.ENOENT, msg)

    def walk(self, path=None, onerror=None):
        # Same shape lfsvfs.walk() yields, with no oids.
        return (b'', [], [])

    def write(self, oid, data):
        # Discard the data.
        pass
98 98
99 99
class lfsuploadfile(httpconnectionmod.httpsendfile):
    """a file-like object that supports keepalive."""

    def __init__(self, ui, filename):
        # Always open in binary mode; blobs are raw bytes.
        super(lfsuploadfile, self).__init__(ui, filename, b'rb')
        # ``self._data`` is presumably the file handle opened by the
        # superclass — expose its read() directly for urllib's benefit.
        # TODO confirm against httpconnectionmod.httpsendfile.
        self.read = self._data.read

    def _makeprogress(self):
        return None  # progress is handled by the worker client
109 109
110 110
class local:
    """Local blobstore for large file contents.

    This blobstore is used both as a cache and as a staging area for large blobs
    to be uploaded to the remote blobstore.
    """

    def __init__(self, repo):
        fullpath = repo.svfs.join(b'lfs/objects')
        self.vfs = lfsvfs(fullpath)

        if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
            self.cachevfs = nullvfs()
        else:
            usercache = lfutil._usercachedir(repo.ui, b'lfs')
            self.cachevfs = lfsvfs(usercache)
        self.ui = repo.ui

    def open(self, oid):
        """Open a read-only file descriptor to the named blob, in either the
        usercache or the local store."""
        return open(self.path(oid), 'rb')

    def path(self, oid):
        """Build the path for the given blob ``oid``.

        If the blob exists locally, the path may point to either the usercache
        or the local store. If it doesn't, it will point to the local store.
        This is meant for situations where existing code that isn't LFS aware
        needs to open a blob. Generally, prefer the ``open`` method on this
        class.
        """
        # The usercache is the most likely place to hold the file. Commit will
        # write to both it and the local store, as will anything that downloads
        # the blobs. However, things like clone without an update won't
        # populate the local store. For an init + push of a local clone,
        # the usercache is the only place it _could_ be. If not present, the
        # missing file msg here will indicate the local repo, not the usercache.
        if self.cachevfs.exists(oid):
            return self.cachevfs.join(oid)

        return self.vfs.join(oid)

    def download(self, oid, src, content_length):
        """Read the blob from the remote source in chunks, verify the content,
        and write to this local blobstore."""
        sha256 = hashlib.sha256()
        size = 0

        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            for chunk in util.filechunkiter(src, size=1048576):
                fp.write(chunk)
                sha256.update(chunk)
                size += len(chunk)

            # If the server advertised a length longer than what we actually
            # received, then we should expect that the server crashed while
            # producing the response (but the server has no way of telling us
            # that), and we really don't need to try to write the response to
            # the localstore, because it's not going to match the expected.
            if content_length is not None and int(content_length) != size:
                # Both values are ints, so %d must be used: bytes
                # %-formatting cannot interpolate an int with %s (PEP 461)
                # and would raise TypeError instead of reporting the error.
                msg = (
                    b"Response length (%d) does not match Content-Length "
                    b"header (%d): likely server-side crash"
                )
                raise LfsRemoteError(_(msg) % (size, int(content_length)))

            realoid = hex(sha256.digest())
            if realoid != oid:
                raise LfsCorruptionError(
                    _(b'corrupt remote lfs object: %s') % oid
                )

        self._linktousercache(oid)

    def write(self, oid, data):
        """Write blob to local blobstore.

        This should only be called from the filelog during a commit or similar.
        As such, there is no need to verify the data. Imports from a remote
        store must use ``download()`` instead."""
        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            fp.write(data)

        self._linktousercache(oid)

    def linkfromusercache(self, oid):
        """Link blobs found in the user cache into this store.

        The server module needs to do this when it lets the client know not to
        upload the blob, to ensure it is always available in this store.
        Normally this is done implicitly when the client reads or writes the
        blob, but that doesn't happen when the server tells the client that it
        already has the blob.
        """
        if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
            lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))

    def _linktousercache(self, oid):
        # XXX: should we verify the content of the cache, and hardlink back to
        # the local store on success, but truncate, write and link on failure?
        if not self.cachevfs.exists(oid) and not isinstance(
            self.cachevfs, nullvfs
        ):
            self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
            lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))

    def read(self, oid, verify=True):
        """Read blob from local blobstore."""
        if not self.vfs.exists(oid):
            blob = self._read(self.cachevfs, oid, verify)

            # Even if revlog will verify the content, it needs to be verified
            # now before making the hardlink to avoid propagating corrupt blobs.
            # Don't abort if corruption is detected, because `hg verify` will
            # give more useful info about the corruption- simply don't add the
            # hardlink.
            if verify or hex(hashlib.sha256(blob).digest()) == oid:
                self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
                lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
        else:
            self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
            blob = self._read(self.vfs, oid, verify)
        return blob

    def _read(self, vfs, oid, verify):
        """Read blob (after verifying) from the given store"""
        blob = vfs.read(oid)
        if verify:
            _verify(oid, blob)
        return blob

    def verify(self, oid):
        """Indicate whether or not the hash of the underlying file matches its
        name."""
        sha256 = hashlib.sha256()

        with self.open(oid) as fp:
            for chunk in util.filechunkiter(fp, size=1048576):
                sha256.update(chunk)

        return oid == hex(sha256.digest())

    def has(self, oid):
        """Returns True if the local blobstore contains the requested blob,
        False otherwise."""
        return self.cachevfs.exists(oid) or self.vfs.exists(oid)
259 259
260 260
def _urlerrorreason(urlerror):
    """Create a friendly message for the given URLError to be used in an
    LfsRemoteError message.
    """
    err = urlerror
    if isinstance(urlerror.reason, Exception):
        # Unwrap the underlying exception when URLError carries one.
        err = urlerror.reason

    if util.safehasattr(err, b'reason'):
        try:
            # usually it is in the form (errno, strerror)
            why = err.reason.args[1]
        except (AttributeError, IndexError):
            # it might be anything, for example a string
            why = err.reason
        if isinstance(why, str):
            # SSLError of Python 2.7.9 contains a unicode
            why = encoding.unitolocal(why)
        return why

    if getattr(err, "strerror", None):
        return encoding.strtolocal(err.strerror)

    return stringutil.forcebytestr(urlerror)
284 284
285 285
class lfsauthhandler(util.urlreq.basehandler):
    """Reject 401 challenges that ask for anything other than HTTP Basic."""

    handler_order = 480  # Before HTTPDigestAuthHandler (== 490)

    def http_error_401(self, req, fp, code, msg, headers):
        """Enforces that any authentication performed is HTTP Basic
        Authentication. No authentication is also acceptable.
        """
        authreq = headers.get('www-authenticate', None)
        if not authreq:
            # No challenge header at all: nothing to enforce.
            return None

        scheme = authreq.split()[0]
        if scheme.lower() == 'basic':
            return None

        msg = _(b'the server must support Basic Authentication')
        raise util.urlerr.httperror(
            req.get_full_url(),
            code,
            encoding.strfromlocal(msg),
            headers,
            fp,
        )
307 307
308 308
class _gitlfsremote:
    """Remote blobstore speaking the Git-LFS protocol over HTTP(S).

    See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
    """

    def __init__(self, repo, url):
        ui = repo.ui
        self.ui = ui
        baseurl, authinfo = url.authinfo()
        self.baseurl = baseurl.rstrip(b'/')
        useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
        if not useragent:
            useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
        self.urlopener = urlmod.opener(ui, authinfo, useragent)
        self.urlopener.add_handler(lfsauthhandler())
        self.retry = ui.configint(b'lfs', b'retry')

    def writebatch(self, pointers, fromstore):
        """Batch upload from local to remote blobstore."""
        self._batch(_deduplicate(pointers), fromstore, b'upload')

    def readbatch(self, pointers, tostore):
        """Batch download from remote to local blostore."""
        self._batch(_deduplicate(pointers), tostore, b'download')

    def _batchrequest(self, pointers, action):
        """Get metadata about objects pointed by pointers for given action

        Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
        """
        objects = [
            {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
            for p in pointers
        ]
        requestdata = pycompat.bytesurl(
            json.dumps(
                {
                    'objects': objects,
                    'operation': pycompat.strurl(action),
                }
            )
        )
        url = b'%s/objects/batch' % self.baseurl
        batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
        batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
        batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
        try:
            with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
                rawjson = rsp.read()
        except util.urlerr.httperror as ex:
            # NOTE: httperror must be caught before urlerror (its base class).
            hints = {
                400: _(
                    b'check that lfs serving is enabled on %s and "%s" is '
                    b'supported'
                )
                % (self.baseurl, action),
                404: _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl,
            }
            hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
                hint=hint,
            )
        except util.urlerr.urlerror as ex:
            hint = (
                _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        try:
            response = pycompat.json_loads(rawjson)
        except ValueError:
            # ``rawjson`` is bytes (the raw HTTP body), which has no
            # .encode() method; the previous ``rawjson.encode("utf-8")``
            # raised AttributeError here instead of reporting the payload.
            raise LfsRemoteError(
                _(b'LFS server returns invalid JSON: %s')
                % stringutil.forcebytestr(rawjson)
            )

        if self.ui.debugflag:
            self.ui.debug(b'Status: %d\n' % rsp.status)
            # lfs-test-server and hg serve return headers in different order
            headers = pycompat.bytestr(rsp.info()).strip()
            self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

            if 'objects' in response:
                response['objects'] = sorted(
                    response['objects'], key=lambda p: p['oid']
                )
                self.ui.debug(
                    b'%s\n'
                    % pycompat.bytesurl(
                        json.dumps(
                            response,
                            indent=2,
                            separators=('', ': '),
                            sort_keys=True,
                        )
                    )
                )

        def encodestr(x):
            if isinstance(x, str):
                return x.encode('utf-8')
            return x

        # json produces str keys/values; the rest of the extension works in
        # bytes, so convert the whole structure back.
        return pycompat.rapply(encodestr, response)

    def _checkforservererror(self, pointers, responses, action):
        """Scans errors from objects

        Raises LfsRemoteError if any objects have an error"""
        # The pointer set is loop-invariant; build the lookup map once.
        ptrmap = {p.oid(): p for p in pointers}
        for response in responses:
            # The server should return 404 when objects cannot be found. Some
            # server implementation (ex. lfs-test-server) does not set "error"
            # but just removes "download" from "actions". Treat that case
            # as the same as 404 error.
            if b'error' not in response:
                if action == b'download' and action not in response.get(
                    b'actions', []
                ):
                    code = 404
                else:
                    continue
            else:
                # An error dict without a code doesn't make much sense, so
                # treat as a server error.
                code = response.get(b'error').get(b'code', 500)

            p = ptrmap.get(response[b'oid'], None)
            if p:
                filename = getattr(p, 'filename', b'unknown')
                errors = {
                    404: b'The object does not exist',
                    410: b'The object was removed by the owner',
                    422: b'Validation error',
                    500: b'Internal server error',
                }
                msg = errors.get(code, b'status code %d' % code)
                raise LfsRemoteError(
                    _(b'LFS server error for "%s": %s') % (filename, msg)
                )
            else:
                raise LfsRemoteError(
                    _(b'LFS server error. Unsolicited response for oid %s')
                    % response[b'oid']
                )

    def _extractobjects(self, response, pointers, action):
        """extract objects from response of the batch API

        response: parsed JSON object returned by batch API
        return response['objects'] filtered by action
        raise if any object has an error
        """
        # Scan errors from objects - fail early
        objects = response.get(b'objects', [])
        self._checkforservererror(pointers, objects, action)

        # Filter objects with given action. Practically, this skips uploading
        # objects which exist in the server.
        filteredobjects = [
            o for o in objects if action in o.get(b'actions', [])
        ]

        return filteredobjects

    def _basictransfer(self, obj, action, localstore):
        """Download or upload a single object using basic transfer protocol

        obj: dict, an object description returned by batch API
        action: string, one of ['upload', 'download']
        localstore: blobstore.local

        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
basic-transfers.md
        """
        oid = obj[b'oid']
        href = obj[b'actions'][action].get(b'href')
        headers = obj[b'actions'][action].get(b'header', {}).items()

        request = util.urlreq.request(pycompat.strurl(href))
        if action == b'upload':
            # If uploading blobs, read data from local blobstore.
            if not localstore.verify(oid):
                raise error.Abort(
                    _(b'detected corrupt lfs object: %s') % oid,
                    hint=_(b'run hg verify'),
                )

        for k, v in headers:
            request.add_header(pycompat.strurl(k), pycompat.strurl(v))

        try:
            if action == b'upload':
                request.data = lfsuploadfile(self.ui, localstore.path(oid))
                request.get_method = lambda: 'PUT'
                request.add_header('Content-Type', 'application/octet-stream')
                request.add_header('Content-Length', request.data.length)

            with contextlib.closing(self.urlopener.open(request)) as res:
                contentlength = res.info().get(b"content-length")
                ui = self.ui  # Shorten debug lines
                if self.ui.debugflag:
                    ui.debug(b'Status: %d\n' % res.status)
                    # lfs-test-server and hg serve return headers in different
                    # order
                    headers = pycompat.bytestr(res.info()).strip()
                    ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

                if action == b'download':
                    # If downloading blobs, store downloaded data to local
                    # blobstore
                    localstore.download(oid, res, contentlength)
                else:
                    # For uploads, drain (and debug-log) whatever body the
                    # server sent back.
                    blocks = []
                    while True:
                        data = res.read(1048576)
                        if not data:
                            break
                        blocks.append(data)

                    response = b"".join(blocks)
                    if response:
                        ui.debug(b'lfs %s response: %s' % (action, response))
        except util.urlerr.httperror as ex:
            if self.ui.debugflag:
                self.ui.debug(
                    b'%s: %s\n' % (oid, ex.read())
                )  # XXX: also bytes?
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s (oid=%s, action=%s)')
                % (stringutil.forcebytestr(ex), oid, action)
            )
        except util.urlerr.urlerror as ex:
            hint = _(b'attempted connection to %s') % pycompat.bytesurl(
                util.urllibcompat.getfullurl(request)
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        finally:
            # Close the upload file handle even on failure, so the temp
            # file isn't leaked.
            if request.data:
                request.data.close()

    def _batch(self, pointers, localstore, action):
        if action not in [b'upload', b'download']:
            raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)

        response = self._batchrequest(pointers, action)
        objects = self._extractobjects(response, pointers, action)
        total = sum(x.get(b'size', 0) for x in objects)
        sizes = {}
        for obj in objects:
            sizes[obj.get(b'oid')] = obj.get(b'size', 0)
        topic = {
            b'upload': _(b'lfs uploading'),
            b'download': _(b'lfs downloading'),
        }[action]
        if len(objects) > 1:
            self.ui.note(
                _(b'lfs: need to transfer %d objects (%s)\n')
                % (len(objects), util.bytecount(total))
            )

        def transfer(chunk):
            for obj in chunk:
                objsize = obj.get(b'size', 0)
                if self.ui.verbose:
                    if action == b'download':
                        msg = _(b'lfs: downloading %s (%s)\n')
                    elif action == b'upload':
                        msg = _(b'lfs: uploading %s (%s)\n')
                    self.ui.note(
                        msg % (obj.get(b'oid'), util.bytecount(objsize))
                    )
                retry = self.retry
                while True:
                    try:
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get(b'oid')
                        break
                    except socket.error as ex:
                        if retry > 0:
                            self.ui.note(
                                _(b'lfs: failed: %r (remaining retry %d)\n')
                                % (stringutil.forcebytestr(ex), retry)
                            )
                            retry -= 1
                            continue
                        raise

        # Until https multiplexing gets sorted out
        if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
            oids = worker.worker(
                self.ui,
                0.1,
                transfer,
                (),
                sorted(objects, key=lambda o: o.get(b'oid')),
            )
        else:
            oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))

        with self.ui.makeprogress(
            topic, unit=_(b"bytes"), total=total
        ) as progress:
            progress.update(0)
            processed = 0
            blobs = 0
            for _one, oid in oids:
                processed += sizes[oid]
                blobs += 1
                progress.update(processed)
                self.ui.note(_(b'lfs: processed: %s\n') % oid)

        if blobs > 0:
            if action == b'upload':
                self.ui.status(
                    _(b'lfs: uploaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
            elif action == b'download':
                self.ui.status(
                    _(b'lfs: downloaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )

    def __del__(self):
        # copied from mercurial/httppeer.py
        urlopener = getattr(self, 'urlopener', None)
        if urlopener:
            for h in urlopener.handlers:
                h.close()
                getattr(h, "close_all", lambda: None)()
643 643
644 644
class _dummyremote:
    """Dummy store storing blobs to temp directory."""

    def __init__(self, repo, url):
        self.vfs = lfsvfs(repo.vfs.join(b'lfs', url.path))

    def writebatch(self, pointers, fromstore):
        # Copy each unique blob out of the local store, verifying on read.
        for pointer in _deduplicate(pointers):
            data = fromstore.read(pointer.oid(), verify=True)
            with self.vfs(pointer.oid(), b'wb', atomictemp=True) as fp:
                fp.write(data)

    def readbatch(self, pointers, tostore):
        # Stream each unique blob into the local store (which verifies it).
        for pointer in _deduplicate(pointers):
            with self.vfs(pointer.oid(), b'rb') as fp:
                tostore.download(pointer.oid(), fp, None)
662 662
663 663
class _nullremote:
    """Null store storing blobs to /dev/null."""

    def __init__(self, repo, url):
        pass

    def writebatch(self, pointers, fromstore):
        # Intentionally discard all uploads.
        pass

    def readbatch(self, pointers, tostore):
        # Nothing is ever stored, so there is nothing to download.
        pass
675 675
676 676
class _promptremote:
    """Prompt user to set lfs.url when accessed."""

    def __init__(self, repo, url):
        pass

    def writebatch(self, pointers, fromstore, ui=None):
        self._prompt()

    def readbatch(self, pointers, tostore, ui=None):
        self._prompt()

    def _prompt(self):
        # Any attempted transfer without a configured endpoint is an
        # immediate, user-fixable error.
        raise error.Abort(_(b'lfs.url needs to be configured'))
691 691
692 692
# Maps a remote-endpoint URL scheme to the store class used to talk to it;
# ``None`` (no endpoint could be determined at all) prompts the user to set
# ``lfs.url``.  Consumed by remote() below.
_storemap = {
    b'https': _gitlfsremote,
    b'http': _gitlfsremote,
    b'file': _dummyremote,
    b'null': _nullremote,
    None: _promptremote,
}
700 700
701 701
def _deduplicate(pointers):
    """Remove any duplicate oids that exist in the list"""
    # Keyed by oid; a later duplicate replaces the earlier pointer.
    seen = util.sortdict()
    for pointer in pointers:
        seen[pointer.oid()] = pointer
    return seen.values()
708 708
709 709
def _verify(oid, content):
    """Abort with LfsCorruptionError unless ``content`` hashes to ``oid``."""
    actual = hex(hashlib.sha256(content).digest())
    if actual == oid:
        return
    raise LfsCorruptionError(
        _(b'detected corrupt lfs object: %s') % oid,
        hint=_(b'run hg verify'),
    )
717 717
718 718
def remote(repo, remote=None):
    """remotestore factory. return a store in _storemap depending on config

    If ``lfs.url`` is specified, use that remote endpoint.  Otherwise, try to
    infer the endpoint, based on the remote repository using the same path
    adjustments as git.  As an extension, 'http' is supported as well so that
    ``hg serve`` works out of the box.

    https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
    """
    lfsurl = repo.ui.config(b'lfs', b'url')
    url = urlutil.url(lfsurl or b'')
    if lfsurl is None:
        if remote:
            path = remote
        elif util.safehasattr(repo, b'_subtoppath'):
            # The pull command sets this during the optional update phase, which
            # tells exactly where the pull originated, whether 'paths.default'
            # or explicit.
            path = repo._subtoppath
        else:
            # TODO: investigate 'paths.remote:lfsurl' style path customization,
            # and fall back to inferring from 'paths.remote' if unspecified.
            path = repo.ui.config(b'paths', b'default') or b''

        defaulturl = urlutil.url(path)

        # TODO: support local paths as well.
        # TODO: consider the ssh -> https transformation that git applies
        if defaulturl.scheme in (b'http', b'https'):
            # Ensure exactly one trailing '/' before appending the git-style
            # suffix.  (Previously this tested ``path[:-1]`` -- everything
            # *but* the last byte -- so paths already ending in '/' got a
            # duplicate slash appended; ``path[-1:]`` checks the last byte.)
            if defaulturl.path and defaulturl.path[-1:] != b'/':
                defaulturl.path += b'/'
            defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'

        url = urlutil.url(bytes(defaulturl))
        repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)

    scheme = url.scheme
    if scheme not in _storemap:
        raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
    return _storemap[scheme](repo, url)
760 760
761 761
class LfsRemoteError(error.StorageError):
    """Raised for protocol- or server-reported errors while talking to the
    remote blobstore."""

    pass
764 764
765 765
class LfsCorruptionError(error.Abort):
    """Raised when a corrupt blob is detected, aborting an operation

    It exists to allow specialized handling on the server side."""
General Comments 0
You need to be logged in to leave comments. Login now