lfs: debug print HTTP headers and JSON payload received from the server...
Matt Harbison
r36944:0dcf50dc default
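This changeset makes the LFS client dump, under --debug, the HTTP status line and response headers for each batch and basic-transfer request, plus the decoded JSON payload of the batch response. Headers are sorted because lfs-test-server and `hg serve` emit them in different orders, and the JSON is re-serialized with sorted keys (and a sorted 'objects' list) so that the test output stays deterministic. A minimal stand-alone sketch of the same idea in plain Python 3 (illustrative names only, not the Mercurial internals):

    import json

    def debug_response(rsp):
        # rsp: an http.client.HTTPResponse, e.g. returned by urllib.request.urlopen()
        print('Status: %d' % rsp.status)
        # Different servers emit headers in different orders; sort for stable output.
        print('\n'.join(sorted(str(rsp.info()).splitlines())))
        payload = json.loads(rsp.read())
        if 'objects' in payload:
            # Sort the object list as well so repeated runs print identically.
            payload['objects'] = sorted(payload['objects'], key=lambda o: o['oid'])
        print(json.dumps(payload, indent=2, sort_keys=True))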
@@ -1,482 +1,503 @@
1 1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import hashlib
11 11 import json
12 12 import os
13 13 import re
14 14 import socket
15 15
16 16 from mercurial.i18n import _
17 17
18 18 from mercurial import (
19 19 error,
20 20 pathutil,
21 21 pycompat,
22 22 url as urlmod,
23 23 util,
24 24 vfs as vfsmod,
25 25 worker,
26 26 )
27 27
28 28 from ..largefiles import lfutil
29 29
30 30 # 64 bytes for SHA256
31 31 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
32 32
33 33 class lfsvfs(vfsmod.vfs):
34 34 def join(self, path):
35 35 """split the path at first two characters, like: XX/XXXXX..."""
36 36 if not _lfsre.match(path):
37 37 raise error.ProgrammingError('unexpected lfs path: %s' % path)
38 38 return super(lfsvfs, self).join(path[0:2], path[2:])
39 39
40 40 def walk(self, path=None, onerror=None):
41 41 """Yield (dirpath, [], oids) tuple for blobs under path
42 42
43 43 Oids only exist in the root of this vfs, so dirpath is always ''.
44 44 """
45 45 root = os.path.normpath(self.base)
46 46 # when dirpath == root, dirpath[prefixlen:] becomes empty
47 47 # because len(dirpath) < prefixlen.
48 48 prefixlen = len(pathutil.normasprefix(root))
49 49 oids = []
50 50
51 51 for dirpath, dirs, files in os.walk(self.reljoin(self.base, path or ''),
52 52 onerror=onerror):
53 53 dirpath = dirpath[prefixlen:]
54 54
55 55 # Silently skip unexpected files and directories
56 56 if len(dirpath) == 2:
57 57 oids.extend([dirpath + f for f in files
58 58 if _lfsre.match(dirpath + f)])
59 59
60 60 yield ('', [], oids)
61 61
62 62 class filewithprogress(object):
63 63 """a file-like object that supports __len__ and read.
64 64
65 65 Useful to provide progress information for how many bytes are read.
66 66 """
67 67
68 68 def __init__(self, fp, callback):
69 69 self._fp = fp
70 70 self._callback = callback # func(readsize)
71 71 fp.seek(0, os.SEEK_END)
72 72 self._len = fp.tell()
73 73 fp.seek(0)
74 74
75 75 def __len__(self):
76 76 return self._len
77 77
78 78 def read(self, size):
79 79 if self._fp is None:
80 80 return b''
81 81 data = self._fp.read(size)
82 82 if data:
83 83 if self._callback:
84 84 self._callback(len(data))
85 85 else:
86 86 self._fp.close()
87 87 self._fp = None
88 88 return data
89 89
90 90 class local(object):
91 91 """Local blobstore for large file contents.
92 92
93 93 This blobstore is used both as a cache and as a staging area for large blobs
94 94 to be uploaded to the remote blobstore.
95 95 """
96 96
97 97 def __init__(self, repo):
98 98 fullpath = repo.svfs.join('lfs/objects')
99 99 self.vfs = lfsvfs(fullpath)
100 100 usercache = lfutil._usercachedir(repo.ui, 'lfs')
101 101 self.cachevfs = lfsvfs(usercache)
102 102 self.ui = repo.ui
103 103
104 104 def open(self, oid):
105 105 """Open a read-only file descriptor to the named blob, in either the
106 106 usercache or the local store."""
107 107 # The usercache is the most likely place to hold the file. Commit will
108 108 # write to both it and the local store, as will anything that downloads
109 109 # the blobs. However, things like clone without an update won't
110 110 # populate the local store. For an init + push of a local clone,
111 111 # the usercache is the only place it _could_ be. If not present, the
112 112 # missing file msg here will indicate the local repo, not the usercache.
113 113 if self.cachevfs.exists(oid):
114 114 return self.cachevfs(oid, 'rb')
115 115
116 116 return self.vfs(oid, 'rb')
117 117
118 118 def download(self, oid, src):
119 119 """Read the blob from the remote source in chunks, verify the content,
120 120 and write to this local blobstore."""
121 121 sha256 = hashlib.sha256()
122 122
123 123 with self.vfs(oid, 'wb', atomictemp=True) as fp:
124 124 for chunk in util.filechunkiter(src, size=1048576):
125 125 fp.write(chunk)
126 126 sha256.update(chunk)
127 127
128 128 realoid = sha256.hexdigest()
129 129 if realoid != oid:
130 130 raise error.Abort(_('corrupt remote lfs object: %s') % oid)
131 131
132 132 # XXX: should we verify the content of the cache, and hardlink back to
133 133 # the local store on success, but truncate, write and link on failure?
134 134 if not self.cachevfs.exists(oid):
135 135 self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
136 136 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
137 137
138 138 def write(self, oid, data):
139 139 """Write blob to local blobstore.
140 140
141 141 This should only be called from the filelog during a commit or similar.
142 142 As such, there is no need to verify the data. Imports from a remote
143 143 store must use ``download()`` instead."""
144 144 with self.vfs(oid, 'wb', atomictemp=True) as fp:
145 145 fp.write(data)
146 146
147 147 # XXX: should we verify the content of the cache, and hardlink back to
148 148 # the local store on success, but truncate, write and link on failure?
149 149 if not self.cachevfs.exists(oid):
150 150 self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
151 151 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
152 152
153 153 def read(self, oid, verify=True):
154 154 """Read blob from local blobstore."""
155 155 if not self.vfs.exists(oid):
156 156 blob = self._read(self.cachevfs, oid, verify)
157 157
158 158 # Even if revlog will verify the content, it needs to be verified
159 159 # now before making the hardlink to avoid propagating corrupt blobs.
160 160 # Don't abort if corruption is detected, because `hg verify` will
161 161 # give more useful info about the corruption; simply don't add the
162 162 # hardlink.
163 163 if verify or hashlib.sha256(blob).hexdigest() == oid:
164 164 self.ui.note(_('lfs: found %s in the usercache\n') % oid)
165 165 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
166 166 else:
167 167 self.ui.note(_('lfs: found %s in the local lfs store\n') % oid)
168 168 blob = self._read(self.vfs, oid, verify)
169 169 return blob
170 170
171 171 def _read(self, vfs, oid, verify):
172 172 """Read blob (after verifying) from the given store"""
173 173 blob = vfs.read(oid)
174 174 if verify:
175 175 _verify(oid, blob)
176 176 return blob
177 177
178 178 def has(self, oid):
179 179 """Returns True if the local blobstore contains the requested blob,
180 180 False otherwise."""
181 181 return self.cachevfs.exists(oid) or self.vfs.exists(oid)
182 182
183 183 class _gitlfsremote(object):
184 184
185 185 def __init__(self, repo, url):
186 186 ui = repo.ui
187 187 self.ui = ui
188 188 baseurl, authinfo = url.authinfo()
189 189 self.baseurl = baseurl.rstrip('/')
190 190 useragent = repo.ui.config('experimental', 'lfs.user-agent')
191 191 if not useragent:
192 192 useragent = 'git-lfs/2.3.4 (Mercurial %s)' % util.version()
193 193 self.urlopener = urlmod.opener(ui, authinfo, useragent)
194 194 self.retry = ui.configint('lfs', 'retry')
195 195
196 196 def writebatch(self, pointers, fromstore):
197 197 """Batch upload from local to remote blobstore."""
198 198 self._batch(_deduplicate(pointers), fromstore, 'upload')
199 199
200 200 def readbatch(self, pointers, tostore):
201 201 """Batch download from remote to local blostore."""
202 202 self._batch(_deduplicate(pointers), tostore, 'download')
203 203
204 204 def _batchrequest(self, pointers, action):
205 205 """Get metadata about objects pointed by pointers for given action
206 206
207 207 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
208 208 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
209 209 """
210 210 objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
211 211 requestdata = json.dumps({
212 212 'objects': objects,
213 213 'operation': action,
214 214 })
215 215 batchreq = util.urlreq.request('%s/objects/batch' % self.baseurl,
216 216 data=requestdata)
217 217 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
218 218 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
219 219 try:
220 rawjson = self.urlopener.open(batchreq).read()
220 rsp = self.urlopener.open(batchreq)
221 rawjson = rsp.read()
221 222 except util.urlerr.httperror as ex:
222 223 raise LfsRemoteError(_('LFS HTTP error: %s (action=%s)')
223 224 % (ex, action))
224 225 try:
225 226 response = json.loads(rawjson)
226 227 except ValueError:
227 228 raise LfsRemoteError(_('LFS server returns invalid JSON: %s')
228 229 % rawjson)
230
231 if self.ui.debugflag:
232 self.ui.debug('Status: %d\n' % rsp.status)
233 # lfs-test-server and hg serve return headers in different order
234 self.ui.debug('%s\n'
235 % '\n'.join(sorted(str(rsp.info()).splitlines())))
236
237 if 'objects' in response:
238 response['objects'] = sorted(response['objects'],
239 key=lambda p: p['oid'])
240 self.ui.debug('%s\n'
241 % json.dumps(response, indent=2, sort_keys=True))
242
229 243 return response
230 244
231 245 def _checkforservererror(self, pointers, responses, action):
232 246 """Scans errors from objects
233 247
234 248 Raises LfsRemoteError if any objects have an error"""
235 249 for response in responses:
236 250 # The server should return 404 when objects cannot be found. Some
237 251 # server implementations (e.g. lfs-test-server) do not set "error"
238 252 # but just remove "download" from "actions". Treat that case
239 253 # the same as a 404 error.
240 254 notfound = (response.get('error', {}).get('code') == 404
241 255 or (action == 'download'
242 256 and action not in response.get('actions', [])))
243 257 if notfound:
244 258 ptrmap = {p.oid(): p for p in pointers}
245 259 p = ptrmap.get(response['oid'], None)
246 260 if p:
247 261 filename = getattr(p, 'filename', 'unknown')
248 262 raise LfsRemoteError(
249 263 _(('LFS server error. Remote object '
250 264 'for "%s" not found: %r')) % (filename, response))
251 265 else:
252 266 raise LfsRemoteError(
253 267 _('LFS server error. Unsolicited response for oid %s')
254 268 % response['oid'])
255 269 if 'error' in response:
256 270 raise LfsRemoteError(_('LFS server error: %r') % response)
257 271
258 272 def _extractobjects(self, response, pointers, action):
259 273 """extract objects from response of the batch API
260 274
261 275 response: parsed JSON object returned by batch API
262 276 return response['objects'] filtered by action
263 277 raise if any object has an error
264 278 """
265 279 # Scan errors from objects - fail early
266 280 objects = response.get('objects', [])
267 281 self._checkforservererror(pointers, objects, action)
268 282
269 283 # Filter objects with given action. Practically, this skips uploading
270 284 # objects which exist in the server.
271 285 filteredobjects = [o for o in objects if action in o.get('actions', [])]
272 286
273 287 return filteredobjects
274 288
275 289 def _basictransfer(self, obj, action, localstore):
276 290 """Download or upload a single object using basic transfer protocol
277 291
278 292 obj: dict, an object description returned by batch API
279 293 action: string, one of ['upload', 'download']
280 294 localstore: blobstore.local
281 295
282 296 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
283 297 basic-transfers.md
284 298 """
285 299 oid = pycompat.bytestr(obj['oid'])
286 300
287 301 href = pycompat.bytestr(obj['actions'][action].get('href'))
288 302 headers = obj['actions'][action].get('header', {}).items()
289 303
290 304 request = util.urlreq.request(href)
291 305 if action == 'upload':
292 306 # If uploading blobs, read data from local blobstore.
293 307 with localstore.open(oid) as fp:
294 308 _verifyfile(oid, fp)
295 309 request.data = filewithprogress(localstore.open(oid), None)
296 310 request.get_method = lambda: 'PUT'
297 311
298 312 for k, v in headers:
299 313 request.add_header(k, v)
300 314
301 315 response = b''
302 316 try:
303 317 req = self.urlopener.open(request)
318
319 if self.ui.debugflag:
320 self.ui.debug('Status: %d\n' % req.status)
321 # lfs-test-server and hg serve return headers in different order
322 self.ui.debug('%s\n'
323 % '\n'.join(sorted(str(req.info()).splitlines())))
324
304 325 if action == 'download':
305 326 # If downloading blobs, store downloaded data to local blobstore
306 327 localstore.download(oid, req)
307 328 else:
308 329 while True:
309 330 data = req.read(1048576)
310 331 if not data:
311 332 break
312 333 response += data
313 334 if response:
314 335 self.ui.debug('lfs %s response: %s' % (action, response))
315 336 except util.urlerr.httperror as ex:
316 337 if self.ui.debugflag:
317 338 self.ui.debug('%s: %s\n' % (oid, ex.read()))
318 339 raise LfsRemoteError(_('HTTP error: %s (oid=%s, action=%s)')
319 340 % (ex, oid, action))
320 341
321 342 def _batch(self, pointers, localstore, action):
322 343 if action not in ['upload', 'download']:
323 344 raise error.ProgrammingError('invalid Git-LFS action: %s' % action)
324 345
325 346 response = self._batchrequest(pointers, action)
326 347 objects = self._extractobjects(response, pointers, action)
327 348 total = sum(x.get('size', 0) for x in objects)
328 349 sizes = {}
329 350 for obj in objects:
330 351 sizes[obj.get('oid')] = obj.get('size', 0)
331 352 topic = {'upload': _('lfs uploading'),
332 353 'download': _('lfs downloading')}[action]
333 354 if len(objects) > 1:
334 355 self.ui.note(_('lfs: need to transfer %d objects (%s)\n')
335 356 % (len(objects), util.bytecount(total)))
336 357 self.ui.progress(topic, 0, total=total)
337 358 def transfer(chunk):
338 359 for obj in chunk:
339 360 objsize = obj.get('size', 0)
340 361 if self.ui.verbose:
341 362 if action == 'download':
342 363 msg = _('lfs: downloading %s (%s)\n')
343 364 elif action == 'upload':
344 365 msg = _('lfs: uploading %s (%s)\n')
345 366 self.ui.note(msg % (obj.get('oid'),
346 367 util.bytecount(objsize)))
347 368 retry = self.retry
348 369 while True:
349 370 try:
350 371 self._basictransfer(obj, action, localstore)
351 372 yield 1, obj.get('oid')
352 373 break
353 374 except socket.error as ex:
354 375 if retry > 0:
355 376 self.ui.note(
356 377 _('lfs: failed: %r (remaining retry %d)\n')
357 378 % (ex, retry))
358 379 retry -= 1
359 380 continue
360 381 raise
361 382
362 383 # Until https multiplexing gets sorted out
363 384 if self.ui.configbool('experimental', 'lfs.worker-enable'):
364 385 oids = worker.worker(self.ui, 0.1, transfer, (),
365 386 sorted(objects, key=lambda o: o.get('oid')))
366 387 else:
367 388 oids = transfer(sorted(objects, key=lambda o: o.get('oid')))
368 389
369 390 processed = 0
370 391 blobs = 0
371 392 for _one, oid in oids:
372 393 processed += sizes[oid]
373 394 blobs += 1
374 395 self.ui.progress(topic, processed, total=total)
375 396 self.ui.note(_('lfs: processed: %s\n') % oid)
376 397 self.ui.progress(topic, pos=None, total=total)
377 398
378 399 if blobs > 0:
379 400 if action == 'upload':
380 401 self.ui.status(_('lfs: uploaded %d files (%s)\n')
381 402 % (blobs, util.bytecount(processed)))
382 403 # TODO: coalesce the download requests, and comment this in
383 404 #elif action == 'download':
384 405 # self.ui.status(_('lfs: downloaded %d files (%s)\n')
385 406 # % (blobs, util.bytecount(processed)))
386 407
387 408 def __del__(self):
388 409 # copied from mercurial/httppeer.py
389 410 urlopener = getattr(self, 'urlopener', None)
390 411 if urlopener:
391 412 for h in urlopener.handlers:
392 413 h.close()
393 414 getattr(h, "close_all", lambda : None)()
394 415
395 416 class _dummyremote(object):
396 417 """Dummy store storing blobs to temp directory."""
397 418
398 419 def __init__(self, repo, url):
399 420 fullpath = repo.vfs.join('lfs', url.path)
400 421 self.vfs = lfsvfs(fullpath)
401 422
402 423 def writebatch(self, pointers, fromstore):
403 424 for p in _deduplicate(pointers):
404 425 content = fromstore.read(p.oid(), verify=True)
405 426 with self.vfs(p.oid(), 'wb', atomictemp=True) as fp:
406 427 fp.write(content)
407 428
408 429 def readbatch(self, pointers, tostore):
409 430 for p in _deduplicate(pointers):
410 431 with self.vfs(p.oid(), 'rb') as fp:
411 432 tostore.download(p.oid(), fp)
412 433
413 434 class _nullremote(object):
414 435 """Null store storing blobs to /dev/null."""
415 436
416 437 def __init__(self, repo, url):
417 438 pass
418 439
419 440 def writebatch(self, pointers, fromstore):
420 441 pass
421 442
422 443 def readbatch(self, pointers, tostore):
423 444 pass
424 445
425 446 class _promptremote(object):
426 447 """Prompt user to set lfs.url when accessed."""
427 448
428 449 def __init__(self, repo, url):
429 450 pass
430 451
431 452 def writebatch(self, pointers, fromstore, ui=None):
432 453 self._prompt()
433 454
434 455 def readbatch(self, pointers, tostore, ui=None):
435 456 self._prompt()
436 457
437 458 def _prompt(self):
438 459 raise error.Abort(_('lfs.url needs to be configured'))
439 460
440 461 _storemap = {
441 462 'https': _gitlfsremote,
442 463 'http': _gitlfsremote,
443 464 'file': _dummyremote,
444 465 'null': _nullremote,
445 466 None: _promptremote,
446 467 }
447 468
448 469 def _deduplicate(pointers):
449 470 """Remove any duplicate oids that exist in the list"""
450 471 reduced = util.sortdict()
451 472 for p in pointers:
452 473 reduced[p.oid()] = p
453 474 return reduced.values()
454 475
455 476 def _verify(oid, content):
456 477 realoid = hashlib.sha256(content).hexdigest()
457 478 if realoid != oid:
458 479 raise error.Abort(_('detected corrupt lfs object: %s') % oid,
459 480 hint=_('run hg verify'))
460 481
461 482 def _verifyfile(oid, fp):
462 483 sha256 = hashlib.sha256()
463 484 while True:
464 485 data = fp.read(1024 * 1024)
465 486 if not data:
466 487 break
467 488 sha256.update(data)
468 489 realoid = sha256.hexdigest()
469 490 if realoid != oid:
470 491 raise error.Abort(_('detected corrupt lfs object: %s') % oid,
471 492 hint=_('run hg verify'))
472 493
473 494 def remote(repo):
474 495 """remotestore factory. return a store in _storemap depending on config"""
475 496 url = util.url(repo.ui.config('lfs', 'url') or '')
476 497 scheme = url.scheme
477 498 if scheme not in _storemap:
478 499 raise error.Abort(_('lfs: unknown url scheme: %s') % scheme)
479 500 return _storemap[scheme](repo, url)
480 501
481 502 class LfsRemoteError(error.RevlogError):
482 503 pass
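The test expectations below now include these debug blocks around each request. For context, the JSON payloads shown there are responses to the Git-LFS batch API call that _batchrequest() issues against <baseurl>/objects/batch; a hedged, self-contained sketch of such a request follows (base URL, object list, and function name are placeholders, not taken from the test):

    import json
    from urllib.request import Request, urlopen

    def lfs_batch(baseurl, objects, action='download'):
        # objects: a list of {'oid': <sha256 hex>, 'size': <int>} dicts
        body = json.dumps({'objects': objects, 'operation': action}).encode('utf-8')
        req = Request('%s/objects/batch' % baseurl, data=body)  # data= makes this a POST
        req.add_header('Accept', 'application/vnd.git-lfs+json')
        req.add_header('Content-Type', 'application/vnd.git-lfs+json')
        with urlopen(req) as rsp:
            return json.loads(rsp.read())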
@@ -1,377 +1,772 @@
1 1 #require lfs-test-server
2 2
3 3 $ LFS_LISTEN="tcp://:$HGPORT"
4 4 $ LFS_HOST="localhost:$HGPORT"
5 5 $ LFS_PUBLIC=1
6 6 $ export LFS_LISTEN LFS_HOST LFS_PUBLIC
7 7 #if no-windows
8 8 $ lfs-test-server &> lfs-server.log &
9 9 $ echo $! >> $DAEMON_PIDS
10 10 #else
11 11 $ cat >> $TESTTMP/spawn.py <<EOF
12 12 > import os
13 13 > import subprocess
14 14 > import sys
15 15 >
16 16 > for path in os.environ["PATH"].split(os.pathsep):
17 17 > exe = os.path.join(path, 'lfs-test-server.exe')
18 18 > if os.path.exists(exe):
19 19 > with open('lfs-server.log', 'wb') as out:
20 20 > p = subprocess.Popen(exe, stdout=out, stderr=out)
21 21 > sys.stdout.write('%s\n' % p.pid)
22 22 > sys.exit(0)
23 23 > sys.exit(1)
24 24 > EOF
25 25 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
26 26 #endif
27 27
28 28 $ cat >> $HGRCPATH <<EOF
29 29 > [extensions]
30 30 > lfs=
31 31 > [lfs]
32 32 > url=http://foo:bar@$LFS_HOST/
33 33 > track=all()
34 34 > EOF
35 35
36 36 $ hg init repo1
37 37 $ cd repo1
38 38 $ echo THIS-IS-LFS > a
39 39 $ hg commit -m a -A a
40 40
41 41 A push can be serviced directly from the usercache if it isn't in the local
42 42 store.
43 43
44 44 $ hg init ../repo2
45 45 $ mv .hg/store/lfs .hg/store/lfs_
46 46 $ hg push ../repo2 --debug
47 47 http auth: user foo, password ***
48 48 pushing to ../repo2
49 49 http auth: user foo, password ***
50 50 query 1; heads
51 51 searching for changes
52 52 1 total queries in *s (glob)
53 53 listing keys for "phases"
54 54 checking for updated bookmarks
55 55 listing keys for "bookmarks"
56 56 lfs: computing set of blobs to upload
57 Status: 200
58 Content-Length: 309
59 Content-Type: application/vnd.git-lfs+json
60 Date: $HTTP_DATE$
61 {
62 "objects": [
63 {
64 "actions": {
65 "upload": {
66 "expires_at": "$ISO_8601_DATE_TIME$",
67 "header": {
68 "Accept": "application/vnd.git-lfs"
69 },
70 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
71 }
72 },
73 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
74 "size": 12
75 }
76 ]
77 }
57 78 lfs: uploading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
79 Status: 200
80 Content-Length: 0
81 Content-Type: text/plain; charset=utf-8
82 Date: $HTTP_DATE$
58 83 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
59 84 lfs: uploaded 1 files (12 bytes)
60 85 1 changesets found
61 86 list of changesets:
62 87 99a7098854a3984a5c9eab0fc7a2906697b7cb5c
63 88 bundle2-output-bundle: "HG20", 4 parts total
64 89 bundle2-output-part: "replycaps" 191 bytes payload
65 90 bundle2-output-part: "check:heads" streamed payload
66 91 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
67 92 bundle2-output-part: "phase-heads" 24 bytes payload
68 93 bundle2-input-bundle: with-transaction
69 94 bundle2-input-part: "replycaps" supported
70 95 bundle2-input-part: total payload size 191
71 96 bundle2-input-part: "check:heads" supported
72 97 bundle2-input-part: total payload size 20
73 98 bundle2-input-part: "changegroup" (params: 1 mandatory) supported
74 99 adding changesets
75 100 add changeset 99a7098854a3
76 101 adding manifests
77 102 adding file changes
78 103 adding a revisions
79 104 added 1 changesets with 1 changes to 1 files
80 105 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
81 106 bundle2-input-part: total payload size 617
82 107 bundle2-input-part: "phase-heads" supported
83 108 bundle2-input-part: total payload size 24
84 109 bundle2-input-bundle: 3 parts total
85 110 updating the branch cache
86 111 bundle2-output-bundle: "HG20", 1 parts total
87 112 bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
88 113 bundle2-input-bundle: no-transaction
89 114 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
90 115 bundle2-input-bundle: 0 parts total
91 116 listing keys for "phases"
92 117 $ mv .hg/store/lfs_ .hg/store/lfs
93 118
94 119 Clear the cache to force a download
95 120 $ rm -rf `hg config lfs.usercache`
96 121 $ cd ../repo2
97 122 $ hg update tip --debug
98 123 http auth: user foo, password ***
99 124 resolving manifests
100 125 branchmerge: False, force: False, partial: False
101 126 ancestor: 000000000000, local: 000000000000+, remote: 99a7098854a3
127 Status: 200
128 Content-Length: 311
129 Content-Type: application/vnd.git-lfs+json
130 Date: $HTTP_DATE$
131 {
132 "objects": [
133 {
134 "actions": {
135 "download": {
136 "expires_at": "$ISO_8601_DATE_TIME$",
137 "header": {
138 "Accept": "application/vnd.git-lfs"
139 },
140 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
141 }
142 },
143 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
144 "size": 12
145 }
146 ]
147 }
102 148 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
149 Status: 200
150 Content-Length: 12
151 Content-Type: text/plain; charset=utf-8
152 Date: $HTTP_DATE$
103 153 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
104 154 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
105 155 a: remote created -> g
106 156 getting a
107 157 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
108 158 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
109 159
110 160 When the server has some blobs already
111 161
112 162 $ hg mv a b
113 163 $ echo ANOTHER-LARGE-FILE > c
114 164 $ echo ANOTHER-LARGE-FILE2 > d
115 165 $ hg commit -m b-and-c -A b c d
116 166 $ hg push ../repo1 --debug
117 167 http auth: user foo, password ***
118 168 pushing to ../repo1
119 169 http auth: user foo, password ***
120 170 query 1; heads
121 171 searching for changes
122 172 all remote heads known locally
123 173 listing keys for "phases"
124 174 checking for updated bookmarks
125 175 listing keys for "bookmarks"
126 176 listing keys for "bookmarks"
127 177 lfs: computing set of blobs to upload
178 Status: 200
179 Content-Length: 901
180 Content-Type: application/vnd.git-lfs+json
181 Date: $HTTP_DATE$
182 {
183 "objects": [
184 {
185 "actions": {
186 "download": {
187 "expires_at": "$ISO_8601_DATE_TIME$",
188 "header": {
189 "Accept": "application/vnd.git-lfs"
190 },
191 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
192 }
193 },
194 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
195 "size": 12
196 },
197 {
198 "actions": {
199 "upload": {
200 "expires_at": "$ISO_8601_DATE_TIME$",
201 "header": {
202 "Accept": "application/vnd.git-lfs"
203 },
204 "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
205 }
206 },
207 "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
208 "size": 20
209 },
210 {
211 "actions": {
212 "upload": {
213 "expires_at": "$ISO_8601_DATE_TIME$",
214 "header": {
215 "Accept": "application/vnd.git-lfs"
216 },
217 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
218 }
219 },
220 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
221 "size": 19
222 }
223 ]
224 }
128 225 lfs: need to transfer 2 objects (39 bytes)
129 226 lfs: uploading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
227 Status: 200
228 Content-Length: 0
229 Content-Type: text/plain; charset=utf-8
230 Date: $HTTP_DATE$
130 231 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
131 232 lfs: uploading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
233 Status: 200
234 Content-Length: 0
235 Content-Type: text/plain; charset=utf-8
236 Date: $HTTP_DATE$
132 237 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
133 238 lfs: uploaded 2 files (39 bytes)
134 239 1 changesets found
135 240 list of changesets:
136 241 dfca2c9e2ef24996aa61ba2abd99277d884b3d63
137 242 bundle2-output-bundle: "HG20", 5 parts total
138 243 bundle2-output-part: "replycaps" 191 bytes payload
139 244 bundle2-output-part: "check:phases" 24 bytes payload
140 245 bundle2-output-part: "check:heads" streamed payload
141 246 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
142 247 bundle2-output-part: "phase-heads" 24 bytes payload
143 248 bundle2-input-bundle: with-transaction
144 249 bundle2-input-part: "replycaps" supported
145 250 bundle2-input-part: total payload size 191
146 251 bundle2-input-part: "check:phases" supported
147 252 bundle2-input-part: total payload size 24
148 253 bundle2-input-part: "check:heads" supported
149 254 bundle2-input-part: total payload size 20
150 255 bundle2-input-part: "changegroup" (params: 1 mandatory) supported
151 256 adding changesets
152 257 add changeset dfca2c9e2ef2
153 258 adding manifests
154 259 adding file changes
155 260 adding b revisions
156 261 adding c revisions
157 262 adding d revisions
158 263 added 1 changesets with 3 changes to 3 files
159 264 bundle2-input-part: total payload size 1315
160 265 bundle2-input-part: "phase-heads" supported
161 266 bundle2-input-part: total payload size 24
162 267 bundle2-input-bundle: 4 parts total
163 268 updating the branch cache
164 269 bundle2-output-bundle: "HG20", 1 parts total
165 270 bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
166 271 bundle2-input-bundle: no-transaction
167 272 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
168 273 bundle2-input-bundle: 0 parts total
169 274 listing keys for "phases"
170 275
171 276 Clear the cache to force a download
172 277 $ rm -rf `hg config lfs.usercache`
173 278 $ hg --repo ../repo1 update tip --debug
174 279 http auth: user foo, password ***
175 280 resolving manifests
176 281 branchmerge: False, force: False, partial: False
177 282 ancestor: 99a7098854a3, local: 99a7098854a3+, remote: dfca2c9e2ef2
283 Status: 200
284 Content-Length: 608
285 Content-Type: application/vnd.git-lfs+json
286 Date: $HTTP_DATE$
287 {
288 "objects": [
289 {
290 "actions": {
291 "download": {
292 "expires_at": "$ISO_8601_DATE_TIME$",
293 "header": {
294 "Accept": "application/vnd.git-lfs"
295 },
296 "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
297 }
298 },
299 "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
300 "size": 20
301 },
302 {
303 "actions": {
304 "download": {
305 "expires_at": "$ISO_8601_DATE_TIME$",
306 "header": {
307 "Accept": "application/vnd.git-lfs"
308 },
309 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
310 }
311 },
312 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
313 "size": 19
314 }
315 ]
316 }
178 317 lfs: need to transfer 2 objects (39 bytes)
179 318 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
319 Status: 200
320 Content-Length: 20
321 Content-Type: text/plain; charset=utf-8
322 Date: $HTTP_DATE$
180 323 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
181 324 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
182 325 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
326 Status: 200
327 Content-Length: 19
328 Content-Type: text/plain; charset=utf-8
329 Date: $HTTP_DATE$
183 330 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
184 331 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
185 332 b: remote created -> g
186 333 getting b
187 334 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
188 335 c: remote created -> g
189 336 getting c
190 337 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
191 338 d: remote created -> g
192 339 getting d
193 340 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
194 341 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
195 342
196 343 Test a corrupt file download, but clear the cache first to force a download.
197 344
198 345 $ rm -rf `hg config lfs.usercache`
199 346 $ cp $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 blob
200 347 $ echo 'damage' > $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
201 348 $ rm ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
202 349 $ rm ../repo1/*
203 350
204 351 $ hg --repo ../repo1 update -C tip --debug
205 352 http auth: user foo, password ***
206 353 resolving manifests
207 354 branchmerge: False, force: True, partial: False
208 355 ancestor: dfca2c9e2ef2+, local: dfca2c9e2ef2+, remote: dfca2c9e2ef2
356 Status: 200
357 Content-Length: 311
358 Content-Type: application/vnd.git-lfs+json
359 Date: $HTTP_DATE$
360 {
361 "objects": [
362 {
363 "actions": {
364 "download": {
365 "expires_at": "$ISO_8601_DATE_TIME$",
366 "header": {
367 "Accept": "application/vnd.git-lfs"
368 },
369 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
370 }
371 },
372 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
373 "size": 19
374 }
375 ]
376 }
209 377 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
378 Status: 200
379 Content-Length: 7
380 Content-Type: text/plain; charset=utf-8
381 Date: $HTTP_DATE$
210 382 abort: corrupt remote lfs object: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
211 383 [255]
212 384
213 385 The corrupted blob is not added to the usercache or local store
214 386
215 387 $ test -f ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
216 388 [1]
217 389 $ test -f `hg config lfs.usercache`/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
218 390 [1]
219 391 $ cp blob $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
220 392
221 393 Test a corrupted file upload
222 394
223 395 $ echo 'another lfs blob' > b
224 396 $ hg ci -m 'another blob'
225 397 $ echo 'damage' > .hg/store/lfs/objects/e6/59058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
226 398 $ hg push --debug ../repo1
227 399 http auth: user foo, password ***
228 400 pushing to ../repo1
229 401 http auth: user foo, password ***
230 402 query 1; heads
231 403 searching for changes
232 404 all remote heads known locally
233 405 listing keys for "phases"
234 406 checking for updated bookmarks
235 407 listing keys for "bookmarks"
236 408 listing keys for "bookmarks"
237 409 lfs: computing set of blobs to upload
410 Status: 200
411 Content-Length: 309
412 Content-Type: application/vnd.git-lfs+json
413 Date: $HTTP_DATE$
414 {
415 "objects": [
416 {
417 "actions": {
418 "upload": {
419 "expires_at": "$ISO_8601_DATE_TIME$",
420 "header": {
421 "Accept": "application/vnd.git-lfs"
422 },
423 "href": "http://localhost:$HGPORT/objects/e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0"
424 }
425 },
426 "oid": "e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0",
427 "size": 17
428 }
429 ]
430 }
238 431 lfs: uploading e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0 (17 bytes)
239 432 abort: detected corrupt lfs object: e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
240 433 (run hg verify)
241 434 [255]
242 435
243 436 Archive will prefetch blobs in a group
244 437
245 438 $ rm -rf .hg/store/lfs `hg config lfs.usercache`
246 439 $ hg archive --debug -r 1 ../archive
247 440 http auth: user foo, password ***
441 Status: 200
442 Content-Length: 905
443 Content-Type: application/vnd.git-lfs+json
444 Date: $HTTP_DATE$
445 {
446 "objects": [
447 {
448 "actions": {
449 "download": {
450 "expires_at": "$ISO_8601_DATE_TIME$",
451 "header": {
452 "Accept": "application/vnd.git-lfs"
453 },
454 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
455 }
456 },
457 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
458 "size": 12
459 },
460 {
461 "actions": {
462 "download": {
463 "expires_at": "$ISO_8601_DATE_TIME$",
464 "header": {
465 "Accept": "application/vnd.git-lfs"
466 },
467 "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
468 }
469 },
470 "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
471 "size": 20
472 },
473 {
474 "actions": {
475 "download": {
476 "expires_at": "$ISO_8601_DATE_TIME$",
477 "header": {
478 "Accept": "application/vnd.git-lfs"
479 },
480 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
481 }
482 },
483 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
484 "size": 19
485 }
486 ]
487 }
248 488 lfs: need to transfer 3 objects (51 bytes)
249 489 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
490 Status: 200
491 Content-Length: 12
492 Content-Type: text/plain; charset=utf-8
493 Date: $HTTP_DATE$
250 494 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
251 495 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
252 496 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
497 Status: 200
498 Content-Length: 20
499 Content-Type: text/plain; charset=utf-8
500 Date: $HTTP_DATE$
253 501 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
254 502 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
255 503 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
504 Status: 200
505 Content-Length: 19
506 Content-Type: text/plain; charset=utf-8
507 Date: $HTTP_DATE$
256 508 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
257 509 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
258 510 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
259 511 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
260 512 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
261 513 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
262 514 $ find ../archive | sort
263 515 ../archive
264 516 ../archive/.hg_archival.txt
265 517 ../archive/a
266 518 ../archive/b
267 519 ../archive/c
268 520 ../archive/d
269 521
270 522 Cat will prefetch blobs in a group
271 523
272 524 $ rm -rf .hg/store/lfs `hg config lfs.usercache`
273 525 $ hg cat --debug -r 1 a b c
274 526 http auth: user foo, password ***
527 Status: 200
528 Content-Length: 608
529 Content-Type: application/vnd.git-lfs+json
530 Date: $HTTP_DATE$
531 {
532 "objects": [
533 {
534 "actions": {
535 "download": {
536 "expires_at": "$ISO_8601_DATE_TIME$",
537 "header": {
538 "Accept": "application/vnd.git-lfs"
539 },
540 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
541 }
542 },
543 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
544 "size": 12
545 },
546 {
547 "actions": {
548 "download": {
549 "expires_at": "$ISO_8601_DATE_TIME$",
550 "header": {
551 "Accept": "application/vnd.git-lfs"
552 },
553 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
554 }
555 },
556 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
557 "size": 19
558 }
559 ]
560 }
275 561 lfs: need to transfer 2 objects (31 bytes)
276 562 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
563 Status: 200
564 Content-Length: 12
565 Content-Type: text/plain; charset=utf-8
566 Date: $HTTP_DATE$
277 567 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
278 568 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
279 569 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
570 Status: 200
571 Content-Length: 19
572 Content-Type: text/plain; charset=utf-8
573 Date: $HTTP_DATE$
280 574 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
281 575 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
282 576 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
283 577 THIS-IS-LFS
284 578 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
285 579 THIS-IS-LFS
286 580 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
287 581 ANOTHER-LARGE-FILE
288 582
289 583 Revert will prefetch blobs in a group
290 584
291 585 $ rm -rf .hg/store/lfs
292 586 $ rm -rf `hg config lfs.usercache`
293 587 $ rm *
294 588 $ hg revert --all -r 1 --debug
295 589 http auth: user foo, password ***
296 590 adding a
297 591 reverting b
298 592 reverting c
299 593 reverting d
594 Status: 200
595 Content-Length: 905
596 Content-Type: application/vnd.git-lfs+json
597 Date: $HTTP_DATE$
598 {
599 "objects": [
600 {
601 "actions": {
602 "download": {
603 "expires_at": "$ISO_8601_DATE_TIME$",
604 "header": {
605 "Accept": "application/vnd.git-lfs"
606 },
607 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
608 }
609 },
610 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
611 "size": 12
612 },
613 {
614 "actions": {
615 "download": {
616 "expires_at": "$ISO_8601_DATE_TIME$",
617 "header": {
618 "Accept": "application/vnd.git-lfs"
619 },
620 "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
621 }
622 },
623 "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
624 "size": 20
625 },
626 {
627 "actions": {
628 "download": {
629 "expires_at": "$ISO_8601_DATE_TIME$",
630 "header": {
631 "Accept": "application/vnd.git-lfs"
632 },
633 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
634 }
635 },
636 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
637 "size": 19
638 }
639 ]
640 }
300 641 lfs: need to transfer 3 objects (51 bytes)
301 642 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
643 Status: 200
644 Content-Length: 12
645 Content-Type: text/plain; charset=utf-8
646 Date: $HTTP_DATE$
302 647 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
303 648 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
304 649 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
650 Status: 200
651 Content-Length: 20
652 Content-Type: text/plain; charset=utf-8
653 Date: $HTTP_DATE$
305 654 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
306 655 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
307 656 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
657 Status: 200
658 Content-Length: 19
659 Content-Type: text/plain; charset=utf-8
660 Date: $HTTP_DATE$
308 661 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
309 662 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
310 663 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
311 664 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
312 665 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
313 666 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
314 667
315 668 Check error message when the remote missed a blob:
316 669
317 670 $ echo FFFFF > b
318 671 $ hg commit -m b -A b
319 672 $ echo FFFFF >> b
320 673 $ hg commit -m b b
321 674 $ rm -rf .hg/store/lfs
322 675 $ rm -rf `hg config lfs.usercache`
323 676 $ hg update -C '.^' --debug
324 677 http auth: user foo, password ***
325 678 resolving manifests
326 679 branchmerge: False, force: True, partial: False
327 680 ancestor: 62fdbaf221c6+, local: 62fdbaf221c6+, remote: ef0564edf47e
681 Status: 200
682 Content-Length: 308
683 Content-Type: application/vnd.git-lfs+json
684 Date: $HTTP_DATE$
685 {
686 "objects": [
687 {
688 "actions": {
689 "upload": {
690 "expires_at": "$ISO_8601_DATE_TIME$",
691 "header": {
692 "Accept": "application/vnd.git-lfs"
693 },
694 "href": "http://localhost:$HGPORT/objects/8e6ea5f6c066b44a0efa43bcce86aea73f17e6e23f0663df0251e7524e140a13"
695 }
696 },
697 "oid": "8e6ea5f6c066b44a0efa43bcce86aea73f17e6e23f0663df0251e7524e140a13",
698 "size": 6
699 }
700 ]
701 }
328 702 abort: LFS server error. Remote object for "b" not found:(.*)! (re)
329 703 [255]
330 704
331 705 Check error message when object does not exist:
332 706
333 707 $ cd $TESTTMP
334 708 $ hg init test && cd test
335 709 $ echo "[extensions]" >> .hg/hgrc
336 710 $ echo "lfs=" >> .hg/hgrc
337 711 $ echo "[lfs]" >> .hg/hgrc
338 712 $ echo "threshold=1" >> .hg/hgrc
339 713 $ echo a > a
340 714 $ hg add a
341 715 $ hg commit -m 'test'
342 716 $ echo aaaaa > a
343 717 $ hg commit -m 'largefile'
344 718 $ hg debugdata .hg/store/data/a.i 1 # verify this is not the file content but includes "oid", the LFS "pointer".
345 719 version https://git-lfs.github.com/spec/v1
346 720 oid sha256:bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a
347 721 size 6
348 722 x-is-binary 0
349 723 $ cd ..
350 724 $ rm -rf `hg config lfs.usercache`
351 725
352 726 (Restart the server in a different location so it no longer has the content)
353 727
354 728 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
355 729 $ rm $DAEMON_PIDS
356 730 $ mkdir $TESTTMP/lfs-server2
357 731 $ cd $TESTTMP/lfs-server2
358 732 #if no-windows
359 733 $ lfs-test-server &> lfs-server.log &
360 734 $ echo $! >> $DAEMON_PIDS
361 735 #else
362 736 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
363 737 #endif
364 738
365 739 $ cd $TESTTMP
366 740 $ hg --debug clone test test2
367 741 http auth: user foo, password ***
368 742 linked 6 files
369 743 http auth: user foo, password ***
370 744 updating to branch default
371 745 resolving manifests
372 746 branchmerge: False, force: False, partial: False
373 747 ancestor: 000000000000, local: 000000000000+, remote: d2a338f184a8
748 Status: 200
749 Content-Length: 308
750 Content-Type: application/vnd.git-lfs+json
751 Date: $HTTP_DATE$
752 {
753 "objects": [
754 {
755 "actions": {
756 "upload": {
757 "expires_at": "$ISO_8601_DATE_TIME$",
758 "header": {
759 "Accept": "application/vnd.git-lfs"
760 },
761 "href": "http://localhost:$HGPORT/objects/bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a"
762 }
763 },
764 "oid": "bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a",
765 "size": 6
766 }
767 ]
768 }
374 769 abort: LFS server error. Remote object for "a" not found:(.*)! (re)
375 770 [255]
376 771
377 772 $ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS