lfs: debug print HTTP headers and JSON payload received from the server...
Matt Harbison
r36944:0dcf50dc default
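
This change makes the LFS client dump, under --debug, the HTTP status line and
the response headers (sorted, because lfs-test-server and `hg serve` emit them
in different orders) for both the batch API call and the basic transfer
requests, plus the decoded JSON payload of the batch reply (re-serialized with
sorted keys and a sorted object list).  The standalone sketch below, with
made-up header and payload values, only illustrates that normalization idea;
it is not the patch itself.

    import json

    headers = ('Date: Thu, 01 Jan 1970 00:00:00 GMT\r\n'
               'Content-Type: application/vnd.git-lfs+json\r\n'
               'Content-Length: 309\r\n')
    payload = {'objects': [{'oid': 'f' * 64, 'size': 20},
                           {'oid': 'a' * 64, 'size': 12}]}

    # Sorting the header lines makes the dump independent of the order in
    # which a particular server happens to send them.
    print('\n'.join(sorted(headers.splitlines())))

    # Sorting the object list and the JSON keys keeps the payload dump stable
    # enough to match against in tests.
    payload['objects'].sort(key=lambda p: p['oid'])
    print(json.dumps(payload, indent=2, sort_keys=True))
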
@@ -1,482 +1,503 @@
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import hashlib
10 import hashlib
11 import json
11 import json
12 import os
12 import os
13 import re
13 import re
14 import socket
14 import socket
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17
17
18 from mercurial import (
18 from mercurial import (
19 error,
19 error,
20 pathutil,
20 pathutil,
21 pycompat,
21 pycompat,
22 url as urlmod,
22 url as urlmod,
23 util,
23 util,
24 vfs as vfsmod,
24 vfs as vfsmod,
25 worker,
25 worker,
26 )
26 )
27
27
28 from ..largefiles import lfutil
28 from ..largefiles import lfutil
29
29
30 # 64 bytes for SHA256
30 # 64 bytes for SHA256
31 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
31 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
32
32
33 class lfsvfs(vfsmod.vfs):
33 class lfsvfs(vfsmod.vfs):
34 def join(self, path):
34 def join(self, path):
35 """split the path at first two characters, like: XX/XXXXX..."""
35 """split the path at first two characters, like: XX/XXXXX..."""
36 if not _lfsre.match(path):
36 if not _lfsre.match(path):
37 raise error.ProgrammingError('unexpected lfs path: %s' % path)
37 raise error.ProgrammingError('unexpected lfs path: %s' % path)
38 return super(lfsvfs, self).join(path[0:2], path[2:])
38 return super(lfsvfs, self).join(path[0:2], path[2:])
39
39
40 def walk(self, path=None, onerror=None):
40 def walk(self, path=None, onerror=None):
41 """Yield (dirpath, [], oids) tuple for blobs under path
41 """Yield (dirpath, [], oids) tuple for blobs under path
42
42
43 Oids only exist in the root of this vfs, so dirpath is always ''.
43 Oids only exist in the root of this vfs, so dirpath is always ''.
44 """
44 """
45 root = os.path.normpath(self.base)
45 root = os.path.normpath(self.base)
46 # when dirpath == root, dirpath[prefixlen:] becomes empty
46 # when dirpath == root, dirpath[prefixlen:] becomes empty
47 # because len(dirpath) < prefixlen.
47 # because len(dirpath) < prefixlen.
48 prefixlen = len(pathutil.normasprefix(root))
48 prefixlen = len(pathutil.normasprefix(root))
49 oids = []
49 oids = []
50
50
51 for dirpath, dirs, files in os.walk(self.reljoin(self.base, path or ''),
51 for dirpath, dirs, files in os.walk(self.reljoin(self.base, path or ''),
52 onerror=onerror):
52 onerror=onerror):
53 dirpath = dirpath[prefixlen:]
53 dirpath = dirpath[prefixlen:]
54
54
55 # Silently skip unexpected files and directories
55 # Silently skip unexpected files and directories
56 if len(dirpath) == 2:
56 if len(dirpath) == 2:
57 oids.extend([dirpath + f for f in files
57 oids.extend([dirpath + f for f in files
58 if _lfsre.match(dirpath + f)])
58 if _lfsre.match(dirpath + f)])
59
59
60 yield ('', [], oids)
60 yield ('', [], oids)
61
61
62 class filewithprogress(object):
62 class filewithprogress(object):
63 """a file-like object that supports __len__ and read.
63 """a file-like object that supports __len__ and read.
64
64
65 Useful to provide progress information for how many bytes are read.
65 Useful to provide progress information for how many bytes are read.
66 """
66 """
67
67
68 def __init__(self, fp, callback):
68 def __init__(self, fp, callback):
69 self._fp = fp
69 self._fp = fp
70 self._callback = callback # func(readsize)
70 self._callback = callback # func(readsize)
71 fp.seek(0, os.SEEK_END)
71 fp.seek(0, os.SEEK_END)
72 self._len = fp.tell()
72 self._len = fp.tell()
73 fp.seek(0)
73 fp.seek(0)
74
74
75 def __len__(self):
75 def __len__(self):
76 return self._len
76 return self._len
77
77
78 def read(self, size):
78 def read(self, size):
79 if self._fp is None:
79 if self._fp is None:
80 return b''
80 return b''
81 data = self._fp.read(size)
81 data = self._fp.read(size)
82 if data:
82 if data:
83 if self._callback:
83 if self._callback:
84 self._callback(len(data))
84 self._callback(len(data))
85 else:
85 else:
86 self._fp.close()
86 self._fp.close()
87 self._fp = None
87 self._fp = None
88 return data
88 return data
89
89
90 class local(object):
90 class local(object):
91 """Local blobstore for large file contents.
91 """Local blobstore for large file contents.
92
92
93 This blobstore is used both as a cache and as a staging area for large blobs
93 This blobstore is used both as a cache and as a staging area for large blobs
94 to be uploaded to the remote blobstore.
94 to be uploaded to the remote blobstore.
95 """
95 """
96
96
97 def __init__(self, repo):
97 def __init__(self, repo):
98 fullpath = repo.svfs.join('lfs/objects')
98 fullpath = repo.svfs.join('lfs/objects')
99 self.vfs = lfsvfs(fullpath)
99 self.vfs = lfsvfs(fullpath)
100 usercache = lfutil._usercachedir(repo.ui, 'lfs')
100 usercache = lfutil._usercachedir(repo.ui, 'lfs')
101 self.cachevfs = lfsvfs(usercache)
101 self.cachevfs = lfsvfs(usercache)
102 self.ui = repo.ui
102 self.ui = repo.ui
103
103
104 def open(self, oid):
104 def open(self, oid):
105 """Open a read-only file descriptor to the named blob, in either the
105 """Open a read-only file descriptor to the named blob, in either the
106 usercache or the local store."""
106 usercache or the local store."""
107 # The usercache is the most likely place to hold the file. Commit will
107 # The usercache is the most likely place to hold the file. Commit will
108 # write to both it and the local store, as will anything that downloads
108 # write to both it and the local store, as will anything that downloads
109 # the blobs. However, things like clone without an update won't
109 # the blobs. However, things like clone without an update won't
110 # populate the local store. For an init + push of a local clone,
110 # populate the local store. For an init + push of a local clone,
111 # the usercache is the only place it _could_ be. If not present, the
111 # the usercache is the only place it _could_ be. If not present, the
112 # missing file msg here will indicate the local repo, not the usercache.
112 # missing file msg here will indicate the local repo, not the usercache.
113 if self.cachevfs.exists(oid):
113 if self.cachevfs.exists(oid):
114 return self.cachevfs(oid, 'rb')
114 return self.cachevfs(oid, 'rb')
115
115
116 return self.vfs(oid, 'rb')
116 return self.vfs(oid, 'rb')
117
117
118 def download(self, oid, src):
118 def download(self, oid, src):
119 """Read the blob from the remote source in chunks, verify the content,
119 """Read the blob from the remote source in chunks, verify the content,
120 and write to this local blobstore."""
120 and write to this local blobstore."""
121 sha256 = hashlib.sha256()
121 sha256 = hashlib.sha256()
122
122
123 with self.vfs(oid, 'wb', atomictemp=True) as fp:
123 with self.vfs(oid, 'wb', atomictemp=True) as fp:
124 for chunk in util.filechunkiter(src, size=1048576):
124 for chunk in util.filechunkiter(src, size=1048576):
125 fp.write(chunk)
125 fp.write(chunk)
126 sha256.update(chunk)
126 sha256.update(chunk)
127
127
128 realoid = sha256.hexdigest()
128 realoid = sha256.hexdigest()
129 if realoid != oid:
129 if realoid != oid:
130 raise error.Abort(_('corrupt remote lfs object: %s') % oid)
130 raise error.Abort(_('corrupt remote lfs object: %s') % oid)
131
131
132 # XXX: should we verify the content of the cache, and hardlink back to
132 # XXX: should we verify the content of the cache, and hardlink back to
133 # the local store on success, but truncate, write and link on failure?
133 # the local store on success, but truncate, write and link on failure?
134 if not self.cachevfs.exists(oid):
134 if not self.cachevfs.exists(oid):
135 self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
135 self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
136 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
136 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
137
137
138 def write(self, oid, data):
138 def write(self, oid, data):
139 """Write blob to local blobstore.
139 """Write blob to local blobstore.
140
140
141 This should only be called from the filelog during a commit or similar.
141 This should only be called from the filelog during a commit or similar.
142 As such, there is no need to verify the data. Imports from a remote
142 As such, there is no need to verify the data. Imports from a remote
143 store must use ``download()`` instead."""
143 store must use ``download()`` instead."""
144 with self.vfs(oid, 'wb', atomictemp=True) as fp:
144 with self.vfs(oid, 'wb', atomictemp=True) as fp:
145 fp.write(data)
145 fp.write(data)
146
146
147 # XXX: should we verify the content of the cache, and hardlink back to
147 # XXX: should we verify the content of the cache, and hardlink back to
148 # the local store on success, but truncate, write and link on failure?
148 # the local store on success, but truncate, write and link on failure?
149 if not self.cachevfs.exists(oid):
149 if not self.cachevfs.exists(oid):
150 self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
150 self.ui.note(_('lfs: adding %s to the usercache\n') % oid)
151 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
151 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
152
152
153 def read(self, oid, verify=True):
153 def read(self, oid, verify=True):
154 """Read blob from local blobstore."""
154 """Read blob from local blobstore."""
155 if not self.vfs.exists(oid):
155 if not self.vfs.exists(oid):
156 blob = self._read(self.cachevfs, oid, verify)
156 blob = self._read(self.cachevfs, oid, verify)
157
157
158 # Even if revlog will verify the content, it needs to be verified
158 # Even if revlog will verify the content, it needs to be verified
159 # now before making the hardlink to avoid propagating corrupt blobs.
159 # now before making the hardlink to avoid propagating corrupt blobs.
160 # Don't abort if corruption is detected, because `hg verify` will
160 # Don't abort if corruption is detected, because `hg verify` will
161 # give more useful info about the corruption- simply don't add the
161 # give more useful info about the corruption- simply don't add the
162 # hardlink.
162 # hardlink.
163 if verify or hashlib.sha256(blob).hexdigest() == oid:
163 if verify or hashlib.sha256(blob).hexdigest() == oid:
164 self.ui.note(_('lfs: found %s in the usercache\n') % oid)
164 self.ui.note(_('lfs: found %s in the usercache\n') % oid)
165 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
165 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
166 else:
166 else:
167 self.ui.note(_('lfs: found %s in the local lfs store\n') % oid)
167 self.ui.note(_('lfs: found %s in the local lfs store\n') % oid)
168 blob = self._read(self.vfs, oid, verify)
168 blob = self._read(self.vfs, oid, verify)
169 return blob
169 return blob
170
170
171 def _read(self, vfs, oid, verify):
171 def _read(self, vfs, oid, verify):
172 """Read blob (after verifying) from the given store"""
172 """Read blob (after verifying) from the given store"""
173 blob = vfs.read(oid)
173 blob = vfs.read(oid)
174 if verify:
174 if verify:
175 _verify(oid, blob)
175 _verify(oid, blob)
176 return blob
176 return blob
177
177
178 def has(self, oid):
178 def has(self, oid):
179 """Returns True if the local blobstore contains the requested blob,
179 """Returns True if the local blobstore contains the requested blob,
180 False otherwise."""
180 False otherwise."""
181 return self.cachevfs.exists(oid) or self.vfs.exists(oid)
181 return self.cachevfs.exists(oid) or self.vfs.exists(oid)
182
182
183 class _gitlfsremote(object):
183 class _gitlfsremote(object):
184
184
185 def __init__(self, repo, url):
185 def __init__(self, repo, url):
186 ui = repo.ui
186 ui = repo.ui
187 self.ui = ui
187 self.ui = ui
188 baseurl, authinfo = url.authinfo()
188 baseurl, authinfo = url.authinfo()
189 self.baseurl = baseurl.rstrip('/')
189 self.baseurl = baseurl.rstrip('/')
190 useragent = repo.ui.config('experimental', 'lfs.user-agent')
190 useragent = repo.ui.config('experimental', 'lfs.user-agent')
191 if not useragent:
191 if not useragent:
192 useragent = 'git-lfs/2.3.4 (Mercurial %s)' % util.version()
192 useragent = 'git-lfs/2.3.4 (Mercurial %s)' % util.version()
193 self.urlopener = urlmod.opener(ui, authinfo, useragent)
193 self.urlopener = urlmod.opener(ui, authinfo, useragent)
194 self.retry = ui.configint('lfs', 'retry')
194 self.retry = ui.configint('lfs', 'retry')
195
195
196 def writebatch(self, pointers, fromstore):
196 def writebatch(self, pointers, fromstore):
197 """Batch upload from local to remote blobstore."""
197 """Batch upload from local to remote blobstore."""
198 self._batch(_deduplicate(pointers), fromstore, 'upload')
198 self._batch(_deduplicate(pointers), fromstore, 'upload')
199
199
200 def readbatch(self, pointers, tostore):
200 def readbatch(self, pointers, tostore):
201 """Batch download from remote to local blostore."""
201 """Batch download from remote to local blostore."""
202 self._batch(_deduplicate(pointers), tostore, 'download')
202 self._batch(_deduplicate(pointers), tostore, 'download')
203
203
204 def _batchrequest(self, pointers, action):
204 def _batchrequest(self, pointers, action):
205 """Get metadata about objects pointed by pointers for given action
205 """Get metadata about objects pointed by pointers for given action
206
206
207 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
207 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
208 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
208 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
209 """
209 """
210 objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
210 objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
211 requestdata = json.dumps({
211 requestdata = json.dumps({
212 'objects': objects,
212 'objects': objects,
213 'operation': action,
213 'operation': action,
214 })
214 })
215 batchreq = util.urlreq.request('%s/objects/batch' % self.baseurl,
215 batchreq = util.urlreq.request('%s/objects/batch' % self.baseurl,
216 data=requestdata)
216 data=requestdata)
217 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
217 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
218 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
218 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
219 try:
219 try:
220 rawjson = self.urlopener.open(batchreq).read()
220 rsp = self.urlopener.open(batchreq)
221 rawjson = rsp.read()
221 except util.urlerr.httperror as ex:
222 except util.urlerr.httperror as ex:
222 raise LfsRemoteError(_('LFS HTTP error: %s (action=%s)')
223 raise LfsRemoteError(_('LFS HTTP error: %s (action=%s)')
223 % (ex, action))
224 % (ex, action))
224 try:
225 try:
225 response = json.loads(rawjson)
226 response = json.loads(rawjson)
226 except ValueError:
227 except ValueError:
227 raise LfsRemoteError(_('LFS server returns invalid JSON: %s')
228 raise LfsRemoteError(_('LFS server returns invalid JSON: %s')
228 % rawjson)
229 % rawjson)
230
231 if self.ui.debugflag:
232 self.ui.debug('Status: %d\n' % rsp.status)
233 # lfs-test-server and hg serve return headers in different order
234 self.ui.debug('%s\n'
235 % '\n'.join(sorted(str(rsp.info()).splitlines())))
236
237 if 'objects' in response:
238 response['objects'] = sorted(response['objects'],
239 key=lambda p: p['oid'])
240 self.ui.debug('%s\n'
241 % json.dumps(response, indent=2, sort_keys=True))
242
229 return response
243 return response
230
244
231 def _checkforservererror(self, pointers, responses, action):
245 def _checkforservererror(self, pointers, responses, action):
232 """Scans errors from objects
246 """Scans errors from objects
233
247
234 Raises LfsRemoteError if any objects have an error"""
248 Raises LfsRemoteError if any objects have an error"""
235 for response in responses:
249 for response in responses:
236 # The server should return 404 when objects cannot be found. Some
250 # The server should return 404 when objects cannot be found. Some
237 # server implementation (ex. lfs-test-server) does not set "error"
251 # server implementation (ex. lfs-test-server) does not set "error"
238 # but just removes "download" from "actions". Treat that case
252 # but just removes "download" from "actions". Treat that case
239 # as the same as 404 error.
253 # as the same as 404 error.
240 notfound = (response.get('error', {}).get('code') == 404
254 notfound = (response.get('error', {}).get('code') == 404
241 or (action == 'download'
255 or (action == 'download'
242 and action not in response.get('actions', [])))
256 and action not in response.get('actions', [])))
243 if notfound:
257 if notfound:
244 ptrmap = {p.oid(): p for p in pointers}
258 ptrmap = {p.oid(): p for p in pointers}
245 p = ptrmap.get(response['oid'], None)
259 p = ptrmap.get(response['oid'], None)
246 if p:
260 if p:
247 filename = getattr(p, 'filename', 'unknown')
261 filename = getattr(p, 'filename', 'unknown')
248 raise LfsRemoteError(
262 raise LfsRemoteError(
249 _(('LFS server error. Remote object '
263 _(('LFS server error. Remote object '
250 'for "%s" not found: %r')) % (filename, response))
264 'for "%s" not found: %r')) % (filename, response))
251 else:
265 else:
252 raise LfsRemoteError(
266 raise LfsRemoteError(
253 _('LFS server error. Unsolicited response for oid %s')
267 _('LFS server error. Unsolicited response for oid %s')
254 % response['oid'])
268 % response['oid'])
255 if 'error' in response:
269 if 'error' in response:
256 raise LfsRemoteError(_('LFS server error: %r') % response)
270 raise LfsRemoteError(_('LFS server error: %r') % response)
257
271
258 def _extractobjects(self, response, pointers, action):
272 def _extractobjects(self, response, pointers, action):
259 """extract objects from response of the batch API
273 """extract objects from response of the batch API
260
274
261 response: parsed JSON object returned by batch API
275 response: parsed JSON object returned by batch API
262 return response['objects'] filtered by action
276 return response['objects'] filtered by action
263 raise if any object has an error
277 raise if any object has an error
264 """
278 """
265 # Scan errors from objects - fail early
279 # Scan errors from objects - fail early
266 objects = response.get('objects', [])
280 objects = response.get('objects', [])
267 self._checkforservererror(pointers, objects, action)
281 self._checkforservererror(pointers, objects, action)
268
282
269 # Filter objects with given action. Practically, this skips uploading
283 # Filter objects with given action. Practically, this skips uploading
270 # objects which exist in the server.
284 # objects which exist in the server.
271 filteredobjects = [o for o in objects if action in o.get('actions', [])]
285 filteredobjects = [o for o in objects if action in o.get('actions', [])]
272
286
273 return filteredobjects
287 return filteredobjects
274
288
275 def _basictransfer(self, obj, action, localstore):
289 def _basictransfer(self, obj, action, localstore):
276 """Download or upload a single object using basic transfer protocol
290 """Download or upload a single object using basic transfer protocol
277
291
278 obj: dict, an object description returned by batch API
292 obj: dict, an object description returned by batch API
279 action: string, one of ['upload', 'download']
293 action: string, one of ['upload', 'download']
280 localstore: blobstore.local
294 localstore: blobstore.local
281
295
282 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
296 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
283 basic-transfers.md
297 basic-transfers.md
284 """
298 """
285 oid = pycompat.bytestr(obj['oid'])
299 oid = pycompat.bytestr(obj['oid'])
286
300
287 href = pycompat.bytestr(obj['actions'][action].get('href'))
301 href = pycompat.bytestr(obj['actions'][action].get('href'))
288 headers = obj['actions'][action].get('header', {}).items()
302 headers = obj['actions'][action].get('header', {}).items()
289
303
290 request = util.urlreq.request(href)
304 request = util.urlreq.request(href)
291 if action == 'upload':
305 if action == 'upload':
292 # If uploading blobs, read data from local blobstore.
306 # If uploading blobs, read data from local blobstore.
293 with localstore.open(oid) as fp:
307 with localstore.open(oid) as fp:
294 _verifyfile(oid, fp)
308 _verifyfile(oid, fp)
295 request.data = filewithprogress(localstore.open(oid), None)
309 request.data = filewithprogress(localstore.open(oid), None)
296 request.get_method = lambda: 'PUT'
310 request.get_method = lambda: 'PUT'
297
311
298 for k, v in headers:
312 for k, v in headers:
299 request.add_header(k, v)
313 request.add_header(k, v)
300
314
301 response = b''
315 response = b''
302 try:
316 try:
303 req = self.urlopener.open(request)
317 req = self.urlopener.open(request)
318
319 if self.ui.debugflag:
320 self.ui.debug('Status: %d\n' % req.status)
321 # lfs-test-server and hg serve return headers in different order
322 self.ui.debug('%s\n'
323 % '\n'.join(sorted(str(req.info()).splitlines())))
324
304 if action == 'download':
325 if action == 'download':
305 # If downloading blobs, store downloaded data to local blobstore
326 # If downloading blobs, store downloaded data to local blobstore
306 localstore.download(oid, req)
327 localstore.download(oid, req)
307 else:
328 else:
308 while True:
329 while True:
309 data = req.read(1048576)
330 data = req.read(1048576)
310 if not data:
331 if not data:
311 break
332 break
312 response += data
333 response += data
313 if response:
334 if response:
314 self.ui.debug('lfs %s response: %s' % (action, response))
335 self.ui.debug('lfs %s response: %s' % (action, response))
315 except util.urlerr.httperror as ex:
336 except util.urlerr.httperror as ex:
316 if self.ui.debugflag:
337 if self.ui.debugflag:
317 self.ui.debug('%s: %s\n' % (oid, ex.read()))
338 self.ui.debug('%s: %s\n' % (oid, ex.read()))
318 raise LfsRemoteError(_('HTTP error: %s (oid=%s, action=%s)')
339 raise LfsRemoteError(_('HTTP error: %s (oid=%s, action=%s)')
319 % (ex, oid, action))
340 % (ex, oid, action))
320
341
321 def _batch(self, pointers, localstore, action):
342 def _batch(self, pointers, localstore, action):
322 if action not in ['upload', 'download']:
343 if action not in ['upload', 'download']:
323 raise error.ProgrammingError('invalid Git-LFS action: %s' % action)
344 raise error.ProgrammingError('invalid Git-LFS action: %s' % action)
324
345
325 response = self._batchrequest(pointers, action)
346 response = self._batchrequest(pointers, action)
326 objects = self._extractobjects(response, pointers, action)
347 objects = self._extractobjects(response, pointers, action)
327 total = sum(x.get('size', 0) for x in objects)
348 total = sum(x.get('size', 0) for x in objects)
328 sizes = {}
349 sizes = {}
329 for obj in objects:
350 for obj in objects:
330 sizes[obj.get('oid')] = obj.get('size', 0)
351 sizes[obj.get('oid')] = obj.get('size', 0)
331 topic = {'upload': _('lfs uploading'),
352 topic = {'upload': _('lfs uploading'),
332 'download': _('lfs downloading')}[action]
353 'download': _('lfs downloading')}[action]
333 if len(objects) > 1:
354 if len(objects) > 1:
334 self.ui.note(_('lfs: need to transfer %d objects (%s)\n')
355 self.ui.note(_('lfs: need to transfer %d objects (%s)\n')
335 % (len(objects), util.bytecount(total)))
356 % (len(objects), util.bytecount(total)))
336 self.ui.progress(topic, 0, total=total)
357 self.ui.progress(topic, 0, total=total)
337 def transfer(chunk):
358 def transfer(chunk):
338 for obj in chunk:
359 for obj in chunk:
339 objsize = obj.get('size', 0)
360 objsize = obj.get('size', 0)
340 if self.ui.verbose:
361 if self.ui.verbose:
341 if action == 'download':
362 if action == 'download':
342 msg = _('lfs: downloading %s (%s)\n')
363 msg = _('lfs: downloading %s (%s)\n')
343 elif action == 'upload':
364 elif action == 'upload':
344 msg = _('lfs: uploading %s (%s)\n')
365 msg = _('lfs: uploading %s (%s)\n')
345 self.ui.note(msg % (obj.get('oid'),
366 self.ui.note(msg % (obj.get('oid'),
346 util.bytecount(objsize)))
367 util.bytecount(objsize)))
347 retry = self.retry
368 retry = self.retry
348 while True:
369 while True:
349 try:
370 try:
350 self._basictransfer(obj, action, localstore)
371 self._basictransfer(obj, action, localstore)
351 yield 1, obj.get('oid')
372 yield 1, obj.get('oid')
352 break
373 break
353 except socket.error as ex:
374 except socket.error as ex:
354 if retry > 0:
375 if retry > 0:
355 self.ui.note(
376 self.ui.note(
356 _('lfs: failed: %r (remaining retry %d)\n')
377 _('lfs: failed: %r (remaining retry %d)\n')
357 % (ex, retry))
378 % (ex, retry))
358 retry -= 1
379 retry -= 1
359 continue
380 continue
360 raise
381 raise
361
382
362 # Until https multiplexing gets sorted out
383 # Until https multiplexing gets sorted out
363 if self.ui.configbool('experimental', 'lfs.worker-enable'):
384 if self.ui.configbool('experimental', 'lfs.worker-enable'):
364 oids = worker.worker(self.ui, 0.1, transfer, (),
385 oids = worker.worker(self.ui, 0.1, transfer, (),
365 sorted(objects, key=lambda o: o.get('oid')))
386 sorted(objects, key=lambda o: o.get('oid')))
366 else:
387 else:
367 oids = transfer(sorted(objects, key=lambda o: o.get('oid')))
388 oids = transfer(sorted(objects, key=lambda o: o.get('oid')))
368
389
369 processed = 0
390 processed = 0
370 blobs = 0
391 blobs = 0
371 for _one, oid in oids:
392 for _one, oid in oids:
372 processed += sizes[oid]
393 processed += sizes[oid]
373 blobs += 1
394 blobs += 1
374 self.ui.progress(topic, processed, total=total)
395 self.ui.progress(topic, processed, total=total)
375 self.ui.note(_('lfs: processed: %s\n') % oid)
396 self.ui.note(_('lfs: processed: %s\n') % oid)
376 self.ui.progress(topic, pos=None, total=total)
397 self.ui.progress(topic, pos=None, total=total)
377
398
378 if blobs > 0:
399 if blobs > 0:
379 if action == 'upload':
400 if action == 'upload':
380 self.ui.status(_('lfs: uploaded %d files (%s)\n')
401 self.ui.status(_('lfs: uploaded %d files (%s)\n')
381 % (blobs, util.bytecount(processed)))
402 % (blobs, util.bytecount(processed)))
382 # TODO: coalesce the download requests, and comment this in
403 # TODO: coalesce the download requests, and comment this in
383 #elif action == 'download':
404 #elif action == 'download':
384 # self.ui.status(_('lfs: downloaded %d files (%s)\n')
405 # self.ui.status(_('lfs: downloaded %d files (%s)\n')
385 # % (blobs, util.bytecount(processed)))
406 # % (blobs, util.bytecount(processed)))
386
407
387 def __del__(self):
408 def __del__(self):
388 # copied from mercurial/httppeer.py
409 # copied from mercurial/httppeer.py
389 urlopener = getattr(self, 'urlopener', None)
410 urlopener = getattr(self, 'urlopener', None)
390 if urlopener:
411 if urlopener:
391 for h in urlopener.handlers:
412 for h in urlopener.handlers:
392 h.close()
413 h.close()
393 getattr(h, "close_all", lambda : None)()
414 getattr(h, "close_all", lambda : None)()
394
415
395 class _dummyremote(object):
416 class _dummyremote(object):
396 """Dummy store storing blobs to temp directory."""
417 """Dummy store storing blobs to temp directory."""
397
418
398 def __init__(self, repo, url):
419 def __init__(self, repo, url):
399 fullpath = repo.vfs.join('lfs', url.path)
420 fullpath = repo.vfs.join('lfs', url.path)
400 self.vfs = lfsvfs(fullpath)
421 self.vfs = lfsvfs(fullpath)
401
422
402 def writebatch(self, pointers, fromstore):
423 def writebatch(self, pointers, fromstore):
403 for p in _deduplicate(pointers):
424 for p in _deduplicate(pointers):
404 content = fromstore.read(p.oid(), verify=True)
425 content = fromstore.read(p.oid(), verify=True)
405 with self.vfs(p.oid(), 'wb', atomictemp=True) as fp:
426 with self.vfs(p.oid(), 'wb', atomictemp=True) as fp:
406 fp.write(content)
427 fp.write(content)
407
428
408 def readbatch(self, pointers, tostore):
429 def readbatch(self, pointers, tostore):
409 for p in _deduplicate(pointers):
430 for p in _deduplicate(pointers):
410 with self.vfs(p.oid(), 'rb') as fp:
431 with self.vfs(p.oid(), 'rb') as fp:
411 tostore.download(p.oid(), fp)
432 tostore.download(p.oid(), fp)
412
433
413 class _nullremote(object):
434 class _nullremote(object):
414 """Null store storing blobs to /dev/null."""
435 """Null store storing blobs to /dev/null."""
415
436
416 def __init__(self, repo, url):
437 def __init__(self, repo, url):
417 pass
438 pass
418
439
419 def writebatch(self, pointers, fromstore):
440 def writebatch(self, pointers, fromstore):
420 pass
441 pass
421
442
422 def readbatch(self, pointers, tostore):
443 def readbatch(self, pointers, tostore):
423 pass
444 pass
424
445
425 class _promptremote(object):
446 class _promptremote(object):
426 """Prompt user to set lfs.url when accessed."""
447 """Prompt user to set lfs.url when accessed."""
427
448
428 def __init__(self, repo, url):
449 def __init__(self, repo, url):
429 pass
450 pass
430
451
431 def writebatch(self, pointers, fromstore, ui=None):
452 def writebatch(self, pointers, fromstore, ui=None):
432 self._prompt()
453 self._prompt()
433
454
434 def readbatch(self, pointers, tostore, ui=None):
455 def readbatch(self, pointers, tostore, ui=None):
435 self._prompt()
456 self._prompt()
436
457
437 def _prompt(self):
458 def _prompt(self):
438 raise error.Abort(_('lfs.url needs to be configured'))
459 raise error.Abort(_('lfs.url needs to be configured'))
439
460
440 _storemap = {
461 _storemap = {
441 'https': _gitlfsremote,
462 'https': _gitlfsremote,
442 'http': _gitlfsremote,
463 'http': _gitlfsremote,
443 'file': _dummyremote,
464 'file': _dummyremote,
444 'null': _nullremote,
465 'null': _nullremote,
445 None: _promptremote,
466 None: _promptremote,
446 }
467 }
447
468
448 def _deduplicate(pointers):
469 def _deduplicate(pointers):
449 """Remove any duplicate oids that exist in the list"""
470 """Remove any duplicate oids that exist in the list"""
450 reduced = util.sortdict()
471 reduced = util.sortdict()
451 for p in pointers:
472 for p in pointers:
452 reduced[p.oid()] = p
473 reduced[p.oid()] = p
453 return reduced.values()
474 return reduced.values()
454
475
455 def _verify(oid, content):
476 def _verify(oid, content):
456 realoid = hashlib.sha256(content).hexdigest()
477 realoid = hashlib.sha256(content).hexdigest()
457 if realoid != oid:
478 if realoid != oid:
458 raise error.Abort(_('detected corrupt lfs object: %s') % oid,
479 raise error.Abort(_('detected corrupt lfs object: %s') % oid,
459 hint=_('run hg verify'))
480 hint=_('run hg verify'))
460
481
461 def _verifyfile(oid, fp):
482 def _verifyfile(oid, fp):
462 sha256 = hashlib.sha256()
483 sha256 = hashlib.sha256()
463 while True:
484 while True:
464 data = fp.read(1024 * 1024)
485 data = fp.read(1024 * 1024)
465 if not data:
486 if not data:
466 break
487 break
467 sha256.update(data)
488 sha256.update(data)
468 realoid = sha256.hexdigest()
489 realoid = sha256.hexdigest()
469 if realoid != oid:
490 if realoid != oid:
470 raise error.Abort(_('detected corrupt lfs object: %s') % oid,
491 raise error.Abort(_('detected corrupt lfs object: %s') % oid,
471 hint=_('run hg verify'))
492 hint=_('run hg verify'))
472
493
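# Editorial aside, not part of this patch: an LFS oid is simply the SHA-256
# hex digest of the blob's bytes, which is all that _verify() and
# _verifyfile() above check before declaring corruption.  A minimal,
# self-contained illustration, using the blob content and oid that the test
# file below reports for file 'a':
import hashlib

example_blob = b'THIS-IS-LFS\n'
assert hashlib.sha256(example_blob).hexdigest() == \
    '31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b'
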
473 def remote(repo):
494 def remote(repo):
474 """remotestore factory. return a store in _storemap depending on config"""
495 """remotestore factory. return a store in _storemap depending on config"""
475 url = util.url(repo.ui.config('lfs', 'url') or '')
496 url = util.url(repo.ui.config('lfs', 'url') or '')
476 scheme = url.scheme
497 scheme = url.scheme
477 if scheme not in _storemap:
498 if scheme not in _storemap:
478 raise error.Abort(_('lfs: unknown url scheme: %s') % scheme)
499 raise error.Abort(_('lfs: unknown url scheme: %s') % scheme)
479 return _storemap[scheme](repo, url)
500 return _storemap[scheme](repo, url)
480
501
481 class LfsRemoteError(error.RevlogError):
502 class LfsRemoteError(error.RevlogError):
482 pass
503 pass
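
For context on the code above: _batchrequest() posts a Git-LFS Batch API
request and, with this change, prints the HTTP status, the sorted response
headers, and the pretty-printed JSON reply when --debug is in effect.  The
sketch below reproduces that exchange with only the Python 3 standard library
instead of Mercurial's util.urlreq/urlmod wrappers; the server URL, oid, and
size in the commented call are illustrative assumptions, not values taken from
this changeset.

    import json
    import urllib.request

    def batch_request(baseurl, action, objects):
        """POST a Git-LFS Batch API request and return the decoded reply.

        objects is a list of {'oid': <sha256 hex>, 'size': <int>} dicts and
        action is 'upload' or 'download'.
        """
        body = json.dumps({'objects': objects,
                           'operation': action}).encode('ascii')
        req = urllib.request.Request(baseurl.rstrip('/') + '/objects/batch',
                                     data=body)
        req.add_header('Accept', 'application/vnd.git-lfs+json')
        req.add_header('Content-Type', 'application/vnd.git-lfs+json')
        with urllib.request.urlopen(req) as rsp:
            # The same information the patch now dumps under --debug:
            print('Status: %d' % rsp.status)
            print('\n'.join(sorted(str(rsp.headers).splitlines())))
            return json.loads(rsp.read())

    # Hypothetical usage against a local test server:
    # batch_request('http://localhost:8080', 'download',
    #               [{'oid': '31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c5'
    #                        '3d6167d5b69ac45149b38e5b', 'size': 12}])
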
@@ -1,377 +1,772 @@
1 #require lfs-test-server
1 #require lfs-test-server
2
2
3 $ LFS_LISTEN="tcp://:$HGPORT"
3 $ LFS_LISTEN="tcp://:$HGPORT"
4 $ LFS_HOST="localhost:$HGPORT"
4 $ LFS_HOST="localhost:$HGPORT"
5 $ LFS_PUBLIC=1
5 $ LFS_PUBLIC=1
6 $ export LFS_LISTEN LFS_HOST LFS_PUBLIC
6 $ export LFS_LISTEN LFS_HOST LFS_PUBLIC
7 #if no-windows
7 #if no-windows
8 $ lfs-test-server &> lfs-server.log &
8 $ lfs-test-server &> lfs-server.log &
9 $ echo $! >> $DAEMON_PIDS
9 $ echo $! >> $DAEMON_PIDS
10 #else
10 #else
11 $ cat >> $TESTTMP/spawn.py <<EOF
11 $ cat >> $TESTTMP/spawn.py <<EOF
12 > import os
12 > import os
13 > import subprocess
13 > import subprocess
14 > import sys
14 > import sys
15 >
15 >
16 > for path in os.environ["PATH"].split(os.pathsep):
16 > for path in os.environ["PATH"].split(os.pathsep):
17 > exe = os.path.join(path, 'lfs-test-server.exe')
17 > exe = os.path.join(path, 'lfs-test-server.exe')
18 > if os.path.exists(exe):
18 > if os.path.exists(exe):
19 > with open('lfs-server.log', 'wb') as out:
19 > with open('lfs-server.log', 'wb') as out:
20 > p = subprocess.Popen(exe, stdout=out, stderr=out)
20 > p = subprocess.Popen(exe, stdout=out, stderr=out)
21 > sys.stdout.write('%s\n' % p.pid)
21 > sys.stdout.write('%s\n' % p.pid)
22 > sys.exit(0)
22 > sys.exit(0)
23 > sys.exit(1)
23 > sys.exit(1)
24 > EOF
24 > EOF
25 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
25 $ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
26 #endif
26 #endif
27
27
28 $ cat >> $HGRCPATH <<EOF
28 $ cat >> $HGRCPATH <<EOF
29 > [extensions]
29 > [extensions]
30 > lfs=
30 > lfs=
31 > [lfs]
31 > [lfs]
32 > url=http://foo:bar@$LFS_HOST/
32 > url=http://foo:bar@$LFS_HOST/
33 > track=all()
33 > track=all()
34 > EOF
34 > EOF
35
35
36 $ hg init repo1
36 $ hg init repo1
37 $ cd repo1
37 $ cd repo1
38 $ echo THIS-IS-LFS > a
38 $ echo THIS-IS-LFS > a
39 $ hg commit -m a -A a
39 $ hg commit -m a -A a
40
40
41 A push can be serviced directly from the usercache if it isn't in the local
41 A push can be serviced directly from the usercache if it isn't in the local
42 store.
42 store.
43
43
44 $ hg init ../repo2
44 $ hg init ../repo2
45 $ mv .hg/store/lfs .hg/store/lfs_
45 $ mv .hg/store/lfs .hg/store/lfs_
46 $ hg push ../repo2 --debug
46 $ hg push ../repo2 --debug
47 http auth: user foo, password ***
47 http auth: user foo, password ***
48 pushing to ../repo2
48 pushing to ../repo2
49 http auth: user foo, password ***
49 http auth: user foo, password ***
50 query 1; heads
50 query 1; heads
51 searching for changes
51 searching for changes
52 1 total queries in *s (glob)
52 1 total queries in *s (glob)
53 listing keys for "phases"
53 listing keys for "phases"
54 checking for updated bookmarks
54 checking for updated bookmarks
55 listing keys for "bookmarks"
55 listing keys for "bookmarks"
56 lfs: computing set of blobs to upload
56 lfs: computing set of blobs to upload
57 Status: 200
58 Content-Length: 309
59 Content-Type: application/vnd.git-lfs+json
60 Date: $HTTP_DATE$
61 {
62 "objects": [
63 {
64 "actions": {
65 "upload": {
66 "expires_at": "$ISO_8601_DATE_TIME$",
67 "header": {
68 "Accept": "application/vnd.git-lfs"
69 },
70 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
71 }
72 },
73 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
74 "size": 12
75 }
76 ]
77 }
57 lfs: uploading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
78 lfs: uploading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
79 Status: 200
80 Content-Length: 0
81 Content-Type: text/plain; charset=utf-8
82 Date: $HTTP_DATE$
58 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
83 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
59 lfs: uploaded 1 files (12 bytes)
84 lfs: uploaded 1 files (12 bytes)
60 1 changesets found
85 1 changesets found
61 list of changesets:
86 list of changesets:
62 99a7098854a3984a5c9eab0fc7a2906697b7cb5c
87 99a7098854a3984a5c9eab0fc7a2906697b7cb5c
63 bundle2-output-bundle: "HG20", 4 parts total
88 bundle2-output-bundle: "HG20", 4 parts total
64 bundle2-output-part: "replycaps" 191 bytes payload
89 bundle2-output-part: "replycaps" 191 bytes payload
65 bundle2-output-part: "check:heads" streamed payload
90 bundle2-output-part: "check:heads" streamed payload
66 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
91 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
67 bundle2-output-part: "phase-heads" 24 bytes payload
92 bundle2-output-part: "phase-heads" 24 bytes payload
68 bundle2-input-bundle: with-transaction
93 bundle2-input-bundle: with-transaction
69 bundle2-input-part: "replycaps" supported
94 bundle2-input-part: "replycaps" supported
70 bundle2-input-part: total payload size 191
95 bundle2-input-part: total payload size 191
71 bundle2-input-part: "check:heads" supported
96 bundle2-input-part: "check:heads" supported
72 bundle2-input-part: total payload size 20
97 bundle2-input-part: total payload size 20
73 bundle2-input-part: "changegroup" (params: 1 mandatory) supported
98 bundle2-input-part: "changegroup" (params: 1 mandatory) supported
74 adding changesets
99 adding changesets
75 add changeset 99a7098854a3
100 add changeset 99a7098854a3
76 adding manifests
101 adding manifests
77 adding file changes
102 adding file changes
78 adding a revisions
103 adding a revisions
79 added 1 changesets with 1 changes to 1 files
104 added 1 changesets with 1 changes to 1 files
80 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
105 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
81 bundle2-input-part: total payload size 617
106 bundle2-input-part: total payload size 617
82 bundle2-input-part: "phase-heads" supported
107 bundle2-input-part: "phase-heads" supported
83 bundle2-input-part: total payload size 24
108 bundle2-input-part: total payload size 24
84 bundle2-input-bundle: 3 parts total
109 bundle2-input-bundle: 3 parts total
85 updating the branch cache
110 updating the branch cache
86 bundle2-output-bundle: "HG20", 1 parts total
111 bundle2-output-bundle: "HG20", 1 parts total
87 bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
112 bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
88 bundle2-input-bundle: no-transaction
113 bundle2-input-bundle: no-transaction
89 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
114 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
90 bundle2-input-bundle: 0 parts total
115 bundle2-input-bundle: 0 parts total
91 listing keys for "phases"
116 listing keys for "phases"
92 $ mv .hg/store/lfs_ .hg/store/lfs
117 $ mv .hg/store/lfs_ .hg/store/lfs
93
118
94 Clear the cache to force a download
119 Clear the cache to force a download
95 $ rm -rf `hg config lfs.usercache`
120 $ rm -rf `hg config lfs.usercache`
96 $ cd ../repo2
121 $ cd ../repo2
97 $ hg update tip --debug
122 $ hg update tip --debug
98 http auth: user foo, password ***
123 http auth: user foo, password ***
99 resolving manifests
124 resolving manifests
100 branchmerge: False, force: False, partial: False
125 branchmerge: False, force: False, partial: False
101 ancestor: 000000000000, local: 000000000000+, remote: 99a7098854a3
126 ancestor: 000000000000, local: 000000000000+, remote: 99a7098854a3
127 Status: 200
128 Content-Length: 311
129 Content-Type: application/vnd.git-lfs+json
130 Date: $HTTP_DATE$
131 {
132 "objects": [
133 {
134 "actions": {
135 "download": {
136 "expires_at": "$ISO_8601_DATE_TIME$",
137 "header": {
138 "Accept": "application/vnd.git-lfs"
139 },
140 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
141 }
142 },
143 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
144 "size": 12
145 }
146 ]
147 }
102 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
148 lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
149 Status: 200
150 Content-Length: 12
151 Content-Type: text/plain; charset=utf-8
152 Date: $HTTP_DATE$
103 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
153 lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
104 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
154 lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
105 a: remote created -> g
155 a: remote created -> g
106 getting a
156 getting a
107 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
157 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
108 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
158 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
109
159
110 When the server has some blobs already
160 When the server has some blobs already
111
161
112 $ hg mv a b
162 $ hg mv a b
113 $ echo ANOTHER-LARGE-FILE > c
163 $ echo ANOTHER-LARGE-FILE > c
114 $ echo ANOTHER-LARGE-FILE2 > d
164 $ echo ANOTHER-LARGE-FILE2 > d
115 $ hg commit -m b-and-c -A b c d
165 $ hg commit -m b-and-c -A b c d
116 $ hg push ../repo1 --debug
166 $ hg push ../repo1 --debug
117 http auth: user foo, password ***
167 http auth: user foo, password ***
118 pushing to ../repo1
168 pushing to ../repo1
119 http auth: user foo, password ***
169 http auth: user foo, password ***
120 query 1; heads
170 query 1; heads
121 searching for changes
171 searching for changes
122 all remote heads known locally
172 all remote heads known locally
123 listing keys for "phases"
173 listing keys for "phases"
124 checking for updated bookmarks
174 checking for updated bookmarks
125 listing keys for "bookmarks"
175 listing keys for "bookmarks"
126 listing keys for "bookmarks"
176 listing keys for "bookmarks"
127 lfs: computing set of blobs to upload
177 lfs: computing set of blobs to upload
178 Status: 200
179 Content-Length: 901
180 Content-Type: application/vnd.git-lfs+json
181 Date: $HTTP_DATE$
182 {
183 "objects": [
184 {
185 "actions": {
186 "download": {
187 "expires_at": "$ISO_8601_DATE_TIME$",
188 "header": {
189 "Accept": "application/vnd.git-lfs"
190 },
191 "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
192 }
193 },
194 "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
195 "size": 12
196 },
197 {
198 "actions": {
199 "upload": {
200 "expires_at": "$ISO_8601_DATE_TIME$",
201 "header": {
202 "Accept": "application/vnd.git-lfs"
203 },
204 "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
205 }
206 },
207 "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
208 "size": 20
209 },
210 {
211 "actions": {
212 "upload": {
213 "expires_at": "$ISO_8601_DATE_TIME$",
214 "header": {
215 "Accept": "application/vnd.git-lfs"
216 },
217 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
218 }
219 },
220 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
221 "size": 19
222 }
223 ]
224 }
128 lfs: need to transfer 2 objects (39 bytes)
225 lfs: need to transfer 2 objects (39 bytes)
129 lfs: uploading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
226 lfs: uploading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
227 Status: 200
228 Content-Length: 0
229 Content-Type: text/plain; charset=utf-8
230 Date: $HTTP_DATE$
130 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
231 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
131 lfs: uploading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
232 lfs: uploading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
233 Status: 200
234 Content-Length: 0
235 Content-Type: text/plain; charset=utf-8
236 Date: $HTTP_DATE$
132 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
237 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
133 lfs: uploaded 2 files (39 bytes)
238 lfs: uploaded 2 files (39 bytes)
134 1 changesets found
239 1 changesets found
135 list of changesets:
240 list of changesets:
136 dfca2c9e2ef24996aa61ba2abd99277d884b3d63
241 dfca2c9e2ef24996aa61ba2abd99277d884b3d63
137 bundle2-output-bundle: "HG20", 5 parts total
242 bundle2-output-bundle: "HG20", 5 parts total
138 bundle2-output-part: "replycaps" 191 bytes payload
243 bundle2-output-part: "replycaps" 191 bytes payload
139 bundle2-output-part: "check:phases" 24 bytes payload
244 bundle2-output-part: "check:phases" 24 bytes payload
140 bundle2-output-part: "check:heads" streamed payload
245 bundle2-output-part: "check:heads" streamed payload
141 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
246 bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
142 bundle2-output-part: "phase-heads" 24 bytes payload
247 bundle2-output-part: "phase-heads" 24 bytes payload
143 bundle2-input-bundle: with-transaction
248 bundle2-input-bundle: with-transaction
144 bundle2-input-part: "replycaps" supported
249 bundle2-input-part: "replycaps" supported
145 bundle2-input-part: total payload size 191
250 bundle2-input-part: total payload size 191
146 bundle2-input-part: "check:phases" supported
251 bundle2-input-part: "check:phases" supported
147 bundle2-input-part: total payload size 24
252 bundle2-input-part: total payload size 24
148 bundle2-input-part: "check:heads" supported
253 bundle2-input-part: "check:heads" supported
149 bundle2-input-part: total payload size 20
254 bundle2-input-part: total payload size 20
150 bundle2-input-part: "changegroup" (params: 1 mandatory) supported
255 bundle2-input-part: "changegroup" (params: 1 mandatory) supported
151 adding changesets
256 adding changesets
152 add changeset dfca2c9e2ef2
257 add changeset dfca2c9e2ef2
153 adding manifests
258 adding manifests
154 adding file changes
259 adding file changes
155 adding b revisions
260 adding b revisions
156 adding c revisions
261 adding c revisions
157 adding d revisions
262 adding d revisions
158 added 1 changesets with 3 changes to 3 files
263 added 1 changesets with 3 changes to 3 files
159 bundle2-input-part: total payload size 1315
264 bundle2-input-part: total payload size 1315
160 bundle2-input-part: "phase-heads" supported
265 bundle2-input-part: "phase-heads" supported
161 bundle2-input-part: total payload size 24
266 bundle2-input-part: total payload size 24
162 bundle2-input-bundle: 4 parts total
267 bundle2-input-bundle: 4 parts total
163 updating the branch cache
268 updating the branch cache
164 bundle2-output-bundle: "HG20", 1 parts total
269 bundle2-output-bundle: "HG20", 1 parts total
165 bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
270 bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
166 bundle2-input-bundle: no-transaction
271 bundle2-input-bundle: no-transaction
167 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
272 bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
168 bundle2-input-bundle: 0 parts total
273 bundle2-input-bundle: 0 parts total
169 listing keys for "phases"
274 listing keys for "phases"
170
275
171 Clear the cache to force a download
276 Clear the cache to force a download
172 $ rm -rf `hg config lfs.usercache`
277 $ rm -rf `hg config lfs.usercache`
173 $ hg --repo ../repo1 update tip --debug
278 $ hg --repo ../repo1 update tip --debug
174 http auth: user foo, password ***
279 http auth: user foo, password ***
175 resolving manifests
280 resolving manifests
176 branchmerge: False, force: False, partial: False
281 branchmerge: False, force: False, partial: False
177 ancestor: 99a7098854a3, local: 99a7098854a3+, remote: dfca2c9e2ef2
282 ancestor: 99a7098854a3, local: 99a7098854a3+, remote: dfca2c9e2ef2
283 Status: 200
284 Content-Length: 608
285 Content-Type: application/vnd.git-lfs+json
286 Date: $HTTP_DATE$
287 {
288 "objects": [
289 {
290 "actions": {
291 "download": {
292 "expires_at": "$ISO_8601_DATE_TIME$",
293 "header": {
294 "Accept": "application/vnd.git-lfs"
295 },
296 "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
297 }
298 },
299 "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
300 "size": 20
301 },
302 {
303 "actions": {
304 "download": {
305 "expires_at": "$ISO_8601_DATE_TIME$",
306 "header": {
307 "Accept": "application/vnd.git-lfs"
308 },
309 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
310 }
311 },
312 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
313 "size": 19
314 }
315 ]
316 }
178 lfs: need to transfer 2 objects (39 bytes)
317 lfs: need to transfer 2 objects (39 bytes)
179 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
318 lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
319 Status: 200
320 Content-Length: 20
321 Content-Type: text/plain; charset=utf-8
322 Date: $HTTP_DATE$
180 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
323 lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
181 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
324 lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
182 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
325 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
326 Status: 200
327 Content-Length: 19
328 Content-Type: text/plain; charset=utf-8
329 Date: $HTTP_DATE$
183 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
330 lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
184 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
331 lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
185 b: remote created -> g
332 b: remote created -> g
186 getting b
333 getting b
187 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
334 lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
188 c: remote created -> g
335 c: remote created -> g
189 getting c
336 getting c
190 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
337 lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
191 d: remote created -> g
338 d: remote created -> g
192 getting d
339 getting d
193 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
340 lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
194 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
341 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
195
342
196 Test a corrupt file download, but clear the cache first to force a download.
343 Test a corrupt file download, but clear the cache first to force a download.
197
344
198 $ rm -rf `hg config lfs.usercache`
345 $ rm -rf `hg config lfs.usercache`
199 $ cp $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 blob
346 $ cp $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 blob
200 $ echo 'damage' > $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
347 $ echo 'damage' > $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
201 $ rm ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
348 $ rm ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
202 $ rm ../repo1/*
349 $ rm ../repo1/*
203
350
204 $ hg --repo ../repo1 update -C tip --debug
351 $ hg --repo ../repo1 update -C tip --debug
205 http auth: user foo, password ***
352 http auth: user foo, password ***
206 resolving manifests
353 resolving manifests
207 branchmerge: False, force: True, partial: False
354 branchmerge: False, force: True, partial: False
208 ancestor: dfca2c9e2ef2+, local: dfca2c9e2ef2+, remote: dfca2c9e2ef2
355 ancestor: dfca2c9e2ef2+, local: dfca2c9e2ef2+, remote: dfca2c9e2ef2
356 Status: 200
357 Content-Length: 311
358 Content-Type: application/vnd.git-lfs+json
359 Date: $HTTP_DATE$
360 {
361 "objects": [
362 {
363 "actions": {
364 "download": {
365 "expires_at": "$ISO_8601_DATE_TIME$",
366 "header": {
367 "Accept": "application/vnd.git-lfs"
368 },
369 "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
370 }
371 },
372 "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
373 "size": 19
374 }
375 ]
376 }
209 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
377 lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
378 Status: 200
379 Content-Length: 7
380 Content-Type: text/plain; charset=utf-8
381 Date: $HTTP_DATE$
210 abort: corrupt remote lfs object: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
382 abort: corrupt remote lfs object: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
211 [255]
383 [255]
212
384
213 The corrupted blob is not added to the usercache or local store
385 The corrupted blob is not added to the usercache or local store
214
386
215 $ test -f ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
387 $ test -f ../repo1/.hg/store/lfs/objects/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
216 [1]
388 [1]
217 $ test -f `hg config lfs.usercache`/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
389 $ test -f `hg config lfs.usercache`/d1/1e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
218 [1]
390 [1]
219 $ cp blob $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
391 $ cp blob $TESTTMP/lfs-content/d1/1e/1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998

Test a corrupted file upload

$ echo 'another lfs blob' > b
$ hg ci -m 'another blob'
$ echo 'damage' > .hg/store/lfs/objects/e6/59058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
$ hg push --debug ../repo1
http auth: user foo, password ***
pushing to ../repo1
http auth: user foo, password ***
query 1; heads
searching for changes
all remote heads known locally
listing keys for "phases"
checking for updated bookmarks
listing keys for "bookmarks"
listing keys for "bookmarks"
lfs: computing set of blobs to upload
Status: 200
Content-Length: 309
Content-Type: application/vnd.git-lfs+json
Date: $HTTP_DATE$
{
  "objects": [
    {
      "actions": {
        "upload": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0"
        }
      },
      "oid": "e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0",
      "size": 17
    }
  ]
}
lfs: uploading e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0 (17 bytes)
abort: detected corrupt lfs object: e659058e26b07b39d2a9c7145b3f99b41f797b6621c8076600e9cb7ee88291f0
(run hg verify)
[255]

Archive will prefetch blobs in a group

$ rm -rf .hg/store/lfs `hg config lfs.usercache`
$ hg archive --debug -r 1 ../archive
http auth: user foo, password ***
Status: 200
Content-Length: 905
Content-Type: application/vnd.git-lfs+json
Date: $HTTP_DATE$
{
  "objects": [
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
        }
      },
      "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
      "size": 12
    },
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
        }
      },
      "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
      "size": 20
    },
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
        }
      },
      "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
      "size": 19
    }
  ]
}
lfs: need to transfer 3 objects (51 bytes)
lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
Status: 200
Content-Length: 12
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
Status: 200
Content-Length: 20
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
Status: 200
Content-Length: 19
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
$ find ../archive | sort
../archive
../archive/.hg_archival.txt
../archive/a
../archive/b
../archive/c
../archive/d
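The three-object payload above is what lets archive (and the cat/revert cases below) prefetch blobs in a group: a single batch request lists every needed oid, and the server answers with one download action per object. A rough sketch of such a request, assuming the Git-LFS batch API endpoint and payload shape shown in the responses (illustrative helper, not the extension's code):

    import json
    from urllib.request import Request, urlopen

    def batch_request(baseurl, operation, objects):
        # objects is a list of {'oid': ..., 'size': ...} dicts, as in the
        # payloads above; one request covers the whole group of blobs.
        payload = {'operation': operation, 'objects': objects}
        req = Request(baseurl + '/objects/batch',
                      data=json.dumps(payload).encode('ascii'),
                      headers={'Accept': 'application/vnd.git-lfs+json',
                               'Content-Type': 'application/vnd.git-lfs+json'})
        return json.load(urlopen(req))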

Cat will prefetch blobs in a group

$ rm -rf .hg/store/lfs `hg config lfs.usercache`
$ hg cat --debug -r 1 a b c
http auth: user foo, password ***
Status: 200
Content-Length: 608
Content-Type: application/vnd.git-lfs+json
Date: $HTTP_DATE$
{
  "objects": [
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
        }
      },
      "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
      "size": 12
    },
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
        }
      },
      "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
      "size": 19
    }
  ]
}
lfs: need to transfer 2 objects (31 bytes)
lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
Status: 200
Content-Length: 12
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
Status: 200
Content-Length: 19
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
THIS-IS-LFS
lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
THIS-IS-LFS
lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
ANOTHER-LARGE-FILE

Revert will prefetch blobs in a group

$ rm -rf .hg/store/lfs
$ rm -rf `hg config lfs.usercache`
$ rm *
$ hg revert --all -r 1 --debug
http auth: user foo, password ***
adding a
reverting b
reverting c
reverting d
Status: 200
Content-Length: 905
Content-Type: application/vnd.git-lfs+json
Date: $HTTP_DATE$
{
  "objects": [
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b"
        }
      },
      "oid": "31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b",
      "size": 12
    },
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19"
        }
      },
      "oid": "37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19",
      "size": 20
    },
    {
      "actions": {
        "download": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998"
        }
      },
      "oid": "d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998",
      "size": 19
    }
  ]
}
lfs: need to transfer 3 objects (51 bytes)
lfs: downloading 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b (12 bytes)
Status: 200
Content-Length: 12
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b to the usercache
lfs: processed: 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b
lfs: downloading 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 (20 bytes)
Status: 200
Content-Length: 20
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 to the usercache
lfs: processed: 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19
lfs: downloading d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (19 bytes)
Status: 200
Content-Length: 19
Content-Type: text/plain; charset=utf-8
Date: $HTTP_DATE$
lfs: adding d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 to the usercache
lfs: processed: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998
lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store
lfs: found d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 in the local lfs store
lfs: found 37a65ab78d5ecda767e8622c248b5dbff1e68b1678ab0e730d5eb8601ec8ad19 in the local lfs store
lfs: found 31cf46fbc4ecd458a0943c5b4881f1f5a6dd36c53d6167d5b69ac45149b38e5b in the local lfs store

Check error message when the remote is missing a blob:

$ echo FFFFF > b
$ hg commit -m b -A b
$ echo FFFFF >> b
$ hg commit -m b b
$ rm -rf .hg/store/lfs
$ rm -rf `hg config lfs.usercache`
$ hg update -C '.^' --debug
http auth: user foo, password ***
resolving manifests
branchmerge: False, force: True, partial: False
ancestor: 62fdbaf221c6+, local: 62fdbaf221c6+, remote: ef0564edf47e
Status: 200
Content-Length: 308
Content-Type: application/vnd.git-lfs+json
Date: $HTTP_DATE$
{
  "objects": [
    {
      "actions": {
        "upload": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/8e6ea5f6c066b44a0efa43bcce86aea73f17e6e23f0663df0251e7524e140a13"
        }
      },
      "oid": "8e6ea5f6c066b44a0efa43bcce86aea73f17e6e23f0663df0251e7524e140a13",
      "size": 6
    }
  ]
}
abort: LFS server error. Remote object for "b" not found:(.*)! (re)
[255]
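The failure is visible in the batch payload above: the client asked for a download, but the server answered with an "upload" action, meaning it does not have the blob, and the client aborts. A hedged sketch of how a client might detect that condition (illustrative names, not the extension's code):

    def download_action(obj):
        # A batch entry without a "download" action means the server
        # cannot serve this oid, so report it as missing.
        actions = obj.get('actions', {})
        if 'download' not in actions:
            raise LookupError('remote object for %s not found' % obj['oid'])
        return actions['download']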

Check error message when object does not exist:

$ cd $TESTTMP
$ hg init test && cd test
$ echo "[extensions]" >> .hg/hgrc
$ echo "lfs=" >> .hg/hgrc
$ echo "[lfs]" >> .hg/hgrc
$ echo "threshold=1" >> .hg/hgrc
$ echo a > a
$ hg add a
$ hg commit -m 'test'
$ echo aaaaa > a
$ hg commit -m 'largefile'
$ hg debugdata .hg/store/data/a.i 1 # verify this is not the file content but the LFS "pointer", which includes the "oid".
version https://git-lfs.github.com/spec/v1
oid sha256:bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a
size 6
x-is-binary 0
$ cd ..
$ rm -rf `hg config lfs.usercache`
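The debugdata output above is the LFS pointer stored in the filelog instead of the real data: one "key value" pair per line, where "oid" names the SHA-256 of the blob and "size" its length. A minimal sketch of reading such a pointer (hypothetical helper, not the extension's parser):

    def parse_pointer(text):
        # Split each non-empty line into key and value; callers can then
        # look up the oid ('sha256:<hex>') and the size.
        fields = dict(line.split(' ', 1) for line in text.splitlines() if line)
        assert fields.get('version') == 'https://git-lfs.github.com/spec/v1'
        return fields

    # e.g. parse_pointer(pointer_text)['oid'] -> 'sha256:bdc26931acfb...'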

(Restart the server in a different location so it no longer has the content)

$ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
$ rm $DAEMON_PIDS
$ mkdir $TESTTMP/lfs-server2
$ cd $TESTTMP/lfs-server2
#if no-windows
$ lfs-test-server &> lfs-server.log &
$ echo $! >> $DAEMON_PIDS
#else
$ $PYTHON $TESTTMP/spawn.py >> $DAEMON_PIDS
#endif

$ cd $TESTTMP
$ hg --debug clone test test2
http auth: user foo, password ***
linked 6 files
http auth: user foo, password ***
updating to branch default
resolving manifests
branchmerge: False, force: False, partial: False
ancestor: 000000000000, local: 000000000000+, remote: d2a338f184a8
Status: 200
Content-Length: 308
Content-Type: application/vnd.git-lfs+json
Date: $HTTP_DATE$
{
  "objects": [
    {
      "actions": {
        "upload": {
          "expires_at": "$ISO_8601_DATE_TIME$",
          "header": {
            "Accept": "application/vnd.git-lfs"
          },
          "href": "http://localhost:$HGPORT/objects/bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a"
        }
      },
      "oid": "bdc26931acfb734b142a8d675f205becf27560dc461f501822de13274fe6fc8a",
      "size": 6
    }
  ]
}
abort: LFS server error. Remote object for "a" not found:(.*)! (re)
[255]

$ $PYTHON $RUNTESTDIR/killdaemons.py $DAEMON_PIDS