##// END OF EJS Templates
lfs: add a method to the local blobstore to convert OIDs to file paths...
Matt Harbison -
r44745:06de4a67 default
parent child Browse files
Show More
@@ -1,765 +1,776 b''
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import contextlib
10 import contextlib
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import json
13 import json
14 import os
14 import os
15 import re
15 import re
16 import socket
16 import socket
17
17
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial.pycompat import getattr
19 from mercurial.pycompat import getattr
20
20
21 from mercurial import (
21 from mercurial import (
22 encoding,
22 encoding,
23 error,
23 error,
24 node,
24 node,
25 pathutil,
25 pathutil,
26 pycompat,
26 pycompat,
27 url as urlmod,
27 url as urlmod,
28 util,
28 util,
29 vfs as vfsmod,
29 vfs as vfsmod,
30 worker,
30 worker,
31 )
31 )
32
32
33 from mercurial.utils import stringutil
33 from mercurial.utils import stringutil
34
34
35 from ..largefiles import lfutil
35 from ..largefiles import lfutil
36
36
37 # 64 bytes for SHA256
37 # 64 bytes for SHA256
38 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
38 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
39
39
40
40
class lfsvfs(vfsmod.vfs):
    """A vfs that stores each blob under a two-character shard directory.

    Blob names are 64-hex-character OIDs; ``XXYYYY...`` is stored on disk as
    ``XX/YYYY...``.
    """

    def join(self, path):
        """split the path at first two characters, like: XX/XXXXX..."""
        if not _lfsre.match(path):
            raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
        return super(lfsvfs, self).join(path[0:2], path[2:])

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, [], oids) tuple for blobs under path

        Oids only exist in the root of this vfs, so dirpath is always ''.
        """
        root = os.path.normpath(self.base)
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        oids = []

        for dirpath, dirs, files in os.walk(
            self.reljoin(self.base, path or b''), onerror=onerror
        ):
            dirpath = dirpath[prefixlen:]

            # Silently skip unexpected files and directories: only the
            # two-character shard directories can hold blobs.
            if len(dirpath) == 2:
                oids.extend(
                    [dirpath + f for f in files if _lfsre.match(dirpath + f)]
                )

        yield (b'', [], oids)
71
71
72
72
class nullvfs(lfsvfs):
    """A no-op blob store: never holds anything, silently discards writes.

    Used in place of the usercache when it is disabled via configuration.
    """

    def __init__(self):
        # Deliberately skip lfsvfs/vfs __init__: there is no backing directory.
        pass

    def exists(self, oid):
        return False

    def read(self, oid):
        # store.read() calls into here if the blob doesn't exist in its
        # self.vfs.  Raise the same error as a normal vfs when asked to read a
        # file that doesn't exist.  The only difference is the full file path
        # isn't available in the error.
        raise IOError(
            errno.ENOENT,
            pycompat.sysstr(b'%s: No such file or directory' % oid),
        )

    def walk(self, path=None, onerror=None):
        return (b'', [], [])

    def write(self, oid, data):
        pass
95
95
96
96
class lfsuploadfile(object):
    """a file-like object that supports __len__ and read.

    Wraps an open seekable file so urllib can stream it as an upload body
    with a known Content-Length.  ``close()`` is idempotent; reads after
    close return b''.
    """

    def __init__(self, fp):
        self._fp = fp
        # Determine the total length up front, then rewind for reading.
        fp.seek(0, os.SEEK_END)
        self._len = fp.tell()
        fp.seek(0)

    def __len__(self):
        return self._len

    def read(self, size):
        if self._fp is None:
            return b''
        return self._fp.read(size)

    def close(self):
        if self._fp is not None:
            self._fp.close()
            self._fp = None
119
119
120
120
class local(object):
    """Local blobstore for large file contents.

    This blobstore is used both as a cache and as a staging area for large blobs
    to be uploaded to the remote blobstore.
    """

    def __init__(self, repo):
        fullpath = repo.svfs.join(b'lfs/objects')
        self.vfs = lfsvfs(fullpath)

        if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
            self.cachevfs = nullvfs()
        else:
            usercache = lfutil._usercachedir(repo.ui, b'lfs')
            self.cachevfs = lfsvfs(usercache)
        self.ui = repo.ui

    def open(self, oid):
        """Open a read-only file descriptor to the named blob, in either the
        usercache or the local store."""
        return open(self.path(oid), b'rb')

    def path(self, oid):
        """Build the path for the given blob ``oid``.

        If the blob exists locally, the path may point to either the usercache
        or the local store.  If it doesn't, it will point to the local store.
        This is meant for situations where existing code that isn't LFS aware
        needs to open a blob.  Generally, prefer the ``open`` method on this
        class.
        """
        # The usercache is the most likely place to hold the file.  Commit will
        # write to both it and the local store, as will anything that downloads
        # the blobs.  However, things like clone without an update won't
        # populate the local store.  For an init + push of a local clone,
        # the usercache is the only place it _could_ be.  If not present, the
        # missing file msg here will indicate the local repo, not the usercache.
        if self.cachevfs.exists(oid):
            return self.cachevfs.join(oid)

        return self.vfs.join(oid)

    def download(self, oid, src, content_length):
        """Read the blob from the remote source in chunks, verify the content,
        and write to this local blobstore."""
        sha256 = hashlib.sha256()
        size = 0

        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            for chunk in util.filechunkiter(src, size=1048576):
                fp.write(chunk)
                sha256.update(chunk)
                size += len(chunk)

            # If the server advertised a length longer than what we actually
            # received, then we should expect that the server crashed while
            # producing the response (but the server has no way of telling us
            # that), and we really don't need to try to write the response to
            # the localstore, because it's not going to match the expected.
            # Raising inside the 'with' discards the atomictemp file.
            if content_length is not None and int(content_length) != size:
                msg = (
                    b"Response length (%s) does not match Content-Length "
                    b"header (%d): likely server-side crash"
                )
                raise LfsRemoteError(_(msg) % (size, int(content_length)))

            realoid = node.hex(sha256.digest())
            if realoid != oid:
                raise LfsCorruptionError(
                    _(b'corrupt remote lfs object: %s') % oid
                )

        self._linktousercache(oid)

    def write(self, oid, data):
        """Write blob to local blobstore.

        This should only be called from the filelog during a commit or similar.
        As such, there is no need to verify the data.  Imports from a remote
        store must use ``download()`` instead."""
        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            fp.write(data)

        self._linktousercache(oid)

    def linkfromusercache(self, oid):
        """Link blobs found in the user cache into this store.

        The server module needs to do this when it lets the client know not to
        upload the blob, to ensure it is always available in this store.
        Normally this is done implicitly when the client reads or writes the
        blob, but that doesn't happen when the server tells the client that it
        already has the blob.
        """
        if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
            lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))

    def _linktousercache(self, oid):
        # XXX: should we verify the content of the cache, and hardlink back to
        # the local store on success, but truncate, write and link on failure?
        if not self.cachevfs.exists(oid) and not isinstance(
            self.cachevfs, nullvfs
        ):
            self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
            lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))

    def read(self, oid, verify=True):
        """Read blob from local blobstore."""
        if not self.vfs.exists(oid):
            blob = self._read(self.cachevfs, oid, verify)

            # Even if revlog will verify the content, it needs to be verified
            # now before making the hardlink to avoid propagating corrupt blobs.
            # Don't abort if corruption is detected, because `hg verify` will
            # give more useful info about the corruption- simply don't add the
            # hardlink.
            if verify or node.hex(hashlib.sha256(blob).digest()) == oid:
                self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
                lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
        else:
            self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
            blob = self._read(self.vfs, oid, verify)
        return blob

    def _read(self, vfs, oid, verify):
        """Read blob (after verifying) from the given store"""
        blob = vfs.read(oid)
        if verify:
            _verify(oid, blob)
        return blob

    def verify(self, oid):
        """Indicate whether or not the hash of the underlying file matches its
        name."""
        sha256 = hashlib.sha256()

        with self.open(oid) as fp:
            for chunk in util.filechunkiter(fp, size=1048576):
                sha256.update(chunk)

        return oid == node.hex(sha256.digest())

    def has(self, oid):
        """Returns True if the local blobstore contains the requested blob,
        False otherwise."""
        return self.cachevfs.exists(oid) or self.vfs.exists(oid)
258
269
259
270
def _urlerrorreason(urlerror):
    '''Create a friendly message for the given URLError to be used in an
    LfsRemoteError message.
    '''
    inst = urlerror

    # Prefer the wrapped exception when URLError carries one in .reason.
    if isinstance(urlerror.reason, Exception):
        inst = urlerror.reason

    if util.safehasattr(inst, b'reason'):
        try:  # usually it is in the form (errno, strerror)
            reason = inst.reason.args[1]
        except (AttributeError, IndexError):
            # it might be anything, for example a string
            reason = inst.reason
        if isinstance(reason, pycompat.unicode):
            # SSLError of Python 2.7.9 contains a unicode
            reason = encoding.unitolocal(reason)
        return reason
    elif getattr(inst, "strerror", None):
        return encoding.strtolocal(inst.strerror)
    else:
        return stringutil.forcebytestr(urlerror)
283
294
284
295
class lfsauthhandler(util.urlreq.basehandler):
    handler_order = 480  # Before HTTPDigestAuthHandler (== 490)

    def http_error_401(self, req, fp, code, msg, headers):
        """Enforces that any authentication performed is HTTP Basic
        Authentication.  No authentication is also acceptable.
        """
        authreq = headers.get('www-authenticate', None)
        if authreq:
            scheme = authreq.split()[0]

            if scheme.lower() != 'basic':
                msg = _(b'the server must support Basic Authentication')
                raise util.urlerr.httperror(
                    req.get_full_url(),
                    code,
                    encoding.strfromlocal(msg),
                    headers,
                    fp,
                )

        # Returning None lets the remaining handlers (e.g. basic auth) run.
        return None
306
317
307
318
308 class _gitlfsremote(object):
319 class _gitlfsremote(object):
309 def __init__(self, repo, url):
320 def __init__(self, repo, url):
310 ui = repo.ui
321 ui = repo.ui
311 self.ui = ui
322 self.ui = ui
312 baseurl, authinfo = url.authinfo()
323 baseurl, authinfo = url.authinfo()
313 self.baseurl = baseurl.rstrip(b'/')
324 self.baseurl = baseurl.rstrip(b'/')
314 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
325 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
315 if not useragent:
326 if not useragent:
316 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
327 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
317 self.urlopener = urlmod.opener(ui, authinfo, useragent)
328 self.urlopener = urlmod.opener(ui, authinfo, useragent)
318 self.urlopener.add_handler(lfsauthhandler())
329 self.urlopener.add_handler(lfsauthhandler())
319 self.retry = ui.configint(b'lfs', b'retry')
330 self.retry = ui.configint(b'lfs', b'retry')
320
331
321 def writebatch(self, pointers, fromstore):
332 def writebatch(self, pointers, fromstore):
322 """Batch upload from local to remote blobstore."""
333 """Batch upload from local to remote blobstore."""
323 self._batch(_deduplicate(pointers), fromstore, b'upload')
334 self._batch(_deduplicate(pointers), fromstore, b'upload')
324
335
325 def readbatch(self, pointers, tostore):
336 def readbatch(self, pointers, tostore):
326 """Batch download from remote to local blostore."""
337 """Batch download from remote to local blostore."""
327 self._batch(_deduplicate(pointers), tostore, b'download')
338 self._batch(_deduplicate(pointers), tostore, b'download')
328
339
329 def _batchrequest(self, pointers, action):
340 def _batchrequest(self, pointers, action):
330 """Get metadata about objects pointed by pointers for given action
341 """Get metadata about objects pointed by pointers for given action
331
342
332 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
343 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
333 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
344 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
334 """
345 """
335 objects = [
346 objects = [
336 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
347 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
337 for p in pointers
348 for p in pointers
338 ]
349 ]
339 requestdata = pycompat.bytesurl(
350 requestdata = pycompat.bytesurl(
340 json.dumps(
351 json.dumps(
341 {'objects': objects, 'operation': pycompat.strurl(action),}
352 {'objects': objects, 'operation': pycompat.strurl(action),}
342 )
353 )
343 )
354 )
344 url = b'%s/objects/batch' % self.baseurl
355 url = b'%s/objects/batch' % self.baseurl
345 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
356 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
346 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
357 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
347 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
358 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
348 try:
359 try:
349 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
360 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
350 rawjson = rsp.read()
361 rawjson = rsp.read()
351 except util.urlerr.httperror as ex:
362 except util.urlerr.httperror as ex:
352 hints = {
363 hints = {
353 400: _(
364 400: _(
354 b'check that lfs serving is enabled on %s and "%s" is '
365 b'check that lfs serving is enabled on %s and "%s" is '
355 b'supported'
366 b'supported'
356 )
367 )
357 % (self.baseurl, action),
368 % (self.baseurl, action),
358 404: _(b'the "lfs.url" config may be used to override %s')
369 404: _(b'the "lfs.url" config may be used to override %s')
359 % self.baseurl,
370 % self.baseurl,
360 }
371 }
361 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
372 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
362 raise LfsRemoteError(
373 raise LfsRemoteError(
363 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
374 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
364 hint=hint,
375 hint=hint,
365 )
376 )
366 except util.urlerr.urlerror as ex:
377 except util.urlerr.urlerror as ex:
367 hint = (
378 hint = (
368 _(b'the "lfs.url" config may be used to override %s')
379 _(b'the "lfs.url" config may be used to override %s')
369 % self.baseurl
380 % self.baseurl
370 )
381 )
371 raise LfsRemoteError(
382 raise LfsRemoteError(
372 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
383 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
373 )
384 )
374 try:
385 try:
375 response = pycompat.json_loads(rawjson)
386 response = pycompat.json_loads(rawjson)
376 except ValueError:
387 except ValueError:
377 raise LfsRemoteError(
388 raise LfsRemoteError(
378 _(b'LFS server returns invalid JSON: %s')
389 _(b'LFS server returns invalid JSON: %s')
379 % rawjson.encode("utf-8")
390 % rawjson.encode("utf-8")
380 )
391 )
381
392
382 if self.ui.debugflag:
393 if self.ui.debugflag:
383 self.ui.debug(b'Status: %d\n' % rsp.status)
394 self.ui.debug(b'Status: %d\n' % rsp.status)
384 # lfs-test-server and hg serve return headers in different order
395 # lfs-test-server and hg serve return headers in different order
385 headers = pycompat.bytestr(rsp.info()).strip()
396 headers = pycompat.bytestr(rsp.info()).strip()
386 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
397 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
387
398
388 if 'objects' in response:
399 if 'objects' in response:
389 response['objects'] = sorted(
400 response['objects'] = sorted(
390 response['objects'], key=lambda p: p['oid']
401 response['objects'], key=lambda p: p['oid']
391 )
402 )
392 self.ui.debug(
403 self.ui.debug(
393 b'%s\n'
404 b'%s\n'
394 % pycompat.bytesurl(
405 % pycompat.bytesurl(
395 json.dumps(
406 json.dumps(
396 response,
407 response,
397 indent=2,
408 indent=2,
398 separators=('', ': '),
409 separators=('', ': '),
399 sort_keys=True,
410 sort_keys=True,
400 )
411 )
401 )
412 )
402 )
413 )
403
414
404 def encodestr(x):
415 def encodestr(x):
405 if isinstance(x, pycompat.unicode):
416 if isinstance(x, pycompat.unicode):
406 return x.encode('utf-8')
417 return x.encode('utf-8')
407 return x
418 return x
408
419
409 return pycompat.rapply(encodestr, response)
420 return pycompat.rapply(encodestr, response)
410
421
411 def _checkforservererror(self, pointers, responses, action):
422 def _checkforservererror(self, pointers, responses, action):
412 """Scans errors from objects
423 """Scans errors from objects
413
424
414 Raises LfsRemoteError if any objects have an error"""
425 Raises LfsRemoteError if any objects have an error"""
415 for response in responses:
426 for response in responses:
416 # The server should return 404 when objects cannot be found. Some
427 # The server should return 404 when objects cannot be found. Some
417 # server implementation (ex. lfs-test-server) does not set "error"
428 # server implementation (ex. lfs-test-server) does not set "error"
418 # but just removes "download" from "actions". Treat that case
429 # but just removes "download" from "actions". Treat that case
419 # as the same as 404 error.
430 # as the same as 404 error.
420 if b'error' not in response:
431 if b'error' not in response:
421 if action == b'download' and action not in response.get(
432 if action == b'download' and action not in response.get(
422 b'actions', []
433 b'actions', []
423 ):
434 ):
424 code = 404
435 code = 404
425 else:
436 else:
426 continue
437 continue
427 else:
438 else:
428 # An error dict without a code doesn't make much sense, so
439 # An error dict without a code doesn't make much sense, so
429 # treat as a server error.
440 # treat as a server error.
430 code = response.get(b'error').get(b'code', 500)
441 code = response.get(b'error').get(b'code', 500)
431
442
432 ptrmap = {p.oid(): p for p in pointers}
443 ptrmap = {p.oid(): p for p in pointers}
433 p = ptrmap.get(response[b'oid'], None)
444 p = ptrmap.get(response[b'oid'], None)
434 if p:
445 if p:
435 filename = getattr(p, 'filename', b'unknown')
446 filename = getattr(p, 'filename', b'unknown')
436 errors = {
447 errors = {
437 404: b'The object does not exist',
448 404: b'The object does not exist',
438 410: b'The object was removed by the owner',
449 410: b'The object was removed by the owner',
439 422: b'Validation error',
450 422: b'Validation error',
440 500: b'Internal server error',
451 500: b'Internal server error',
441 }
452 }
442 msg = errors.get(code, b'status code %d' % code)
453 msg = errors.get(code, b'status code %d' % code)
443 raise LfsRemoteError(
454 raise LfsRemoteError(
444 _(b'LFS server error for "%s": %s') % (filename, msg)
455 _(b'LFS server error for "%s": %s') % (filename, msg)
445 )
456 )
446 else:
457 else:
447 raise LfsRemoteError(
458 raise LfsRemoteError(
448 _(b'LFS server error. Unsolicited response for oid %s')
459 _(b'LFS server error. Unsolicited response for oid %s')
449 % response[b'oid']
460 % response[b'oid']
450 )
461 )
451
462
452 def _extractobjects(self, response, pointers, action):
463 def _extractobjects(self, response, pointers, action):
453 """extract objects from response of the batch API
464 """extract objects from response of the batch API
454
465
455 response: parsed JSON object returned by batch API
466 response: parsed JSON object returned by batch API
456 return response['objects'] filtered by action
467 return response['objects'] filtered by action
457 raise if any object has an error
468 raise if any object has an error
458 """
469 """
459 # Scan errors from objects - fail early
470 # Scan errors from objects - fail early
460 objects = response.get(b'objects', [])
471 objects = response.get(b'objects', [])
461 self._checkforservererror(pointers, objects, action)
472 self._checkforservererror(pointers, objects, action)
462
473
463 # Filter objects with given action. Practically, this skips uploading
474 # Filter objects with given action. Practically, this skips uploading
464 # objects which exist in the server.
475 # objects which exist in the server.
465 filteredobjects = [
476 filteredobjects = [
466 o for o in objects if action in o.get(b'actions', [])
477 o for o in objects if action in o.get(b'actions', [])
467 ]
478 ]
468
479
469 return filteredobjects
480 return filteredobjects
470
481
471 def _basictransfer(self, obj, action, localstore):
482 def _basictransfer(self, obj, action, localstore):
472 """Download or upload a single object using basic transfer protocol
483 """Download or upload a single object using basic transfer protocol
473
484
474 obj: dict, an object description returned by batch API
485 obj: dict, an object description returned by batch API
475 action: string, one of ['upload', 'download']
486 action: string, one of ['upload', 'download']
476 localstore: blobstore.local
487 localstore: blobstore.local
477
488
478 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
489 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
479 basic-transfers.md
490 basic-transfers.md
480 """
491 """
481 oid = obj[b'oid']
492 oid = obj[b'oid']
482 href = obj[b'actions'][action].get(b'href')
493 href = obj[b'actions'][action].get(b'href')
483 headers = obj[b'actions'][action].get(b'header', {}).items()
494 headers = obj[b'actions'][action].get(b'header', {}).items()
484
495
485 request = util.urlreq.request(pycompat.strurl(href))
496 request = util.urlreq.request(pycompat.strurl(href))
486 if action == b'upload':
497 if action == b'upload':
487 # If uploading blobs, read data from local blobstore.
498 # If uploading blobs, read data from local blobstore.
488 if not localstore.verify(oid):
499 if not localstore.verify(oid):
489 raise error.Abort(
500 raise error.Abort(
490 _(b'detected corrupt lfs object: %s') % oid,
501 _(b'detected corrupt lfs object: %s') % oid,
491 hint=_(b'run hg verify'),
502 hint=_(b'run hg verify'),
492 )
503 )
493
504
494 for k, v in headers:
505 for k, v in headers:
495 request.add_header(pycompat.strurl(k), pycompat.strurl(v))
506 request.add_header(pycompat.strurl(k), pycompat.strurl(v))
496
507
497 try:
508 try:
498 if action == b'upload':
509 if action == b'upload':
499 request.data = lfsuploadfile(localstore.open(oid))
510 request.data = lfsuploadfile(localstore.open(oid))
500 request.get_method = lambda: 'PUT'
511 request.get_method = lambda: 'PUT'
501 request.add_header('Content-Type', 'application/octet-stream')
512 request.add_header('Content-Type', 'application/octet-stream')
502 request.add_header('Content-Length', len(request.data))
513 request.add_header('Content-Length', len(request.data))
503
514
504 with contextlib.closing(self.urlopener.open(request)) as res:
515 with contextlib.closing(self.urlopener.open(request)) as res:
505 contentlength = res.info().get(b"content-length")
516 contentlength = res.info().get(b"content-length")
506 ui = self.ui # Shorten debug lines
517 ui = self.ui # Shorten debug lines
507 if self.ui.debugflag:
518 if self.ui.debugflag:
508 ui.debug(b'Status: %d\n' % res.status)
519 ui.debug(b'Status: %d\n' % res.status)
509 # lfs-test-server and hg serve return headers in different
520 # lfs-test-server and hg serve return headers in different
510 # order
521 # order
511 headers = pycompat.bytestr(res.info()).strip()
522 headers = pycompat.bytestr(res.info()).strip()
512 ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
523 ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
513
524
514 if action == b'download':
525 if action == b'download':
515 # If downloading blobs, store downloaded data to local
526 # If downloading blobs, store downloaded data to local
516 # blobstore
527 # blobstore
517 localstore.download(oid, res, contentlength)
528 localstore.download(oid, res, contentlength)
518 else:
529 else:
519 blocks = []
530 blocks = []
520 while True:
531 while True:
521 data = res.read(1048576)
532 data = res.read(1048576)
522 if not data:
533 if not data:
523 break
534 break
524 blocks.append(data)
535 blocks.append(data)
525
536
526 response = b"".join(blocks)
537 response = b"".join(blocks)
527 if response:
538 if response:
528 ui.debug(b'lfs %s response: %s' % (action, response))
539 ui.debug(b'lfs %s response: %s' % (action, response))
529 except util.urlerr.httperror as ex:
540 except util.urlerr.httperror as ex:
530 if self.ui.debugflag:
541 if self.ui.debugflag:
531 self.ui.debug(
542 self.ui.debug(
532 b'%s: %s\n' % (oid, ex.read())
543 b'%s: %s\n' % (oid, ex.read())
533 ) # XXX: also bytes?
544 ) # XXX: also bytes?
534 raise LfsRemoteError(
545 raise LfsRemoteError(
535 _(b'LFS HTTP error: %s (oid=%s, action=%s)')
546 _(b'LFS HTTP error: %s (oid=%s, action=%s)')
536 % (stringutil.forcebytestr(ex), oid, action)
547 % (stringutil.forcebytestr(ex), oid, action)
537 )
548 )
538 except util.urlerr.urlerror as ex:
549 except util.urlerr.urlerror as ex:
539 hint = _(b'attempted connection to %s') % pycompat.bytesurl(
550 hint = _(b'attempted connection to %s') % pycompat.bytesurl(
540 util.urllibcompat.getfullurl(request)
551 util.urllibcompat.getfullurl(request)
541 )
552 )
542 raise LfsRemoteError(
553 raise LfsRemoteError(
543 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
554 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
544 )
555 )
545 finally:
556 finally:
546 if request.data:
557 if request.data:
547 request.data.close()
558 request.data.close()
548
559
549 def _batch(self, pointers, localstore, action):
560 def _batch(self, pointers, localstore, action):
550 if action not in [b'upload', b'download']:
561 if action not in [b'upload', b'download']:
551 raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)
562 raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)
552
563
553 response = self._batchrequest(pointers, action)
564 response = self._batchrequest(pointers, action)
554 objects = self._extractobjects(response, pointers, action)
565 objects = self._extractobjects(response, pointers, action)
555 total = sum(x.get(b'size', 0) for x in objects)
566 total = sum(x.get(b'size', 0) for x in objects)
556 sizes = {}
567 sizes = {}
557 for obj in objects:
568 for obj in objects:
558 sizes[obj.get(b'oid')] = obj.get(b'size', 0)
569 sizes[obj.get(b'oid')] = obj.get(b'size', 0)
559 topic = {
570 topic = {
560 b'upload': _(b'lfs uploading'),
571 b'upload': _(b'lfs uploading'),
561 b'download': _(b'lfs downloading'),
572 b'download': _(b'lfs downloading'),
562 }[action]
573 }[action]
563 if len(objects) > 1:
574 if len(objects) > 1:
564 self.ui.note(
575 self.ui.note(
565 _(b'lfs: need to transfer %d objects (%s)\n')
576 _(b'lfs: need to transfer %d objects (%s)\n')
566 % (len(objects), util.bytecount(total))
577 % (len(objects), util.bytecount(total))
567 )
578 )
568
579
569 def transfer(chunk):
580 def transfer(chunk):
570 for obj in chunk:
581 for obj in chunk:
571 objsize = obj.get(b'size', 0)
582 objsize = obj.get(b'size', 0)
572 if self.ui.verbose:
583 if self.ui.verbose:
573 if action == b'download':
584 if action == b'download':
574 msg = _(b'lfs: downloading %s (%s)\n')
585 msg = _(b'lfs: downloading %s (%s)\n')
575 elif action == b'upload':
586 elif action == b'upload':
576 msg = _(b'lfs: uploading %s (%s)\n')
587 msg = _(b'lfs: uploading %s (%s)\n')
577 self.ui.note(
588 self.ui.note(
578 msg % (obj.get(b'oid'), util.bytecount(objsize))
589 msg % (obj.get(b'oid'), util.bytecount(objsize))
579 )
590 )
580 retry = self.retry
591 retry = self.retry
581 while True:
592 while True:
582 try:
593 try:
583 self._basictransfer(obj, action, localstore)
594 self._basictransfer(obj, action, localstore)
584 yield 1, obj.get(b'oid')
595 yield 1, obj.get(b'oid')
585 break
596 break
586 except socket.error as ex:
597 except socket.error as ex:
587 if retry > 0:
598 if retry > 0:
588 self.ui.note(
599 self.ui.note(
589 _(b'lfs: failed: %r (remaining retry %d)\n')
600 _(b'lfs: failed: %r (remaining retry %d)\n')
590 % (stringutil.forcebytestr(ex), retry)
601 % (stringutil.forcebytestr(ex), retry)
591 )
602 )
592 retry -= 1
603 retry -= 1
593 continue
604 continue
594 raise
605 raise
595
606
596 # Until https multiplexing gets sorted out
607 # Until https multiplexing gets sorted out
597 if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
608 if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
598 oids = worker.worker(
609 oids = worker.worker(
599 self.ui,
610 self.ui,
600 0.1,
611 0.1,
601 transfer,
612 transfer,
602 (),
613 (),
603 sorted(objects, key=lambda o: o.get(b'oid')),
614 sorted(objects, key=lambda o: o.get(b'oid')),
604 )
615 )
605 else:
616 else:
606 oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))
617 oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))
607
618
608 with self.ui.makeprogress(
619 with self.ui.makeprogress(
609 topic, unit=_(b"bytes"), total=total
620 topic, unit=_(b"bytes"), total=total
610 ) as progress:
621 ) as progress:
611 progress.update(0)
622 progress.update(0)
612 processed = 0
623 processed = 0
613 blobs = 0
624 blobs = 0
614 for _one, oid in oids:
625 for _one, oid in oids:
615 processed += sizes[oid]
626 processed += sizes[oid]
616 blobs += 1
627 blobs += 1
617 progress.update(processed)
628 progress.update(processed)
618 self.ui.note(_(b'lfs: processed: %s\n') % oid)
629 self.ui.note(_(b'lfs: processed: %s\n') % oid)
619
630
620 if blobs > 0:
631 if blobs > 0:
621 if action == b'upload':
632 if action == b'upload':
622 self.ui.status(
633 self.ui.status(
623 _(b'lfs: uploaded %d files (%s)\n')
634 _(b'lfs: uploaded %d files (%s)\n')
624 % (blobs, util.bytecount(processed))
635 % (blobs, util.bytecount(processed))
625 )
636 )
626 elif action == b'download':
637 elif action == b'download':
627 self.ui.status(
638 self.ui.status(
628 _(b'lfs: downloaded %d files (%s)\n')
639 _(b'lfs: downloaded %d files (%s)\n')
629 % (blobs, util.bytecount(processed))
640 % (blobs, util.bytecount(processed))
630 )
641 )
631
642
632 def __del__(self):
643 def __del__(self):
633 # copied from mercurial/httppeer.py
644 # copied from mercurial/httppeer.py
634 urlopener = getattr(self, 'urlopener', None)
645 urlopener = getattr(self, 'urlopener', None)
635 if urlopener:
646 if urlopener:
636 for h in urlopener.handlers:
647 for h in urlopener.handlers:
637 h.close()
648 h.close()
638 getattr(h, "close_all", lambda: None)()
649 getattr(h, "close_all", lambda: None)()
639
650
640
651
641 class _dummyremote(object):
652 class _dummyremote(object):
642 """Dummy store storing blobs to temp directory."""
653 """Dummy store storing blobs to temp directory."""
643
654
644 def __init__(self, repo, url):
655 def __init__(self, repo, url):
645 fullpath = repo.vfs.join(b'lfs', url.path)
656 fullpath = repo.vfs.join(b'lfs', url.path)
646 self.vfs = lfsvfs(fullpath)
657 self.vfs = lfsvfs(fullpath)
647
658
648 def writebatch(self, pointers, fromstore):
659 def writebatch(self, pointers, fromstore):
649 for p in _deduplicate(pointers):
660 for p in _deduplicate(pointers):
650 content = fromstore.read(p.oid(), verify=True)
661 content = fromstore.read(p.oid(), verify=True)
651 with self.vfs(p.oid(), b'wb', atomictemp=True) as fp:
662 with self.vfs(p.oid(), b'wb', atomictemp=True) as fp:
652 fp.write(content)
663 fp.write(content)
653
664
654 def readbatch(self, pointers, tostore):
665 def readbatch(self, pointers, tostore):
655 for p in _deduplicate(pointers):
666 for p in _deduplicate(pointers):
656 with self.vfs(p.oid(), b'rb') as fp:
667 with self.vfs(p.oid(), b'rb') as fp:
657 tostore.download(p.oid(), fp, None)
668 tostore.download(p.oid(), fp, None)
658
669
659
670
660 class _nullremote(object):
671 class _nullremote(object):
661 """Null store storing blobs to /dev/null."""
672 """Null store storing blobs to /dev/null."""
662
673
663 def __init__(self, repo, url):
674 def __init__(self, repo, url):
664 pass
675 pass
665
676
666 def writebatch(self, pointers, fromstore):
677 def writebatch(self, pointers, fromstore):
667 pass
678 pass
668
679
669 def readbatch(self, pointers, tostore):
680 def readbatch(self, pointers, tostore):
670 pass
681 pass
671
682
672
683
673 class _promptremote(object):
684 class _promptremote(object):
674 """Prompt user to set lfs.url when accessed."""
685 """Prompt user to set lfs.url when accessed."""
675
686
676 def __init__(self, repo, url):
687 def __init__(self, repo, url):
677 pass
688 pass
678
689
679 def writebatch(self, pointers, fromstore, ui=None):
690 def writebatch(self, pointers, fromstore, ui=None):
680 self._prompt()
691 self._prompt()
681
692
682 def readbatch(self, pointers, tostore, ui=None):
693 def readbatch(self, pointers, tostore, ui=None):
683 self._prompt()
694 self._prompt()
684
695
685 def _prompt(self):
696 def _prompt(self):
686 raise error.Abort(_(b'lfs.url needs to be configured'))
697 raise error.Abort(_(b'lfs.url needs to be configured'))
687
698
688
699
689 _storemap = {
700 _storemap = {
690 b'https': _gitlfsremote,
701 b'https': _gitlfsremote,
691 b'http': _gitlfsremote,
702 b'http': _gitlfsremote,
692 b'file': _dummyremote,
703 b'file': _dummyremote,
693 b'null': _nullremote,
704 b'null': _nullremote,
694 None: _promptremote,
705 None: _promptremote,
695 }
706 }
696
707
697
708
698 def _deduplicate(pointers):
709 def _deduplicate(pointers):
699 """Remove any duplicate oids that exist in the list"""
710 """Remove any duplicate oids that exist in the list"""
700 reduced = util.sortdict()
711 reduced = util.sortdict()
701 for p in pointers:
712 for p in pointers:
702 reduced[p.oid()] = p
713 reduced[p.oid()] = p
703 return reduced.values()
714 return reduced.values()
704
715
705
716
706 def _verify(oid, content):
717 def _verify(oid, content):
707 realoid = node.hex(hashlib.sha256(content).digest())
718 realoid = node.hex(hashlib.sha256(content).digest())
708 if realoid != oid:
719 if realoid != oid:
709 raise LfsCorruptionError(
720 raise LfsCorruptionError(
710 _(b'detected corrupt lfs object: %s') % oid,
721 _(b'detected corrupt lfs object: %s') % oid,
711 hint=_(b'run hg verify'),
722 hint=_(b'run hg verify'),
712 )
723 )
713
724
714
725
715 def remote(repo, remote=None):
726 def remote(repo, remote=None):
716 """remotestore factory. return a store in _storemap depending on config
727 """remotestore factory. return a store in _storemap depending on config
717
728
718 If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
729 If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
719 infer the endpoint, based on the remote repository using the same path
730 infer the endpoint, based on the remote repository using the same path
720 adjustments as git. As an extension, 'http' is supported as well so that
731 adjustments as git. As an extension, 'http' is supported as well so that
721 ``hg serve`` works out of the box.
732 ``hg serve`` works out of the box.
722
733
723 https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
734 https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
724 """
735 """
725 lfsurl = repo.ui.config(b'lfs', b'url')
736 lfsurl = repo.ui.config(b'lfs', b'url')
726 url = util.url(lfsurl or b'')
737 url = util.url(lfsurl or b'')
727 if lfsurl is None:
738 if lfsurl is None:
728 if remote:
739 if remote:
729 path = remote
740 path = remote
730 elif util.safehasattr(repo, b'_subtoppath'):
741 elif util.safehasattr(repo, b'_subtoppath'):
731 # The pull command sets this during the optional update phase, which
742 # The pull command sets this during the optional update phase, which
732 # tells exactly where the pull originated, whether 'paths.default'
743 # tells exactly where the pull originated, whether 'paths.default'
733 # or explicit.
744 # or explicit.
734 path = repo._subtoppath
745 path = repo._subtoppath
735 else:
746 else:
736 # TODO: investigate 'paths.remote:lfsurl' style path customization,
747 # TODO: investigate 'paths.remote:lfsurl' style path customization,
737 # and fall back to inferring from 'paths.remote' if unspecified.
748 # and fall back to inferring from 'paths.remote' if unspecified.
738 path = repo.ui.config(b'paths', b'default') or b''
749 path = repo.ui.config(b'paths', b'default') or b''
739
750
740 defaulturl = util.url(path)
751 defaulturl = util.url(path)
741
752
742 # TODO: support local paths as well.
753 # TODO: support local paths as well.
743 # TODO: consider the ssh -> https transformation that git applies
754 # TODO: consider the ssh -> https transformation that git applies
744 if defaulturl.scheme in (b'http', b'https'):
755 if defaulturl.scheme in (b'http', b'https'):
745 if defaulturl.path and defaulturl.path[:-1] != b'/':
756 if defaulturl.path and defaulturl.path[:-1] != b'/':
746 defaulturl.path += b'/'
757 defaulturl.path += b'/'
747 defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'
758 defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'
748
759
749 url = util.url(bytes(defaulturl))
760 url = util.url(bytes(defaulturl))
750 repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)
761 repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)
751
762
752 scheme = url.scheme
763 scheme = url.scheme
753 if scheme not in _storemap:
764 if scheme not in _storemap:
754 raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
765 raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
755 return _storemap[scheme](repo, url)
766 return _storemap[scheme](repo, url)
756
767
757
768
758 class LfsRemoteError(error.StorageError):
769 class LfsRemoteError(error.StorageError):
759 pass
770 pass
760
771
761
772
762 class LfsCorruptionError(error.Abort):
773 class LfsCorruptionError(error.Abort):
763 """Raised when a corrupt blob is detected, aborting an operation
774 """Raised when a corrupt blob is detected, aborting an operation
764
775
765 It exists to allow specialized handling on the server side."""
776 It exists to allow specialized handling on the server side."""
General Comments 0
You need to be logged in to leave comments. Login now