##// END OF EJS Templates
lfs: use str for the open() mode when opening a blob for py3...
Matt Harbison -
r44776:234001d2 default
parent child Browse files
Show More
@@ -1,765 +1,765
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import contextlib
10 import contextlib
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import json
13 import json
14 import os
14 import os
15 import re
15 import re
16 import socket
16 import socket
17
17
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial.pycompat import getattr
19 from mercurial.pycompat import getattr
20
20
21 from mercurial import (
21 from mercurial import (
22 encoding,
22 encoding,
23 error,
23 error,
24 httpconnection as httpconnectionmod,
24 httpconnection as httpconnectionmod,
25 node,
25 node,
26 pathutil,
26 pathutil,
27 pycompat,
27 pycompat,
28 url as urlmod,
28 url as urlmod,
29 util,
29 util,
30 vfs as vfsmod,
30 vfs as vfsmod,
31 worker,
31 worker,
32 )
32 )
33
33
34 from mercurial.utils import stringutil
34 from mercurial.utils import stringutil
35
35
36 from ..largefiles import lfutil
36 from ..largefiles import lfutil
37
37
# 64 bytes for SHA256
# An LFS object id (oid) is the lowercase hex digest of the blob's SHA-256
# contents, so a valid name is exactly 64 hex characters ([a-f0-9]).
_lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
40
40
41
41
class lfsvfs(vfsmod.vfs):
    def join(self, path):
        """split the path at first two characters, like: XX/XXXXX..."""
        if not _lfsre.match(path):
            raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
        return super(lfsvfs, self).join(path[0:2], path[2:])

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, [], oids) tuple for blobs under path

        Oids only exist in the root of this vfs, so dirpath is always ''.
        """
        base = os.path.normpath(self.base)
        # Slicing with this prefix length maps the walk root itself to the
        # empty string, since len(base) < prefixlen.
        prefixlen = len(pathutil.normasprefix(base))
        found = []

        start = self.reljoin(self.base, path or b'')
        for dirpath, dirs, files in os.walk(start, onerror=onerror):
            subdir = dirpath[prefixlen:]

            # Anything that isn't a two-character shard directory is
            # silently skipped, as are files that don't form a valid oid
            # when rejoined with their shard prefix.
            if len(subdir) != 2:
                continue
            for name in files:
                oid = subdir + name
                if _lfsre.match(oid):
                    found.append(oid)

        yield (b'', [], found)
72
72
73
73
class nullvfs(lfsvfs):
    """A do-nothing blob store, used when the usercache is disabled."""

    def __init__(self):
        # Deliberately skip lfsvfs/vfs initialization: there is no backing
        # directory for this store.
        pass

    def exists(self, oid):
        return False

    def walk(self, path=None, onerror=None):
        return (b'', [], [])

    def read(self, oid):
        # store.read() calls into here if the blob doesn't exist in its
        # self.vfs. Raise the same error as a normal vfs when asked to read a
        # file that doesn't exist. The only difference is the full file path
        # isn't available in the error.
        strerror = pycompat.sysstr(b'%s: No such file or directory' % oid)
        raise IOError(errno.ENOENT, strerror)

    def write(self, oid, data):
        pass
96
96
97
97
class lfsuploadfile(httpconnectionmod.httpsendfile):
    """a file-like object that supports keepalive.
    """

    def __init__(self, ui, filename):
        # Open the named blob read-only in binary mode via the parent class.
        super(lfsuploadfile, self).__init__(ui, filename, b'rb')
        # Expose the wrapped file's read() directly; presumably this bypasses
        # any read wrapping done by httpsendfile -- TODO confirm against
        # httpconnectionmod.  Progress is disabled here anyway (see below).
        self.read = self._data.read

    def _makeprogress(self):
        return None  # progress is handled by the worker client
108
108
109
109
class local(object):
    """Local blobstore for large file contents.

    This blobstore is used both as a cache and as a staging area for large blobs
    to be uploaded to the remote blobstore.
    """

    def __init__(self, repo):
        # Blobs committed to this repo live under .hg/store/lfs/objects.
        fullpath = repo.svfs.join(b'lfs/objects')
        self.vfs = lfsvfs(fullpath)

        # The shared usercache can be disabled, in which case a no-op store
        # stands in for it so the rest of the class needn't special-case.
        if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
            self.cachevfs = nullvfs()
        else:
            usercache = lfutil._usercachedir(repo.ui, b'lfs')
            self.cachevfs = lfsvfs(usercache)
        self.ui = repo.ui

    def open(self, oid):
        """Open a read-only file descriptor to the named blob, in either the
        usercache or the local store."""
        # Note: the mode is a native str ('rb', not b'rb') for py3's open().
        return open(self.path(oid), 'rb')

    def path(self, oid):
        """Build the path for the given blob ``oid``.

        If the blob exists locally, the path may point to either the usercache
        or the local store. If it doesn't, it will point to the local store.
        This is meant for situations where existing code that isn't LFS aware
        needs to open a blob. Generally, prefer the ``open`` method on this
        class.
        """
        # The usercache is the most likely place to hold the file. Commit will
        # write to both it and the local store, as will anything that downloads
        # the blobs. However, things like clone without an update won't
        # populate the local store. For an init + push of a local clone,
        # the usercache is the only place it _could_ be. If not present, the
        # missing file msg here will indicate the local repo, not the usercache.
        if self.cachevfs.exists(oid):
            return self.cachevfs.join(oid)

        return self.vfs.join(oid)

    def download(self, oid, src, content_length):
        """Read the blob from the remote source in chunks, verify the content,
        and write to this local blobstore."""
        sha256 = hashlib.sha256()
        size = 0

        # atomictemp ensures a partially written blob is discarded if any of
        # the checks below raise.
        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            for chunk in util.filechunkiter(src, size=1048576):
                fp.write(chunk)
                sha256.update(chunk)
                size += len(chunk)

            # If the server advertised a length longer than what we actually
            # received, then we should expect that the server crashed while
            # producing the response (but the server has no way of telling us
            # that), and we really don't need to try to write the response to
            # the localstore, because it's not going to match the expected.
            if content_length is not None and int(content_length) != size:
                msg = (
                    b"Response length (%s) does not match Content-Length "
                    b"header (%d): likely server-side crash"
                )
                raise LfsRemoteError(_(msg) % (size, int(content_length)))

            realoid = node.hex(sha256.digest())
            if realoid != oid:
                raise LfsCorruptionError(
                    _(b'corrupt remote lfs object: %s') % oid
                )

        self._linktousercache(oid)

    def write(self, oid, data):
        """Write blob to local blobstore.

        This should only be called from the filelog during a commit or similar.
        As such, there is no need to verify the data. Imports from a remote
        store must use ``download()`` instead."""
        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            fp.write(data)

        self._linktousercache(oid)

    def linkfromusercache(self, oid):
        """Link blobs found in the user cache into this store.

        The server module needs to do this when it lets the client know not to
        upload the blob, to ensure it is always available in this store.
        Normally this is done implicitly when the client reads or writes the
        blob, but that doesn't happen when the server tells the client that it
        already has the blob.
        """
        if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
            lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))

    def _linktousercache(self, oid):
        # XXX: should we verify the content of the cache, and hardlink back to
        # the local store on success, but truncate, write and link on failure?
        if not self.cachevfs.exists(oid) and not isinstance(
            self.cachevfs, nullvfs
        ):
            self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
            lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))

    def read(self, oid, verify=True):
        """Read blob from local blobstore."""
        if not self.vfs.exists(oid):
            blob = self._read(self.cachevfs, oid, verify)

            # Even if revlog will verify the content, it needs to be verified
            # now before making the hardlink to avoid propagating corrupt blobs.
            # Don't abort if corruption is detected, because `hg verify` will
            # give more useful info about the corruption- simply don't add the
            # hardlink.
            if verify or node.hex(hashlib.sha256(blob).digest()) == oid:
                self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
                lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
        else:
            self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
            blob = self._read(self.vfs, oid, verify)
        return blob

    def _read(self, vfs, oid, verify):
        """Read blob (after verifying) from the given store"""
        blob = vfs.read(oid)
        if verify:
            _verify(oid, blob)
        return blob

    def verify(self, oid):
        """Indicate whether or not the hash of the underlying file matches its
        name."""
        sha256 = hashlib.sha256()

        # Hash in 1MB chunks so huge blobs don't need to fit in memory.
        with self.open(oid) as fp:
            for chunk in util.filechunkiter(fp, size=1048576):
                sha256.update(chunk)

        return oid == node.hex(sha256.digest())

    def has(self, oid):
        """Returns True if the local blobstore contains the requested blob,
        False otherwise."""
        return self.cachevfs.exists(oid) or self.vfs.exists(oid)
258
258
259
259
def _urlerrorreason(urlerror):
    '''Create a friendly message for the given URLError to be used in an
    LfsRemoteError message.
    '''
    # Prefer the wrapped exception when the URLError merely carries one.
    if isinstance(urlerror.reason, Exception):
        inst = urlerror.reason
    else:
        inst = urlerror

    if util.safehasattr(inst, b'reason'):
        try:  # usually it is in the form (errno, strerror)
            reason = inst.reason.args[1]
        except (AttributeError, IndexError):
            # it might be anything, for example a string
            reason = inst.reason
        if isinstance(reason, pycompat.unicode):
            # SSLError of Python 2.7.9 contains a unicode
            reason = encoding.unitolocal(reason)
        return reason

    if getattr(inst, "strerror", None):
        return encoding.strtolocal(inst.strerror)

    return stringutil.forcebytestr(urlerror)
283
283
284
284
class lfsauthhandler(util.urlreq.basehandler):
    handler_order = 480  # Before HTTPDigestAuthHandler (== 490)

    def http_error_401(self, req, fp, code, msg, headers):
        """Enforces that any authentication performed is HTTP Basic
        Authentication. No authentication is also acceptable.
        """
        authreq = headers.get('www-authenticate', None)
        if not authreq:
            # No challenge at all: let other handlers proceed.
            return None

        scheme = authreq.split()[0]
        if scheme.lower() == 'basic':
            return None

        # Any non-Basic challenge (e.g. Digest) is rejected outright.
        msg = _(b'the server must support Basic Authentication')
        raise util.urlerr.httperror(
            req.get_full_url(),
            code,
            encoding.strfromlocal(msg),
            headers,
            fp,
        )
306
306
307
307
308 class _gitlfsremote(object):
308 class _gitlfsremote(object):
    def __init__(self, repo, url):
        ui = repo.ui
        self.ui = ui
        baseurl, authinfo = url.authinfo()
        # Strip trailing slashes so later b'%s/objects/batch' style joins
        # don't produce a doubled '/'.
        self.baseurl = baseurl.rstrip(b'/')
        useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
        if not useragent:
            # Default UA advertises a git-lfs client version while still
            # identifying Mercurial -- presumably for server compatibility;
            # overridable via experimental.lfs.user-agent.
            useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
        self.urlopener = urlmod.opener(ui, authinfo, useragent)
        # Restrict any auth challenge to HTTP Basic (see lfsauthhandler).
        self.urlopener.add_handler(lfsauthhandler())
        # Number of retries for failing transfers, from the lfs.retry config.
        self.retry = ui.configint(b'lfs', b'retry')
320
320
    def writebatch(self, pointers, fromstore):
        """Batch upload from local to remote blobstore."""
        # Duplicate pointers are collapsed before issuing the batch request.
        self._batch(_deduplicate(pointers), fromstore, b'upload')
324
324
    def readbatch(self, pointers, tostore):
        """Batch download from remote to local blobstore."""
        # Duplicate pointers are collapsed before issuing the batch request.
        self._batch(_deduplicate(pointers), tostore, b'download')
328
328
329 def _batchrequest(self, pointers, action):
329 def _batchrequest(self, pointers, action):
330 """Get metadata about objects pointed by pointers for given action
330 """Get metadata about objects pointed by pointers for given action
331
331
332 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
332 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
333 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
333 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
334 """
334 """
335 objects = [
335 objects = [
336 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
336 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
337 for p in pointers
337 for p in pointers
338 ]
338 ]
339 requestdata = pycompat.bytesurl(
339 requestdata = pycompat.bytesurl(
340 json.dumps(
340 json.dumps(
341 {'objects': objects, 'operation': pycompat.strurl(action),}
341 {'objects': objects, 'operation': pycompat.strurl(action),}
342 )
342 )
343 )
343 )
344 url = b'%s/objects/batch' % self.baseurl
344 url = b'%s/objects/batch' % self.baseurl
345 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
345 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
346 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
346 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
347 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
347 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
348 try:
348 try:
349 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
349 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
350 rawjson = rsp.read()
350 rawjson = rsp.read()
351 except util.urlerr.httperror as ex:
351 except util.urlerr.httperror as ex:
352 hints = {
352 hints = {
353 400: _(
353 400: _(
354 b'check that lfs serving is enabled on %s and "%s" is '
354 b'check that lfs serving is enabled on %s and "%s" is '
355 b'supported'
355 b'supported'
356 )
356 )
357 % (self.baseurl, action),
357 % (self.baseurl, action),
358 404: _(b'the "lfs.url" config may be used to override %s')
358 404: _(b'the "lfs.url" config may be used to override %s')
359 % self.baseurl,
359 % self.baseurl,
360 }
360 }
361 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
361 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
362 raise LfsRemoteError(
362 raise LfsRemoteError(
363 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
363 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
364 hint=hint,
364 hint=hint,
365 )
365 )
366 except util.urlerr.urlerror as ex:
366 except util.urlerr.urlerror as ex:
367 hint = (
367 hint = (
368 _(b'the "lfs.url" config may be used to override %s')
368 _(b'the "lfs.url" config may be used to override %s')
369 % self.baseurl
369 % self.baseurl
370 )
370 )
371 raise LfsRemoteError(
371 raise LfsRemoteError(
372 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
372 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
373 )
373 )
374 try:
374 try:
375 response = pycompat.json_loads(rawjson)
375 response = pycompat.json_loads(rawjson)
376 except ValueError:
376 except ValueError:
377 raise LfsRemoteError(
377 raise LfsRemoteError(
378 _(b'LFS server returns invalid JSON: %s')
378 _(b'LFS server returns invalid JSON: %s')
379 % rawjson.encode("utf-8")
379 % rawjson.encode("utf-8")
380 )
380 )
381
381
382 if self.ui.debugflag:
382 if self.ui.debugflag:
383 self.ui.debug(b'Status: %d\n' % rsp.status)
383 self.ui.debug(b'Status: %d\n' % rsp.status)
384 # lfs-test-server and hg serve return headers in different order
384 # lfs-test-server and hg serve return headers in different order
385 headers = pycompat.bytestr(rsp.info()).strip()
385 headers = pycompat.bytestr(rsp.info()).strip()
386 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
386 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
387
387
388 if 'objects' in response:
388 if 'objects' in response:
389 response['objects'] = sorted(
389 response['objects'] = sorted(
390 response['objects'], key=lambda p: p['oid']
390 response['objects'], key=lambda p: p['oid']
391 )
391 )
392 self.ui.debug(
392 self.ui.debug(
393 b'%s\n'
393 b'%s\n'
394 % pycompat.bytesurl(
394 % pycompat.bytesurl(
395 json.dumps(
395 json.dumps(
396 response,
396 response,
397 indent=2,
397 indent=2,
398 separators=('', ': '),
398 separators=('', ': '),
399 sort_keys=True,
399 sort_keys=True,
400 )
400 )
401 )
401 )
402 )
402 )
403
403
404 def encodestr(x):
404 def encodestr(x):
405 if isinstance(x, pycompat.unicode):
405 if isinstance(x, pycompat.unicode):
406 return x.encode('utf-8')
406 return x.encode('utf-8')
407 return x
407 return x
408
408
409 return pycompat.rapply(encodestr, response)
409 return pycompat.rapply(encodestr, response)
410
410
411 def _checkforservererror(self, pointers, responses, action):
411 def _checkforservererror(self, pointers, responses, action):
412 """Scans errors from objects
412 """Scans errors from objects
413
413
414 Raises LfsRemoteError if any objects have an error"""
414 Raises LfsRemoteError if any objects have an error"""
415 for response in responses:
415 for response in responses:
416 # The server should return 404 when objects cannot be found. Some
416 # The server should return 404 when objects cannot be found. Some
417 # server implementation (ex. lfs-test-server) does not set "error"
417 # server implementation (ex. lfs-test-server) does not set "error"
418 # but just removes "download" from "actions". Treat that case
418 # but just removes "download" from "actions". Treat that case
419 # as the same as 404 error.
419 # as the same as 404 error.
420 if b'error' not in response:
420 if b'error' not in response:
421 if action == b'download' and action not in response.get(
421 if action == b'download' and action not in response.get(
422 b'actions', []
422 b'actions', []
423 ):
423 ):
424 code = 404
424 code = 404
425 else:
425 else:
426 continue
426 continue
427 else:
427 else:
428 # An error dict without a code doesn't make much sense, so
428 # An error dict without a code doesn't make much sense, so
429 # treat as a server error.
429 # treat as a server error.
430 code = response.get(b'error').get(b'code', 500)
430 code = response.get(b'error').get(b'code', 500)
431
431
432 ptrmap = {p.oid(): p for p in pointers}
432 ptrmap = {p.oid(): p for p in pointers}
433 p = ptrmap.get(response[b'oid'], None)
433 p = ptrmap.get(response[b'oid'], None)
434 if p:
434 if p:
435 filename = getattr(p, 'filename', b'unknown')
435 filename = getattr(p, 'filename', b'unknown')
436 errors = {
436 errors = {
437 404: b'The object does not exist',
437 404: b'The object does not exist',
438 410: b'The object was removed by the owner',
438 410: b'The object was removed by the owner',
439 422: b'Validation error',
439 422: b'Validation error',
440 500: b'Internal server error',
440 500: b'Internal server error',
441 }
441 }
442 msg = errors.get(code, b'status code %d' % code)
442 msg = errors.get(code, b'status code %d' % code)
443 raise LfsRemoteError(
443 raise LfsRemoteError(
444 _(b'LFS server error for "%s": %s') % (filename, msg)
444 _(b'LFS server error for "%s": %s') % (filename, msg)
445 )
445 )
446 else:
446 else:
447 raise LfsRemoteError(
447 raise LfsRemoteError(
448 _(b'LFS server error. Unsolicited response for oid %s')
448 _(b'LFS server error. Unsolicited response for oid %s')
449 % response[b'oid']
449 % response[b'oid']
450 )
450 )
451
451
452 def _extractobjects(self, response, pointers, action):
452 def _extractobjects(self, response, pointers, action):
453 """extract objects from response of the batch API
453 """extract objects from response of the batch API
454
454
455 response: parsed JSON object returned by batch API
455 response: parsed JSON object returned by batch API
456 return response['objects'] filtered by action
456 return response['objects'] filtered by action
457 raise if any object has an error
457 raise if any object has an error
458 """
458 """
459 # Scan errors from objects - fail early
459 # Scan errors from objects - fail early
460 objects = response.get(b'objects', [])
460 objects = response.get(b'objects', [])
461 self._checkforservererror(pointers, objects, action)
461 self._checkforservererror(pointers, objects, action)
462
462
463 # Filter objects with given action. Practically, this skips uploading
463 # Filter objects with given action. Practically, this skips uploading
464 # objects which exist in the server.
464 # objects which exist in the server.
465 filteredobjects = [
465 filteredobjects = [
466 o for o in objects if action in o.get(b'actions', [])
466 o for o in objects if action in o.get(b'actions', [])
467 ]
467 ]
468
468
469 return filteredobjects
469 return filteredobjects
470
470
    def _basictransfer(self, obj, action, localstore):
        """Download or upload a single object using basic transfer protocol

        obj: dict, an object description returned by batch API
        action: string, one of ['upload', 'download']
        localstore: blobstore.local

        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
        basic-transfers.md

        Raises error.Abort on a corrupt local blob before uploading,
        LfsRemoteError on HTTP/URL errors from the server.
        """
        oid = obj[b'oid']
        # The batch API response supplies the per-object transfer URL and any
        # extra headers (e.g. auth) the server wants echoed back.
        href = obj[b'actions'][action].get(b'href')
        headers = obj[b'actions'][action].get(b'header', {}).items()

        # urllib wants str URLs/headers, even though this module is bytes-based.
        request = util.urlreq.request(pycompat.strurl(href))
        if action == b'upload':
            # If uploading blobs, read data from local blobstore.
            if not localstore.verify(oid):
                raise error.Abort(
                    _(b'detected corrupt lfs object: %s') % oid,
                    hint=_(b'run hg verify'),
                )

        for k, v in headers:
            request.add_header(pycompat.strurl(k), pycompat.strurl(v))

        try:
            if action == b'upload':
                # Stream the blob from disk instead of loading it into memory;
                # request.data is closed in the ``finally`` below.
                request.data = lfsuploadfile(self.ui, localstore.path(oid))
                request.get_method = lambda: 'PUT'
                request.add_header('Content-Type', 'application/octet-stream')
                request.add_header('Content-Length', request.data.length)

            with contextlib.closing(self.urlopener.open(request)) as res:
                contentlength = res.info().get(b"content-length")
                ui = self.ui  # Shorten debug lines
                if self.ui.debugflag:
                    ui.debug(b'Status: %d\n' % res.status)
                    # lfs-test-server and hg serve return headers in different
                    # order
                    headers = pycompat.bytestr(res.info()).strip()
                    ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

                if action == b'download':
                    # If downloading blobs, store downloaded data to local
                    # blobstore
                    localstore.download(oid, res, contentlength)
                else:
                    # Upload: drain the (normally empty) response body in 1MB
                    # chunks so the connection can be reused, and surface any
                    # payload for debugging.
                    blocks = []
                    while True:
                        data = res.read(1048576)
                        if not data:
                            break
                        blocks.append(data)

                    response = b"".join(blocks)
                    if response:
                        ui.debug(b'lfs %s response: %s' % (action, response))
        except util.urlerr.httperror as ex:
            if self.ui.debugflag:
                self.ui.debug(
                    b'%s: %s\n' % (oid, ex.read())
                )  # XXX: also bytes?
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s (oid=%s, action=%s)')
                % (stringutil.forcebytestr(ex), oid, action)
            )
        except util.urlerr.urlerror as ex:
            hint = _(b'attempted connection to %s') % pycompat.bytesurl(
                util.urllibcompat.getfullurl(request)
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        finally:
            # Close the upload file object even when the request failed.
            if request.data:
                request.data.close()
548
548
    def _batch(self, pointers, localstore, action):
        """Transfer the blobs for ``pointers`` between the remote server and
        ``localstore``, driving one basic transfer per object returned by the
        batch API.

        action: b'upload' or b'download'; anything else is a ProgrammingError.
        Progress and per-file notes are reported through self.ui.
        """
        if action not in [b'upload', b'download']:
            raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)

        response = self._batchrequest(pointers, action)
        # Objects the server says actually need transferring for ``action``.
        objects = self._extractobjects(response, pointers, action)
        total = sum(x.get(b'size', 0) for x in objects)
        # oid -> size, for progress accounting after each transfer completes.
        sizes = {}
        for obj in objects:
            sizes[obj.get(b'oid')] = obj.get(b'size', 0)
        topic = {
            b'upload': _(b'lfs uploading'),
            b'download': _(b'lfs downloading'),
        }[action]
        if len(objects) > 1:
            self.ui.note(
                _(b'lfs: need to transfer %d objects (%s)\n')
                % (len(objects), util.bytecount(total))
            )

        def transfer(chunk):
            # Generator: transfers each object in ``chunk`` and yields
            # (1, oid) after each success.  Nothing runs until the progress
            # loop below (or the worker) consumes it.
            for obj in chunk:
                objsize = obj.get(b'size', 0)
                if self.ui.verbose:
                    if action == b'download':
                        msg = _(b'lfs: downloading %s (%s)\n')
                    elif action == b'upload':
                        msg = _(b'lfs: uploading %s (%s)\n')
                    self.ui.note(
                        msg % (obj.get(b'oid'), util.bytecount(objsize))
                    )
                retry = self.retry
                while True:
                    try:
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get(b'oid')
                        break
                    except socket.error as ex:
                        # Transient network failure: retry up to self.retry
                        # times before propagating.
                        if retry > 0:
                            self.ui.note(
                                _(b'lfs: failed: %r (remaining retry %d)\n')
                                % (stringutil.forcebytestr(ex), retry)
                            )
                            retry -= 1
                            continue
                        raise

        # Until https multiplexing gets sorted out
        if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
            oids = worker.worker(
                self.ui,
                0.1,
                transfer,
                (),
                sorted(objects, key=lambda o: o.get(b'oid')),
            )
        else:
            oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))

        with self.ui.makeprogress(
            topic, unit=_(b"bytes"), total=total
        ) as progress:
            progress.update(0)
            processed = 0
            blobs = 0
            # Consuming ``oids`` is what actually performs the transfers.
            for _one, oid in oids:
                processed += sizes[oid]
                blobs += 1
                progress.update(processed)
                self.ui.note(_(b'lfs: processed: %s\n') % oid)

        if blobs > 0:
            if action == b'upload':
                self.ui.status(
                    _(b'lfs: uploaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
            elif action == b'download':
                self.ui.status(
                    _(b'lfs: downloaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
631
631
632 def __del__(self):
632 def __del__(self):
633 # copied from mercurial/httppeer.py
633 # copied from mercurial/httppeer.py
634 urlopener = getattr(self, 'urlopener', None)
634 urlopener = getattr(self, 'urlopener', None)
635 if urlopener:
635 if urlopener:
636 for h in urlopener.handlers:
636 for h in urlopener.handlers:
637 h.close()
637 h.close()
638 getattr(h, "close_all", lambda: None)()
638 getattr(h, "close_all", lambda: None)()
639
639
640
640
class _dummyremote(object):
    """Dummy store keeping blobs in a directory under the repo vfs.

    Backs ``file://`` LFS endpoints; blobs are addressed by oid.
    """

    def __init__(self, repo, url):
        # Blobs live under <repo>/.hg/lfs/<url.path>.
        self.vfs = lfsvfs(repo.vfs.join(b'lfs', url.path))

    def writebatch(self, pointers, fromstore):
        # Copy each unique blob (verified) from the local store into ours.
        for pointer in _deduplicate(pointers):
            blob = fromstore.read(pointer.oid(), verify=True)
            with self.vfs(pointer.oid(), b'wb', atomictemp=True) as fp:
                fp.write(blob)

    def readbatch(self, pointers, tostore):
        # Stream each unique blob file into the local store.
        for pointer in _deduplicate(pointers):
            with self.vfs(pointer.oid(), b'rb') as fp:
                tostore.download(pointer.oid(), fp, None)
658
658
659
659
660 class _nullremote(object):
660 class _nullremote(object):
661 """Null store storing blobs to /dev/null."""
661 """Null store storing blobs to /dev/null."""
662
662
663 def __init__(self, repo, url):
663 def __init__(self, repo, url):
664 pass
664 pass
665
665
666 def writebatch(self, pointers, fromstore):
666 def writebatch(self, pointers, fromstore):
667 pass
667 pass
668
668
669 def readbatch(self, pointers, tostore):
669 def readbatch(self, pointers, tostore):
670 pass
670 pass
671
671
672
672
class _promptremote(object):
    """Prompt user to set lfs.url when accessed.

    Installed as the store for a missing URL scheme (see ``_storemap``); any
    read or write aborts with a hint to configure ``lfs.url``.
    """

    def __init__(self, repo, url):
        pass

    def writebatch(self, pointers, fromstore, ui=None):
        self._prompt()

    def readbatch(self, pointers, tostore, ui=None):
        self._prompt()

    def _prompt(self):
        # Abort unconditionally: no endpoint could be determined.
        raise error.Abort(_(b'lfs.url needs to be configured'))
687
687
688
688
# Map a URL scheme to the remote-store class handling it.  ``None`` (no
# scheme could be determined) yields the store that prompts the user to
# configure lfs.url.
_storemap = {
    b'https': _gitlfsremote,
    b'http': _gitlfsremote,
    b'file': _dummyremote,
    b'null': _nullremote,
    None: _promptremote,
}
696
696
697
697
def _deduplicate(pointers):
    """Remove any duplicate oids that exist in the list

    Order of first appearance is preserved; for duplicate oids the last
    pointer wins.
    """
    unique = util.sortdict()
    for pointer in pointers:
        unique[pointer.oid()] = pointer
    return unique.values()
704
704
705
705
def _verify(oid, content):
    """Raise LfsCorruptionError unless ``content`` hashes to ``oid``.

    ``oid`` is the expected hex SHA-256 digest of the blob ``content``.
    """
    actual = node.hex(hashlib.sha256(content).digest())
    if oid != actual:
        raise LfsCorruptionError(
            _(b'detected corrupt lfs object: %s') % oid,
            hint=_(b'run hg verify'),
        )
713
713
714
714
def remote(repo, remote=None):
    """remotestore factory. return a store in _storemap depending on config

    If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
    infer the endpoint, based on the remote repository using the same path
    adjustments as git. As an extension, 'http' is supported as well so that
    ``hg serve`` works out of the box.

    https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
    """
    lfsurl = repo.ui.config(b'lfs', b'url')
    url = util.url(lfsurl or b'')
    if lfsurl is None:
        if remote:
            path = remote
        elif util.safehasattr(repo, b'_subtoppath'):
            # The pull command sets this during the optional update phase, which
            # tells exactly where the pull originated, whether 'paths.default'
            # or explicit.
            path = repo._subtoppath
        else:
            # TODO: investigate 'paths.remote:lfsurl' style path customization,
            # and fall back to inferring from 'paths.remote' if unspecified.
            path = repo.ui.config(b'paths', b'default') or b''

        defaulturl = util.url(path)

        # TODO: support local paths as well.
        # TODO: consider the ssh -> https transformation that git applies
        if defaulturl.scheme in (b'http', b'https'):
            # Append a separator only when the path doesn't already end with
            # '/', so the '.git/info/lfs' suffix never yields a '//'.  (The
            # previous check used path[:-1], which drops the final character
            # instead of inspecting it, and so double-slashed any path that
            # already ended with '/'.)
            if defaulturl.path and defaulturl.path[-1:] != b'/':
                defaulturl.path += b'/'
            defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'

        url = util.url(bytes(defaulturl))
        repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)

    scheme = url.scheme
    if scheme not in _storemap:
        raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
    return _storemap[scheme](repo, url)
756
756
757
757
class LfsRemoteError(error.StorageError):
    """Raised when talking to the remote LFS server fails (HTTP errors,
    connection failures, or invalid/error batch responses)."""

    pass
760
760
761
761
class LfsCorruptionError(error.Abort):
    """Raised when a corrupt blob is detected, aborting an operation

    It exists to allow specialized handling on the server side."""
General Comments 0
You need to be logged in to leave comments. Login now