largefiles: docstrings for verify methods
Mads Kiilerich
r18574:4db9e31a default
hgext/largefiles/basestore.py
@@ -1,194 +1,199 @@
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''base class for store implementations and store-related utility code'''

import binascii
import re

from mercurial import util, node, hg
from mercurial.i18n import _

import lfutil

class StoreError(Exception):
    '''Raised when there is a problem getting files from or putting
    files to a central store.'''
    def __init__(self, filename, hash, url, detail):
        self.filename = filename
        self.hash = hash
        self.url = url
        self.detail = detail

    def longmessage(self):
        return (_("error getting id %s from url %s for file %s: %s\n") %
                (self.hash, self.url, self.filename, self.detail))

    def __str__(self):
        return "%s: %s" % (self.url, self.detail)

class basestore(object):
    def __init__(self, ui, repo, url):
        self.ui = ui
        self.repo = repo
        self.url = url

    def put(self, source, hash):
        '''Put source file into the store under <filename>/<hash>.'''
        raise NotImplementedError('abstract method')

    def exists(self, hashes):
        '''Check to see if the store contains the given hashes. Given an
        iterable of hashes it returns a mapping from hash to bool.'''
        raise NotImplementedError('abstract method')

    def get(self, files):
        '''Get the specified largefiles from the store and write to local
        files under repo.root. files is a list of (filename, hash)
        tuples. Return (success, missing), lists of files successfully
        downloaded and those not found in the store. success is a list
        of (filename, hash) tuples; missing is a list of filenames that
        we could not get. (The detailed error message will already have
        been presented to the user, so missing is just supplied as a
        summary.)'''
        success = []
        missing = []
        ui = self.ui

        at = 0
        for filename, hash in files:
            ui.progress(_('getting largefiles'), at, unit='lfile',
                        total=len(files))
            at += 1
            ui.note(_('getting %s:%s\n') % (filename, hash))

            storefilename = lfutil.storepath(self.repo, hash)
            tmpfile = util.atomictempfile(storefilename + '.tmp',
                                          createmode=self.repo.store.createmode)

            try:
                hhash = binascii.hexlify(self._getfile(tmpfile, filename, hash))
            except StoreError, err:
                ui.warn(err.longmessage())
                hhash = ""
            tmpfile.close()  # has probably already been closed!

            if hhash != hash:
                if hhash != "":
                    ui.warn(_('%s: data corruption (expected %s, got %s)\n')
                            % (filename, hash, hhash))
                util.unlink(storefilename + '.tmp')
                missing.append(filename)
                continue

            util.rename(storefilename + '.tmp', storefilename)
            lfutil.linktousercache(self.repo, hash)
            success.append((filename, hhash))

        ui.progress(_('getting largefiles'), None)
        return (success, missing)

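The get() loop above follows a write-to-temp, verify, rename-into-place pattern: _getfile() hashes the payload as it streams, and the temp file is published only after the hash checks out. A minimal standalone sketch of the same idea, using only the standard library (the fetch callback and paths are hypothetical, not part of the extension):

    import hashlib
    import os

    def fetch_verified(fetch, storepath, expected_sha1):
        # Download into a temporary name so a failed or corrupt transfer
        # never leaves a bad file at the final path.
        tmppath = storepath + '.tmp'
        hasher = hashlib.sha1()
        tmp = open(tmppath, 'wb')
        try:
            for chunk in fetch():            # hypothetical data source
                hasher.update(chunk)
                tmp.write(chunk)
        finally:
            tmp.close()
        if hasher.hexdigest() != expected_sha1:
            os.unlink(tmppath)               # discard corrupt data
            return False
        os.rename(tmppath, storepath)        # publish the verified file
        return True
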
    def verify(self, revs, contents=False):
        '''Verify the existence (and, optionally, contents) of every big
        file revision referenced by every changeset in revs.
        Return 0 if all is well, non-zero on any errors.'''
        failed = False

        self.ui.status(_('searching %d changesets for largefiles\n') %
                       len(revs))
        verified = set()                # set of (filename, filenode) tuples

        for rev in revs:
            cctx = self.repo[rev]
            cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))

            for standin in cctx:
                if self._verifyfile(cctx, cset, contents, standin, verified):
                    failed = True

        numrevs = len(verified)
        numlfiles = len(set([fname for (fname, fnode) in verified]))
        if contents:
            self.ui.status(
                _('verified contents of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        else:
            self.ui.status(
                _('verified existence of %d revisions of %d largefiles\n')
                % (numrevs, numlfiles))
        return int(failed)

    def _getfile(self, tmpfile, filename, hash):
        '''Fetch one revision of one file from the store and write it
        to tmpfile. Compute the hash of the file on-the-fly as it
        downloads and return the binary hash. Close tmpfile. Raise
        StoreError if unable to download the file (e.g. it does not
        exist in the store).'''
        raise NotImplementedError('abstract method')

    def _verifyfile(self, cctx, cset, contents, standin, verified):
        '''Perform the actual verification of a file in the store.
+       'cset' is only used in warnings.
+       'contents' controls verification of content hash.
+       'standin' is the standin path of the largefile to verify.
+       'verified' is maintained as a set of already verified files.
+       Returns _true_ if it is a standin and any problems are found!
        '''
        raise NotImplementedError('abstract method')

import localstore, wirestore

_storeprovider = {
    'file': [localstore.localstore],
    'http': [wirestore.wirestore],
    'https': [wirestore.wirestore],
    'ssh': [wirestore.wirestore],
    }

_scheme_re = re.compile(r'^([a-zA-Z0-9+.-]+)://')

# During clone this function is passed the src's ui object
# but it needs the dest's ui object so it can read out of
# the config file. Use repo.ui instead.
def _openstore(repo, remote=None, put=False):
    ui = repo.ui

    if not remote:
        lfpullsource = getattr(repo, 'lfpullsource', None)
        if lfpullsource:
            path = ui.expandpath(lfpullsource)
        else:
            path = ui.expandpath('default-push', 'default')

        # ui.expandpath() leaves 'default-push' and 'default' alone if
        # they cannot be expanded: fallback to the empty string,
        # meaning the current directory.
        if path == 'default-push' or path == 'default':
            path = ''
            remote = repo
        else:
            path, _branches = hg.parseurl(path)
            remote = hg.peer(repo, {}, path)

    # The path could be a scheme so use Mercurial's normal functionality
    # to resolve the scheme to a repository and use its path
    path = util.safehasattr(remote, 'url') and remote.url() or remote.path

    match = _scheme_re.match(path)
    if not match:  # regular filesystem path
        scheme = 'file'
    else:
        scheme = match.group(1)

    try:
        storeproviders = _storeprovider[scheme]
    except KeyError:
        raise util.Abort(_('unsupported URL scheme %r') % scheme)

    for classobj in storeproviders:
        try:
            return classobj(ui, repo, remote)
        except lfutil.storeprotonotcapable:
            pass

    raise util.Abort(_('%s does not appear to be a largefile store') % path)
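
_openstore() picks a store class by URL scheme and instantiates the first provider that accepts the remote's protocol. For reference, a minimal hypothetical subclass satisfying the basestore contract could look like the sketch below; dummystore, its blobs argument, and the in-memory dict are illustrative only and not part of the extension:

    class dummystore(basestore):
        '''Hypothetical store keeping largefile blobs in a dict (hash -> data).'''
        def __init__(self, ui, repo, url, blobs=None):
            super(dummystore, self).__init__(ui, repo, url)
            self._blobs = blobs or {}

        def put(self, source, hash):
            self._blobs[hash] = open(source, 'rb').read()

        def exists(self, hashes):
            # contract: map every requested hash to a boolean
            return dict((h, h in self._blobs) for h in hashes)

        def _getfile(self, tmpfile, filename, hash):
            if hash not in self._blobs:
                raise StoreError(filename, hash, self.url, 'not found')
            data = self._blobs[hash]
            tmpfile.write(data)
            # contract: return the *binary* sha1 digest; get() hexlifies it
            return util.sha1(data).digest()
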
hgext/largefiles/lfcommands.py
@@ -1,570 +1,570 @@
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command function for lfconvert, plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error, \
    cmdutil, scmutil, commands
from mercurial.i18n import _
from mercurial.lock import release

import lfutil
import basestore

# -- Commands ----------------------------------------------------------

def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    if opts['to_normal']:
        tolfile = False
    else:
        tolfile = True
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise util.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise util.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Lock destination to prevent modification while it is converted to.
        # Don't need to lock src because we are just reading from its history
        # which can't change.
        dstwlock = rdst.wlock()
        dstlock = rdst.lock()

        # Get a list of all changesets in the source. The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbetween().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns', default=[])
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add largefiles
            # to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)

            ui.progress(_('converting revisions'), None)
        success = True
    finally:
        rdst.dirstate.clear()
        release(dstlock, dstwlock)
        if not success:
            # we failed, remove the new directory
            shutil.rmtree(rdst.root)

def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)

            # If one file is missing, likely all files from this rev are
            if path is None:
                cachelfiles(ui, rsrc, ctx.node())
                path = lfutil.findfile(rsrc, hash)

                if path is None:
                    raise util.Abort(
                        _("missing largefile '%s' from revision %s")
                        % (f, node.hex(ctx.node())))

            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # largefile was modified, update standins
                fullpath = rdst.wjoin(f)
                util.makedirs(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo.ui, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)

def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

# Generate list of changed files
def _getchangedfiles(ctx, parents):
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)
    return files

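For a merge changeset, ctx.files() alone can miss files, so _getchangedfiles() also pulls in anything deleted relative to a parent and anything whose filenode differs from either parent. A toy illustration with plain dicts standing in for manifests (hypothetical data, not the Mercurial manifest API):

    mc  = {'a': 'n1', 'b': 'n2'}             # manifest of the merge commit
    mp1 = {'a': 'n1', 'b': 'n0', 'c': 'n3'}  # first parent's manifest
    mp2 = {'a': 'n1', 'b': 'n2'}             # second parent's manifest

    files = set()
    files |= (set(mp1) | set(mp2)) - set(mc)   # 'c' was removed -> included
    for f in mc:
        if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
            files.add(f)                       # 'b' differs from mp1 -> included
    print sorted(files)                        # ['b', 'c']; 'a' matches both parents
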
# Convert src parents to dst parents
def _convertparents(ctx, revmap):
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)
    return parents

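revmap accumulates source-node -> destination-node pairs as each changeset is recommitted (it is seeded with nullid -> nullid in lfconvert), so a parent lookup is a plain dict access; the while loop then pads to exactly two parents, as memctx expects. A minimal illustration with fake 20-byte ids (hypothetical values):

    from mercurial import node

    revmap = {node.nullid: node.nullid}   # seeded before conversion starts
    revmap['\x11' * 20] = '\xaa' * 20     # hypothetical src -> dst mapping

    parents = [revmap['\x11' * 20]]       # a single-parent changeset
    while len(parents) < 2:
        parents.append(node.nullid)       # pad with nullid for memctx
    assert parents == ['\xaa' * 20, node.nullid]
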
# Get memfilectx for a normal file
def _getnormalcontext(ui, ctx, f, revmap):
    try:
        fctx = ctx.filectx(f)
    except error.LookupError:
        raise IOError
    renamed = fctx.renamed()
    if renamed:
        renamed = renamed[0]

    data = fctx.data()
    if f == '.hgtags':
        data = _converttags(ui, revmap, data)
    return context.memfilectx(f, data, 'l' in fctx.flags(),
                              'x' in fctx.flags(), renamed)

# Remap tag data using a revision map
def _converttags(ui, revmap, data):
    newdata = []
    for line in data.splitlines():
        try:
            id, name = line.split(' ', 1)
        except ValueError:
            ui.warn(_('skipping incorrectly formatted tag %s\n')
                    % line)
            continue
        try:
            newid = node.bin(id)
        except TypeError:
            ui.warn(_('skipping incorrectly formatted id %s\n')
                    % id)
            continue
        try:
            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
                                        name))
        except KeyError:
            ui.warn(_('no mapping for id %s\n') % id)
            continue
    return ''.join(newdata)

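Each .hgtags line is "hex-node tag-name"; _converttags() rewrites the node through revmap so tags keep pointing at the corresponding converted changesets. A small self-contained illustration (the ids are hypothetical):

    from mercurial import node

    revmap = {node.bin('11' * 20): node.bin('aa' * 20)}   # hypothetical mapping
    data = '1111111111111111111111111111111111111111 v1.0\n'

    newdata = []
    for line in data.splitlines():
        id, name = line.split(' ', 1)
        newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]), name))
    print ''.join(newdata)   # 'aaaa...aaaa v1.0'
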
def _islfile(file, ctx, matcher, size):
    '''Return true if file should be considered a largefile, i.e.
    matcher matches it or it is larger than size.'''
    # never store special .hg* files as largefiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        return False

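Note that size is expressed in megabytes (it comes from --size or largefiles.size), so the comparison scales it to bytes, and the size tested is that of the file's first version:

    size = 10                          # MB, from --size or largefiles.size
    threshold = size * 1024 * 1024
    assert threshold == 10485760       # a file of exactly this many bytes is
                                       # converted (>=); one byte less is not
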
def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''

    if not files:
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    ui.debug("sending statlfile command for %d largefiles\n" % len(files))
    retval = store.exists(files)
    files = filter(lambda h: not retval[h], files)
    ui.debug("%d largefiles need to be uploaded\n" % len(files))

    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('largefile %s missing from store'
                               ' (needs to be uploaded)') % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

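store.exists() returns a hash -> bool mapping (see basestore above), so filtering down to the hashes the remote lacks takes one pass. An illustration with a hypothetical result (in Python 2, filter() on a list returns a list):

    files = ['aaa1', 'bbb2', 'ccc3']                       # hypothetical hashes
    retval = {'aaa1': True, 'bbb2': False, 'ccc3': False}  # as exists() might return
    files = filter(lambda h: not retval[h], files)
    print files    # ['bbb2', 'ccc3'] -- only these get uploaded
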
def verifylfiles(ui, repo, all=False, contents=False):
-   '''Verify that every big file revision in the current changeset
+   '''Verify that every largefile revision in the current changeset
    exists in the central store. With --contents, also verify that
-   the contents of each big file revision are correct (SHA-1 hash
+   the contents of each local largefile file revision are correct (SHA-1 hash
    matches the revision ID). With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def debugdirstate(ui, repo):
    '''Show basic information for the largefiles dirstate'''
    lfdirstate = lfutil.openlfdirstate(ui, repo)
    for file_, ent in sorted(lfdirstate._map.iteritems()):
        mode = '%3o' % (ent[1] & 0777 & ~util.umask)
        ui.write("%c %s %10d %s\n" % (ent[0], mode, ent[2], file_))

def cachelfiles(ui, repo, node, filelist=None):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing). cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    if filelist:
        lfiles = set(lfiles) & set(filelist)
    toget = []

    for lfile in lfiles:
        # If we are mid-merge, then we have to trust the standin that is in the
        # working copy to have the correct hashvalue. This is because the
        # original hg.merge() already updated the standin as part of the normal
        # merge process -- we just have to update the largefile to match.
        if (getattr(repo, "_ismerging", False) and
                os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
            expectedhash = lfutil.readstandin(repo, lfile)
        else:
            expectedhash = repo[node][lfutil.standin(lfile)].data().strip()

        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile)) or
                expectedhash != lfutil.hashfile(repo.wjoin(lfile))) and
                not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

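Callers treat the result like store.get()'s (success, missing) pair; downloadlfiles() below simply tallies both lists across revisions. A minimal hypothetical call:

    # inside some command, with ui and repo in hand (hypothetical usage)
    cached, missing = cachelfiles(ui, repo, '.')   # current changeset
    if missing:
        ui.warn('%d largefiles could not be fetched\n' % len(missing))
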
def downloadlfiles(ui, repo, rev=None):
    matchfn = scmutil.match(repo[None],
                            [repo.wjoin(lfutil.shortname)], {})
    def prepare(ctx, fns):
        pass
    totalsuccess = 0
    totalmissing = 0
    for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
                                      prepare):
        success, missing = cachelfiles(ui, repo, ctx.node())
        totalsuccess += len(success)
        totalmissing += len(missing)
    ui.status(_("%d additional largefiles cached\n") % totalsuccess)
    if totalmissing > 0:
        ui.status(_("%d largefiles failed to download\n") % totalmissing)
    return totalsuccess, totalmissing

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
        cachelfiles(ui, repo, '.', lfiles)

        updated, removed = 0, 0
        for f in lfiles:
            i = _updatelfile(repo, lfdirstate, f)
            if i:
                if i > 0:
                    updated += i
                else:
                    removed -= i
        if printmessage and (removed or updated) and not printed:
            ui.status(_('getting changed largefiles\n'))
            printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin + '.orig') and os.path.exists(abslfile):
            shutil.copyfile(abslfile, abslfile + '.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if (expecthash != '' and
            (not os.path.exists(abslfile) or
             expecthash != lfutil.hashfile(abslfile))):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                # use normallookup() to allocate entry in largefiles dirstate,
                # because lack of it misleads lfilesrepo.status() into
                # recognition that such cache missing files are REMOVED.
                lfdirstate.normallookup(lfile)
                return None  # don't try to set the mode
            else:
                # Synchronize largefile dirstate to the last modified time of
                # the file
                lfdirstate.normal(lfile)
                ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        # Remove lfiles for which the standin is deleted, unless the
        # lfile is added to the repository again. This happens when a
        # largefile is converted back to a normal file: the standin
        # disappears, but a new (normal) file appears as the lfile.
        if os.path.exists(abslfile) and lfile not in repo[None]:
            util.unlinkpath(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        # When rebasing, we need to synchronize the standin and the largefile,
        # because otherwise the largefile will get reverted. But for commit's
        # sake, we have to mark the file as unclean.
        if getattr(repo, "_isrebasing", False):
            lfdirstate.normallookup(lfile)
        else:
            lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        lfdirstate.drop(lfile)
    return ret

def catlfile(repo, lfile, rev, filename):
    hash = lfutil.readstandin(repo, lfile, rev)
    if not lfutil.inusercache(repo.ui, hash):
        store = basestore._openstore(repo)
        success, missing = store.get([(lfile, hash)])
        if len(success) != 1:
            raise util.Abort(
                _('largefile %s is not in cache and could not be downloaded')
                % lfile)
    path = lfutil.usercachepath(repo.ui, hash)
    fpout = cmdutil.makefileobj(repo, filename)
    fpin = open(path, "rb")
    fpout.write(fpin.read())
    fpout.close()
    fpin.close()
    return 0

# -- hg commands declarations ------------------------------------------------

cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', '',
                    _('minimum size (MB) for files to be converted '
                      'as largefiles'),
                    'SIZE'),
                   ('', 'to-normal', False,
                    _('convert from a largefiles repo to a normal repo')),
                   ],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }

commands.inferrepo += " lfconvert"
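
The cmdtable entry uses the old tuple-based registration (function, options list, synopsis), where each option is (short flag, long flag, default, help[, value name]). Typical invocations of the registered command, shown as comments since this is a CLI (the repository names are hypothetical):

    # hg lfconvert --size 10 oldrepo newrepo      # files >= 10 MB become largefiles
    # hg lfconvert oldrepo newrepo 'glob:**.iso'  # plus anything matching a pattern
    # hg lfconvert --to-normal lfrepo plainrepo   # convert back to a normal repo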