archive: support 'wdir()'...
Matt Harbison - r25601:3ec8351f default
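As an illustrative sketch of the feature named in the summary above (assuming archive now accepts 'wdir()' as a revision), the working directory could be exported with something like:

    hg archive -r 'wdir()' ../working-copy-snapshot.tar.gz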
@@ -1,1373 +1,1385 @@
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
10
11 import os
11 import os
12 import copy
12 import copy
13
13
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 archival, pathutil, revset
15 archival, pathutil, revset
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17
17
18 import lfutil
18 import lfutil
19 import lfcommands
19 import lfcommands
20 import basestore
20 import basestore
21
21
22 # -- Utility functions: commonly/repeatedly needed functionality ---------------
22 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23
23
24 def composelargefilematcher(match, manifest):
24 def composelargefilematcher(match, manifest):
25 '''create a matcher that matches only the largefiles in the original
25 '''create a matcher that matches only the largefiles in the original
26 matcher'''
26 matcher'''
27 m = copy.copy(match)
27 m = copy.copy(match)
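# a file counts as a largefile when its standin (the tracking file under '.hglf/') is present in the manifest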
28 lfile = lambda f: lfutil.standin(f) in manifest
28 lfile = lambda f: lfutil.standin(f) in manifest
29 m._files = filter(lfile, m._files)
29 m._files = filter(lfile, m._files)
30 m._fileroots = set(m._files)
30 m._fileroots = set(m._files)
31 m._always = False
31 m._always = False
32 origmatchfn = m.matchfn
32 origmatchfn = m.matchfn
33 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
33 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
34 return m
34 return m
35
35
36 def composenormalfilematcher(match, manifest, exclude=None):
36 def composenormalfilematcher(match, manifest, exclude=None):
37 excluded = set()
37 excluded = set()
38 if exclude is not None:
38 if exclude is not None:
39 excluded.update(exclude)
39 excluded.update(exclude)
40
40
41 m = copy.copy(match)
41 m = copy.copy(match)
42 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
42 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
43 manifest or f in excluded)
43 manifest or f in excluded)
44 m._files = filter(notlfile, m._files)
44 m._files = filter(notlfile, m._files)
45 m._fileroots = set(m._files)
45 m._fileroots = set(m._files)
46 m._always = False
46 m._always = False
47 origmatchfn = m.matchfn
47 origmatchfn = m.matchfn
48 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
48 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
49 return m
49 return m
50
50
51 def installnormalfilesmatchfn(manifest):
51 def installnormalfilesmatchfn(manifest):
52 '''installmatchfn with a matchfn that ignores all largefiles'''
52 '''installmatchfn with a matchfn that ignores all largefiles'''
53 def overridematch(ctx, pats=[], opts={}, globbed=False,
53 def overridematch(ctx, pats=[], opts={}, globbed=False,
54 default='relpath', badfn=None):
54 default='relpath', badfn=None):
55 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
55 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
56 return composenormalfilematcher(match, manifest)
56 return composenormalfilematcher(match, manifest)
57 oldmatch = installmatchfn(overridematch)
57 oldmatch = installmatchfn(overridematch)
58
58
59 def installmatchfn(f):
59 def installmatchfn(f):
60 '''monkey patch the scmutil module with a custom match function.
60 '''monkey patch the scmutil module with a custom match function.
61 Warning: it is monkey patching the _module_ at runtime! Not thread safe!'''
61 Warning: it is monkey patching the _module_ at runtime! Not thread safe!'''
62 oldmatch = scmutil.match
62 oldmatch = scmutil.match
63 setattr(f, 'oldmatch', oldmatch)
63 setattr(f, 'oldmatch', oldmatch)
64 scmutil.match = f
64 scmutil.match = f
65 return oldmatch
65 return oldmatch
66
66
67 def restorematchfn():
67 def restorematchfn():
68 '''restores scmutil.match to what it was before installmatchfn
68 '''restores scmutil.match to what it was before installmatchfn
69 was called. no-op if scmutil.match is its original function.
69 was called. no-op if scmutil.match is its original function.
70
70
71 Note that n calls to installmatchfn will require n calls to
71 Note that n calls to installmatchfn will require n calls to
72 restore the original matchfn.'''
72 restore the original matchfn.'''
73 scmutil.match = getattr(scmutil.match, 'oldmatch')
73 scmutil.match = getattr(scmutil.match, 'oldmatch')
74
74
75 def installmatchandpatsfn(f):
75 def installmatchandpatsfn(f):
76 oldmatchandpats = scmutil.matchandpats
76 oldmatchandpats = scmutil.matchandpats
77 setattr(f, 'oldmatchandpats', oldmatchandpats)
77 setattr(f, 'oldmatchandpats', oldmatchandpats)
78 scmutil.matchandpats = f
78 scmutil.matchandpats = f
79 return oldmatchandpats
79 return oldmatchandpats
80
80
81 def restorematchandpatsfn():
81 def restorematchandpatsfn():
82 '''restores scmutil.matchandpats to what it was before
82 '''restores scmutil.matchandpats to what it was before
83 installmatchandpatsfn was called. No-op if scmutil.matchandpats
83 installmatchandpatsfn was called. No-op if scmutil.matchandpats
84 is its original function.
84 is its original function.
85
85
86 Note that n calls to installmatchandpatsfn will require n calls
86 Note that n calls to installmatchandpatsfn will require n calls
87 to restore the original matchfn.'''
87 to restore the original matchfn.'''
88 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
88 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
89 scmutil.matchandpats)
89 scmutil.matchandpats)
90
90
91 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
91 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
92 large = opts.get('large')
92 large = opts.get('large')
93 lfsize = lfutil.getminsize(
93 lfsize = lfutil.getminsize(
94 ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
94 ui, lfutil.islfilesrepo(repo), opts.get('lfsize'))
95
95
96 lfmatcher = None
96 lfmatcher = None
97 if lfutil.islfilesrepo(repo):
97 if lfutil.islfilesrepo(repo):
98 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
98 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
99 if lfpats:
99 if lfpats:
100 lfmatcher = match_.match(repo.root, '', list(lfpats))
100 lfmatcher = match_.match(repo.root, '', list(lfpats))
101
101
102 lfnames = []
102 lfnames = []
103 m = matcher
103 m = matcher
104
104
105 wctx = repo[None]
105 wctx = repo[None]
106 for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
106 for f in repo.walk(match_.badmatch(m, lambda x, y: None)):
107 exact = m.exact(f)
107 exact = m.exact(f)
108 lfile = lfutil.standin(f) in wctx
108 lfile = lfutil.standin(f) in wctx
109 nfile = f in wctx
109 nfile = f in wctx
110 exists = lfile or nfile
110 exists = lfile or nfile
111
111
112 # addremove in core gets fancy with the name, add doesn't
112 # addremove in core gets fancy with the name, add doesn't
113 if isaddremove:
113 if isaddremove:
114 name = m.uipath(f)
114 name = m.uipath(f)
115 else:
115 else:
116 name = m.rel(f)
116 name = m.rel(f)
117
117
118 # Don't warn the user when they attempt to add a normal tracked file.
118 # Don't warn the user when they attempt to add a normal tracked file.
119 # The normal add code will do that for us.
119 # The normal add code will do that for us.
120 if exact and exists:
120 if exact and exists:
121 if lfile:
121 if lfile:
122 ui.warn(_('%s already a largefile\n') % name)
122 ui.warn(_('%s already a largefile\n') % name)
123 continue
123 continue
124
124
125 if (exact or not exists) and not lfutil.isstandin(f):
125 if (exact or not exists) and not lfutil.isstandin(f):
126 # In case the file was removed previously, but not committed
126 # In case the file was removed previously, but not committed
127 # (issue3507)
127 # (issue3507)
128 if not repo.wvfs.exists(f):
128 if not repo.wvfs.exists(f):
129 continue
129 continue
130
130
131 abovemin = (lfsize and
131 abovemin = (lfsize and
132 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
132 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
133 if large or abovemin or (lfmatcher and lfmatcher(f)):
133 if large or abovemin or (lfmatcher and lfmatcher(f)):
134 lfnames.append(f)
134 lfnames.append(f)
135 if ui.verbose or not exact:
135 if ui.verbose or not exact:
136 ui.status(_('adding %s as a largefile\n') % name)
136 ui.status(_('adding %s as a largefile\n') % name)
137
137
138 bad = []
138 bad = []
139
139
140 # Need to lock, otherwise there could be a race condition between
140 # Need to lock, otherwise there could be a race condition between
141 # when standins are created and added to the repo.
141 # when standins are created and added to the repo.
142 wlock = repo.wlock()
142 wlock = repo.wlock()
143 try:
143 try:
144 if not opts.get('dry_run'):
144 if not opts.get('dry_run'):
145 standins = []
145 standins = []
146 lfdirstate = lfutil.openlfdirstate(ui, repo)
146 lfdirstate = lfutil.openlfdirstate(ui, repo)
147 for f in lfnames:
147 for f in lfnames:
148 standinname = lfutil.standin(f)
148 standinname = lfutil.standin(f)
149 lfutil.writestandin(repo, standinname, hash='',
149 lfutil.writestandin(repo, standinname, hash='',
150 executable=lfutil.getexecutable(repo.wjoin(f)))
150 executable=lfutil.getexecutable(repo.wjoin(f)))
151 standins.append(standinname)
151 standins.append(standinname)
152 if lfdirstate[f] == 'r':
152 if lfdirstate[f] == 'r':
153 lfdirstate.normallookup(f)
153 lfdirstate.normallookup(f)
154 else:
154 else:
155 lfdirstate.add(f)
155 lfdirstate.add(f)
156 lfdirstate.write()
156 lfdirstate.write()
157 bad += [lfutil.splitstandin(f)
157 bad += [lfutil.splitstandin(f)
158 for f in repo[None].add(standins)
158 for f in repo[None].add(standins)
159 if f in m.files()]
159 if f in m.files()]
160
160
161 added = [f for f in lfnames if f not in bad]
161 added = [f for f in lfnames if f not in bad]
162 finally:
162 finally:
163 wlock.release()
163 wlock.release()
164 return added, bad
164 return added, bad
165
165
166 def removelargefiles(ui, repo, isaddremove, matcher, **opts):
166 def removelargefiles(ui, repo, isaddremove, matcher, **opts):
167 after = opts.get('after')
167 after = opts.get('after')
168 m = composelargefilematcher(matcher, repo[None].manifest())
168 m = composelargefilematcher(matcher, repo[None].manifest())
169 try:
169 try:
170 repo.lfstatus = True
170 repo.lfstatus = True
171 s = repo.status(match=m, clean=not isaddremove)
171 s = repo.status(match=m, clean=not isaddremove)
172 finally:
172 finally:
173 repo.lfstatus = False
173 repo.lfstatus = False
174 manifest = repo[None].manifest()
174 manifest = repo[None].manifest()
175 modified, added, deleted, clean = [[f for f in list
175 modified, added, deleted, clean = [[f for f in list
176 if lfutil.standin(f) in manifest]
176 if lfutil.standin(f) in manifest]
177 for list in (s.modified, s.added,
177 for list in (s.modified, s.added,
178 s.deleted, s.clean)]
178 s.deleted, s.clean)]
179
179
180 def warn(files, msg):
180 def warn(files, msg):
181 for f in files:
181 for f in files:
182 ui.warn(msg % m.rel(f))
182 ui.warn(msg % m.rel(f))
183 return int(len(files) > 0)
183 return int(len(files) > 0)
184
184
185 result = 0
185 result = 0
186
186
187 if after:
187 if after:
188 remove = deleted
188 remove = deleted
189 result = warn(modified + added + clean,
189 result = warn(modified + added + clean,
190 _('not removing %s: file still exists\n'))
190 _('not removing %s: file still exists\n'))
191 else:
191 else:
192 remove = deleted + clean
192 remove = deleted + clean
193 result = warn(modified, _('not removing %s: file is modified (use -f'
193 result = warn(modified, _('not removing %s: file is modified (use -f'
194 ' to force removal)\n'))
194 ' to force removal)\n'))
195 result = warn(added, _('not removing %s: file has been marked for add'
195 result = warn(added, _('not removing %s: file has been marked for add'
196 ' (use forget to undo)\n')) or result
196 ' (use forget to undo)\n')) or result
197
197
198 # Need to lock because standin files are deleted then removed from the
198 # Need to lock because standin files are deleted then removed from the
199 # repository and we could race in-between.
199 # repository and we could race in-between.
200 wlock = repo.wlock()
200 wlock = repo.wlock()
201 try:
201 try:
202 lfdirstate = lfutil.openlfdirstate(ui, repo)
202 lfdirstate = lfutil.openlfdirstate(ui, repo)
203 for f in sorted(remove):
203 for f in sorted(remove):
204 if ui.verbose or not m.exact(f):
204 if ui.verbose or not m.exact(f):
205 # addremove in core gets fancy with the name, remove doesn't
205 # addremove in core gets fancy with the name, remove doesn't
206 if isaddremove:
206 if isaddremove:
207 name = m.uipath(f)
207 name = m.uipath(f)
208 else:
208 else:
209 name = m.rel(f)
209 name = m.rel(f)
210 ui.status(_('removing %s\n') % name)
210 ui.status(_('removing %s\n') % name)
211
211
212 if not opts.get('dry_run'):
212 if not opts.get('dry_run'):
213 if not after:
213 if not after:
214 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
214 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
215
215
216 if opts.get('dry_run'):
216 if opts.get('dry_run'):
217 return result
217 return result
218
218
219 remove = [lfutil.standin(f) for f in remove]
219 remove = [lfutil.standin(f) for f in remove]
220 # If this is being called by addremove, let the original addremove
220 # If this is being called by addremove, let the original addremove
221 # function handle this.
221 # function handle this.
222 if not isaddremove:
222 if not isaddremove:
223 for f in remove:
223 for f in remove:
224 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
224 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
225 repo[None].forget(remove)
225 repo[None].forget(remove)
226
226
227 for f in remove:
227 for f in remove:
228 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
228 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
229 False)
229 False)
230
230
231 lfdirstate.write()
231 lfdirstate.write()
232 finally:
232 finally:
233 wlock.release()
233 wlock.release()
234
234
235 return result
235 return result
236
236
237 # For overriding mercurial.hgweb.webcommands so that largefiles will
237 # For overriding mercurial.hgweb.webcommands so that largefiles will
238 # appear at their right place in the manifests.
238 # appear at their right place in the manifests.
239 def decodepath(orig, path):
239 def decodepath(orig, path):
240 return lfutil.splitstandin(path) or path
240 return lfutil.splitstandin(path) or path
241
241
242 # -- Wrappers: modify existing commands --------------------------------
242 # -- Wrappers: modify existing commands --------------------------------
243
243
244 def overrideadd(orig, ui, repo, *pats, **opts):
244 def overrideadd(orig, ui, repo, *pats, **opts):
245 if opts.get('normal') and opts.get('large'):
245 if opts.get('normal') and opts.get('large'):
246 raise util.Abort(_('--normal cannot be used with --large'))
246 raise util.Abort(_('--normal cannot be used with --large'))
247 return orig(ui, repo, *pats, **opts)
247 return orig(ui, repo, *pats, **opts)
248
248
249 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
249 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
250 # The --normal flag short circuits this override
250 # The --normal flag short circuits this override
251 if opts.get('normal'):
251 if opts.get('normal'):
252 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
252 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
253
253
254 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
254 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
255 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
255 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
256 ladded)
256 ladded)
257 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
257 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
258
258
259 bad.extend(f for f in lbad)
259 bad.extend(f for f in lbad)
260 return bad
260 return bad
261
261
262 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
262 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos):
263 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
263 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
264 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
264 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos)
265 return removelargefiles(ui, repo, False, matcher, after=after,
265 return removelargefiles(ui, repo, False, matcher, after=after,
266 force=force) or result
266 force=force) or result
267
267
268 def overridestatusfn(orig, repo, rev2, **opts):
268 def overridestatusfn(orig, repo, rev2, **opts):
269 try:
269 try:
270 repo._repo.lfstatus = True
270 repo._repo.lfstatus = True
271 return orig(repo, rev2, **opts)
271 return orig(repo, rev2, **opts)
272 finally:
272 finally:
273 repo._repo.lfstatus = False
273 repo._repo.lfstatus = False
274
274
275 def overridestatus(orig, ui, repo, *pats, **opts):
275 def overridestatus(orig, ui, repo, *pats, **opts):
276 try:
276 try:
277 repo.lfstatus = True
277 repo.lfstatus = True
278 return orig(ui, repo, *pats, **opts)
278 return orig(ui, repo, *pats, **opts)
279 finally:
279 finally:
280 repo.lfstatus = False
280 repo.lfstatus = False
281
281
282 def overridedirty(orig, repo, ignoreupdate=False):
282 def overridedirty(orig, repo, ignoreupdate=False):
283 try:
283 try:
284 repo._repo.lfstatus = True
284 repo._repo.lfstatus = True
285 return orig(repo, ignoreupdate)
285 return orig(repo, ignoreupdate)
286 finally:
286 finally:
287 repo._repo.lfstatus = False
287 repo._repo.lfstatus = False
288
288
289 def overridelog(orig, ui, repo, *pats, **opts):
289 def overridelog(orig, ui, repo, *pats, **opts):
290 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
290 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
291 default='relpath', badfn=None):
291 default='relpath', badfn=None):
292 """Matcher that merges root directory with .hglf, suitable for log.
292 """Matcher that merges root directory with .hglf, suitable for log.
293 It is still possible to match .hglf directly.
293 It is still possible to match .hglf directly.
294 For any listed files run log on the standin too.
294 For any listed files run log on the standin too.
295 matchfn tries both the given filename and with .hglf stripped.
295 matchfn tries both the given filename and with .hglf stripped.
296 """
296 """
297 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
297 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
298 badfn=badfn)
298 badfn=badfn)
299 m, p = copy.copy(matchandpats)
299 m, p = copy.copy(matchandpats)
300
300
301 if m.always():
301 if m.always():
302 # We want to match everything anyway, so there's no benefit trying
302 # We want to match everything anyway, so there's no benefit trying
303 # to add standins.
303 # to add standins.
304 return matchandpats
304 return matchandpats
305
305
306 pats = set(p)
306 pats = set(p)
307
307
308 def fixpats(pat, tostandin=lfutil.standin):
308 def fixpats(pat, tostandin=lfutil.standin):
309 if pat.startswith('set:'):
309 if pat.startswith('set:'):
310 return pat
310 return pat
311
311
312 kindpat = match_._patsplit(pat, None)
312 kindpat = match_._patsplit(pat, None)
313
313
314 if kindpat[0] is not None:
314 if kindpat[0] is not None:
315 return kindpat[0] + ':' + tostandin(kindpat[1])
315 return kindpat[0] + ':' + tostandin(kindpat[1])
316 return tostandin(kindpat[1])
316 return tostandin(kindpat[1])
317
317
318 if m._cwd:
318 if m._cwd:
319 hglf = lfutil.shortname
319 hglf = lfutil.shortname
320 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
320 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
321
321
322 def tostandin(f):
322 def tostandin(f):
323 # The file may already be a standin, so truncate the back
323 # The file may already be a standin, so truncate the back
324 # prefix and test before mangling it. This avoids turning
324 # prefix and test before mangling it. This avoids turning
325 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
325 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
326 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
326 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
327 return f
327 return f
328
328
329 # An absolute path is from outside the repo, so truncate the
329 # An absolute path is from outside the repo, so truncate the
330 # path to the root before building the standin. Otherwise cwd
330 # path to the root before building the standin. Otherwise cwd
331 # is somewhere in the repo, relative to root, and needs to be
331 # is somewhere in the repo, relative to root, and needs to be
332 # prepended before building the standin.
332 # prepended before building the standin.
333 if os.path.isabs(m._cwd):
333 if os.path.isabs(m._cwd):
334 f = f[len(back):]
334 f = f[len(back):]
335 else:
335 else:
336 f = m._cwd + '/' + f
336 f = m._cwd + '/' + f
337 return back + lfutil.standin(f)
337 return back + lfutil.standin(f)
338
338
339 pats.update(fixpats(f, tostandin) for f in p)
339 pats.update(fixpats(f, tostandin) for f in p)
340 else:
340 else:
341 def tostandin(f):
341 def tostandin(f):
342 if lfutil.splitstandin(f):
342 if lfutil.splitstandin(f):
343 return f
343 return f
344 return lfutil.standin(f)
344 return lfutil.standin(f)
345 pats.update(fixpats(f, tostandin) for f in p)
345 pats.update(fixpats(f, tostandin) for f in p)
346
346
347 for i in range(0, len(m._files)):
347 for i in range(0, len(m._files)):
348 # Don't add '.hglf' to m.files, since that is already covered by '.'
348 # Don't add '.hglf' to m.files, since that is already covered by '.'
349 if m._files[i] == '.':
349 if m._files[i] == '.':
350 continue
350 continue
351 standin = lfutil.standin(m._files[i])
351 standin = lfutil.standin(m._files[i])
352 # If the "standin" is a directory, append instead of replace to
352 # If the "standin" is a directory, append instead of replace to
353 # support naming a directory on the command line with only
353 # support naming a directory on the command line with only
354 # largefiles. The original directory is kept to support normal
354 # largefiles. The original directory is kept to support normal
355 # files.
355 # files.
356 if standin in repo[ctx.node()]:
356 if standin in repo[ctx.node()]:
357 m._files[i] = standin
357 m._files[i] = standin
358 elif m._files[i] not in repo[ctx.node()] \
358 elif m._files[i] not in repo[ctx.node()] \
359 and repo.wvfs.isdir(standin):
359 and repo.wvfs.isdir(standin):
360 m._files.append(standin)
360 m._files.append(standin)
361
361
362 m._fileroots = set(m._files)
362 m._fileroots = set(m._files)
363 m._always = False
363 m._always = False
364 origmatchfn = m.matchfn
364 origmatchfn = m.matchfn
365 def lfmatchfn(f):
365 def lfmatchfn(f):
366 lf = lfutil.splitstandin(f)
366 lf = lfutil.splitstandin(f)
367 if lf is not None and origmatchfn(lf):
367 if lf is not None and origmatchfn(lf):
368 return True
368 return True
369 r = origmatchfn(f)
369 r = origmatchfn(f)
370 return r
370 return r
371 m.matchfn = lfmatchfn
371 m.matchfn = lfmatchfn
372
372
373 ui.debug('updated patterns: %s\n' % sorted(pats))
373 ui.debug('updated patterns: %s\n' % sorted(pats))
374 return m, pats
374 return m, pats
375
375
376 # For hg log --patch, the match object is used in two different senses:
376 # For hg log --patch, the match object is used in two different senses:
377 # (1) to determine what revisions should be printed out, and
377 # (1) to determine what revisions should be printed out, and
378 # (2) to determine what files to print out diffs for.
378 # (2) to determine what files to print out diffs for.
379 # The magic matchandpats override should be used for case (1) but not for
379 # The magic matchandpats override should be used for case (1) but not for
380 # case (2).
380 # case (2).
381 def overridemakelogfilematcher(repo, pats, opts, badfn=None):
381 def overridemakelogfilematcher(repo, pats, opts, badfn=None):
382 wctx = repo[None]
382 wctx = repo[None]
383 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
383 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
384 return lambda rev: match
384 return lambda rev: match
385
385
386 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
386 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
387 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
387 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
388 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
388 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
389
389
390 try:
390 try:
391 return orig(ui, repo, *pats, **opts)
391 return orig(ui, repo, *pats, **opts)
392 finally:
392 finally:
393 restorematchandpatsfn()
393 restorematchandpatsfn()
394 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
394 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
395
395
396 def overrideverify(orig, ui, repo, *pats, **opts):
396 def overrideverify(orig, ui, repo, *pats, **opts):
397 large = opts.pop('large', False)
397 large = opts.pop('large', False)
398 all = opts.pop('lfa', False)
398 all = opts.pop('lfa', False)
399 contents = opts.pop('lfc', False)
399 contents = opts.pop('lfc', False)
400
400
401 result = orig(ui, repo, *pats, **opts)
401 result = orig(ui, repo, *pats, **opts)
402 if large or all or contents:
402 if large or all or contents:
403 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
403 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
404 return result
404 return result
405
405
406 def overridedebugstate(orig, ui, repo, *pats, **opts):
406 def overridedebugstate(orig, ui, repo, *pats, **opts):
407 large = opts.pop('large', False)
407 large = opts.pop('large', False)
408 if large:
408 if large:
409 class fakerepo(object):
409 class fakerepo(object):
410 dirstate = lfutil.openlfdirstate(ui, repo)
410 dirstate = lfutil.openlfdirstate(ui, repo)
411 orig(ui, fakerepo, *pats, **opts)
411 orig(ui, fakerepo, *pats, **opts)
412 else:
412 else:
413 orig(ui, repo, *pats, **opts)
413 orig(ui, repo, *pats, **opts)
414
414
415 # Before starting the manifest merge, merge.updates will call
415 # Before starting the manifest merge, merge.updates will call
416 # _checkunknownfile to check if there are any files in the merged-in
416 # _checkunknownfile to check if there are any files in the merged-in
417 # changeset that collide with unknown files in the working copy.
417 # changeset that collide with unknown files in the working copy.
418 #
418 #
419 # The largefiles are seen as unknown, so this prevents us from merging
419 # The largefiles are seen as unknown, so this prevents us from merging
420 # in a file 'foo' if we already have a largefile with the same name.
420 # in a file 'foo' if we already have a largefile with the same name.
421 #
421 #
422 # The overridden function filters the unknown files by removing any
422 # The overridden function filters the unknown files by removing any
423 # largefiles. This makes the merge proceed and we can then handle this
423 # largefiles. This makes the merge proceed and we can then handle this
424 # case further in the overridden calculateupdates function below.
424 # case further in the overridden calculateupdates function below.
425 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
425 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
426 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
426 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
427 return False
427 return False
428 return origfn(repo, wctx, mctx, f, f2)
428 return origfn(repo, wctx, mctx, f, f2)
429
429
430 # The manifest merge handles conflicts on the manifest level. We want
430 # The manifest merge handles conflicts on the manifest level. We want
431 # to handle changes in largefile-ness of files at this level too.
431 # to handle changes in largefile-ness of files at this level too.
432 #
432 #
433 # The strategy is to run the original calculateupdates and then process
433 # The strategy is to run the original calculateupdates and then process
434 # the action list it outputs. There are two cases we need to deal with:
434 # the action list it outputs. There are two cases we need to deal with:
435 #
435 #
436 # 1. Normal file in p1, largefile in p2. Here the largefile is
436 # 1. Normal file in p1, largefile in p2. Here the largefile is
437 # detected via its standin file, which will enter the working copy
437 # detected via its standin file, which will enter the working copy
438 # with a "get" action. It is not "merge" since the standin is all
438 # with a "get" action. It is not "merge" since the standin is all
439 # Mercurial is concerned with at this level -- the link to the
439 # Mercurial is concerned with at this level -- the link to the
440 # existing normal file is not relevant here.
440 # existing normal file is not relevant here.
441 #
441 #
442 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
442 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
443 # since the largefile will be present in the working copy and
443 # since the largefile will be present in the working copy and
444 # different from the normal file in p2. Mercurial therefore
444 # different from the normal file in p2. Mercurial therefore
445 # triggers a merge action.
445 # triggers a merge action.
446 #
446 #
447 # In both cases, we prompt the user and emit new actions to either
447 # In both cases, we prompt the user and emit new actions to either
448 # remove the standin (if the normal file was kept) or to remove the
448 # remove the standin (if the normal file was kept) or to remove the
449 # normal file and get the standin (if the largefile was kept). The
449 # normal file and get the standin (if the largefile was kept). The
450 # default prompt answer is to use the largefile version since it was
450 # default prompt answer is to use the largefile version since it was
451 # presumably changed on purpose.
451 # presumably changed on purpose.
452 #
452 #
453 # Finally, the merge.applyupdates function will then take care of
453 # Finally, the merge.applyupdates function will then take care of
454 # writing the files into the working copy and lfcommands.updatelfiles
454 # writing the files into the working copy and lfcommands.updatelfiles
455 # will update the largefiles.
455 # will update the largefiles.
456 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
456 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
457 partial, acceptremote, followcopies):
457 partial, acceptremote, followcopies):
458 overwrite = force and not branchmerge
458 overwrite = force and not branchmerge
459 actions, diverge, renamedelete = origfn(
459 actions, diverge, renamedelete = origfn(
460 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
460 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
461 followcopies)
461 followcopies)
462
462
463 if overwrite:
463 if overwrite:
464 return actions, diverge, renamedelete
464 return actions, diverge, renamedelete
465
465
466 # Convert to dictionary with filename as key and action as value.
466 # Convert to dictionary with filename as key and action as value.
467 lfiles = set()
467 lfiles = set()
468 for f in actions:
468 for f in actions:
469 splitstandin = f and lfutil.splitstandin(f)
469 splitstandin = f and lfutil.splitstandin(f)
470 if splitstandin in p1:
470 if splitstandin in p1:
471 lfiles.add(splitstandin)
471 lfiles.add(splitstandin)
472 elif lfutil.standin(f) in p1:
472 elif lfutil.standin(f) in p1:
473 lfiles.add(f)
473 lfiles.add(f)
474
474
475 for lfile in lfiles:
475 for lfile in lfiles:
476 standin = lfutil.standin(lfile)
476 standin = lfutil.standin(lfile)
477 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
477 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
478 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
478 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
479 if sm in ('g', 'dc') and lm != 'r':
479 if sm in ('g', 'dc') and lm != 'r':
480 # Case 1: normal file in the working copy, largefile in
480 # Case 1: normal file in the working copy, largefile in
481 # the second parent
481 # the second parent
482 usermsg = _('remote turned local normal file %s into a largefile\n'
482 usermsg = _('remote turned local normal file %s into a largefile\n'
483 'use (l)argefile or keep (n)ormal file?'
483 'use (l)argefile or keep (n)ormal file?'
484 '$$ &Largefile $$ &Normal file') % lfile
484 '$$ &Largefile $$ &Normal file') % lfile
485 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
485 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
486 actions[lfile] = ('r', None, 'replaced by standin')
486 actions[lfile] = ('r', None, 'replaced by standin')
487 actions[standin] = ('g', sargs, 'replaces standin')
487 actions[standin] = ('g', sargs, 'replaces standin')
488 else: # keep local normal file
488 else: # keep local normal file
489 actions[lfile] = ('k', None, 'replaces standin')
489 actions[lfile] = ('k', None, 'replaces standin')
490 if branchmerge:
490 if branchmerge:
491 actions[standin] = ('k', None, 'replaced by non-standin')
491 actions[standin] = ('k', None, 'replaced by non-standin')
492 else:
492 else:
493 actions[standin] = ('r', None, 'replaced by non-standin')
493 actions[standin] = ('r', None, 'replaced by non-standin')
494 elif lm in ('g', 'dc') and sm != 'r':
494 elif lm in ('g', 'dc') and sm != 'r':
495 # Case 2: largefile in the working copy, normal file in
495 # Case 2: largefile in the working copy, normal file in
496 # the second parent
496 # the second parent
497 usermsg = _('remote turned local largefile %s into a normal file\n'
497 usermsg = _('remote turned local largefile %s into a normal file\n'
498 'keep (l)argefile or use (n)ormal file?'
498 'keep (l)argefile or use (n)ormal file?'
499 '$$ &Largefile $$ &Normal file') % lfile
499 '$$ &Largefile $$ &Normal file') % lfile
500 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
500 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
501 if branchmerge:
501 if branchmerge:
502 # largefile can be restored from standin safely
502 # largefile can be restored from standin safely
503 actions[lfile] = ('k', None, 'replaced by standin')
503 actions[lfile] = ('k', None, 'replaced by standin')
504 actions[standin] = ('k', None, 'replaces standin')
504 actions[standin] = ('k', None, 'replaces standin')
505 else:
505 else:
506 # "lfile" should be marked as "removed" without
506 # "lfile" should be marked as "removed" without
507 # removal of itself
507 # removal of itself
508 actions[lfile] = ('lfmr', None,
508 actions[lfile] = ('lfmr', None,
509 'forget non-standin largefile')
509 'forget non-standin largefile')
510
510
511 # linear-merge should treat this largefile as 're-added'
511 # linear-merge should treat this largefile as 're-added'
512 actions[standin] = ('a', None, 'keep standin')
512 actions[standin] = ('a', None, 'keep standin')
513 else: # pick remote normal file
513 else: # pick remote normal file
514 actions[lfile] = ('g', largs, 'replaces standin')
514 actions[lfile] = ('g', largs, 'replaces standin')
515 actions[standin] = ('r', None, 'replaced by non-standin')
515 actions[standin] = ('r', None, 'replaced by non-standin')
516
516
517 return actions, diverge, renamedelete
517 return actions, diverge, renamedelete
518
518
519 def mergerecordupdates(orig, repo, actions, branchmerge):
519 def mergerecordupdates(orig, repo, actions, branchmerge):
520 if 'lfmr' in actions:
520 if 'lfmr' in actions:
521 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
521 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
522 for lfile, args, msg in actions['lfmr']:
522 for lfile, args, msg in actions['lfmr']:
523 # this should be executed before 'orig', to execute 'remove'
523 # this should be executed before 'orig', to execute 'remove'
524 # before all other actions
524 # before all other actions
525 repo.dirstate.remove(lfile)
525 repo.dirstate.remove(lfile)
526 # make sure lfile doesn't get synclfdirstate'd as normal
526 # make sure lfile doesn't get synclfdirstate'd as normal
527 lfdirstate.add(lfile)
527 lfdirstate.add(lfile)
528 lfdirstate.write()
528 lfdirstate.write()
529
529
530 return orig(repo, actions, branchmerge)
530 return orig(repo, actions, branchmerge)
531
531
532
532
533 # Override filemerge to prompt the user about how they wish to merge
533 # Override filemerge to prompt the user about how they wish to merge
534 # largefiles. This will handle identical edits without prompting the user.
534 # largefiles. This will handle identical edits without prompting the user.
535 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
535 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
536 if not lfutil.isstandin(orig):
536 if not lfutil.isstandin(orig):
537 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
537 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
538
538
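# standin files contain only the hash of their largefile, so comparing standin data here effectively compares the largefile contents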
539 ahash = fca.data().strip().lower()
539 ahash = fca.data().strip().lower()
540 dhash = fcd.data().strip().lower()
540 dhash = fcd.data().strip().lower()
541 ohash = fco.data().strip().lower()
541 ohash = fco.data().strip().lower()
542 if (ohash != ahash and
542 if (ohash != ahash and
543 ohash != dhash and
543 ohash != dhash and
544 (dhash == ahash or
544 (dhash == ahash or
545 repo.ui.promptchoice(
545 repo.ui.promptchoice(
546 _('largefile %s has a merge conflict\nancestor was %s\n'
546 _('largefile %s has a merge conflict\nancestor was %s\n'
547 'keep (l)ocal %s or\ntake (o)ther %s?'
547 'keep (l)ocal %s or\ntake (o)ther %s?'
548 '$$ &Local $$ &Other') %
548 '$$ &Local $$ &Other') %
549 (lfutil.splitstandin(orig), ahash, dhash, ohash),
549 (lfutil.splitstandin(orig), ahash, dhash, ohash),
550 0) == 1)):
550 0) == 1)):
551 repo.wwrite(fcd.path(), fco.data(), fco.flags())
551 repo.wwrite(fcd.path(), fco.data(), fco.flags())
552 return 0
552 return 0
553
553
554 def copiespathcopies(orig, ctx1, ctx2, match=None):
554 def copiespathcopies(orig, ctx1, ctx2, match=None):
555 copies = orig(ctx1, ctx2, match=match)
555 copies = orig(ctx1, ctx2, match=match)
556 updated = {}
556 updated = {}
557
557
558 for k, v in copies.iteritems():
558 for k, v in copies.iteritems():
559 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
559 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
560
560
561 return updated
561 return updated
562
562
563 # Copy first changes the matchers to match standins instead of
563 # Copy first changes the matchers to match standins instead of
564 # largefiles. Then it overrides util.copyfile; in that function it
564 # largefiles. Then it overrides util.copyfile; in that function it
565 # checks if the destination largefile already exists. It also keeps a
565 # checks if the destination largefile already exists. It also keeps a
566 # list of copied files so that the largefiles can be copied and the
566 # list of copied files so that the largefiles can be copied and the
567 # dirstate updated.
567 # dirstate updated.
568 def overridecopy(orig, ui, repo, pats, opts, rename=False):
568 def overridecopy(orig, ui, repo, pats, opts, rename=False):
569 # doesn't remove largefile on rename
569 # doesn't remove largefile on rename
570 if len(pats) < 2:
570 if len(pats) < 2:
571 # this isn't legal, let the original function deal with it
571 # this isn't legal, let the original function deal with it
572 return orig(ui, repo, pats, opts, rename)
572 return orig(ui, repo, pats, opts, rename)
573
573
574 # This could copy both lfiles and normal files in one command,
574 # This could copy both lfiles and normal files in one command,
575 # but we don't want to do that. First replace their matcher to
575 # but we don't want to do that. First replace their matcher to
576 # only match normal files and run it, then replace it to just
576 # only match normal files and run it, then replace it to just
577 # match largefiles and run it again.
577 # match largefiles and run it again.
578 nonormalfiles = False
578 nonormalfiles = False
579 nolfiles = False
579 nolfiles = False
580 installnormalfilesmatchfn(repo[None].manifest())
580 installnormalfilesmatchfn(repo[None].manifest())
581 try:
581 try:
582 result = orig(ui, repo, pats, opts, rename)
582 result = orig(ui, repo, pats, opts, rename)
583 except util.Abort, e:
583 except util.Abort, e:
584 if str(e) != _('no files to copy'):
584 if str(e) != _('no files to copy'):
585 raise e
585 raise e
586 else:
586 else:
587 nonormalfiles = True
587 nonormalfiles = True
588 result = 0
588 result = 0
589 finally:
589 finally:
590 restorematchfn()
590 restorematchfn()
591
591
592 # The first rename can cause our current working directory to be removed.
592 # The first rename can cause our current working directory to be removed.
593 # In that case there is nothing left to copy/rename so just quit.
593 # In that case there is nothing left to copy/rename so just quit.
594 try:
594 try:
595 repo.getcwd()
595 repo.getcwd()
596 except OSError:
596 except OSError:
597 return result
597 return result
598
598
599 def makestandin(relpath):
599 def makestandin(relpath):
600 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
600 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
601 return os.path.join(repo.wjoin(lfutil.standin(path)))
601 return os.path.join(repo.wjoin(lfutil.standin(path)))
602
602
603 fullpats = scmutil.expandpats(pats)
603 fullpats = scmutil.expandpats(pats)
604 dest = fullpats[-1]
604 dest = fullpats[-1]
605
605
606 if os.path.isdir(dest):
606 if os.path.isdir(dest):
607 if not os.path.isdir(makestandin(dest)):
607 if not os.path.isdir(makestandin(dest)):
608 os.makedirs(makestandin(dest))
608 os.makedirs(makestandin(dest))
609
609
610 try:
610 try:
611 # When we call orig below it creates the standins but we don't add
611 # When we call orig below it creates the standins but we don't add
612 # them to the dir state until later so lock during that time.
612 # them to the dir state until later so lock during that time.
613 wlock = repo.wlock()
613 wlock = repo.wlock()
614
614
615 manifest = repo[None].manifest()
615 manifest = repo[None].manifest()
616 def overridematch(ctx, pats=[], opts={}, globbed=False,
616 def overridematch(ctx, pats=[], opts={}, globbed=False,
617 default='relpath', badfn=None):
617 default='relpath', badfn=None):
618 newpats = []
618 newpats = []
619 # The patterns were previously mangled to add the standin
619 # The patterns were previously mangled to add the standin
620 # directory; we need to remove that now
620 # directory; we need to remove that now
621 for pat in pats:
621 for pat in pats:
622 if match_.patkind(pat) is None and lfutil.shortname in pat:
622 if match_.patkind(pat) is None and lfutil.shortname in pat:
623 newpats.append(pat.replace(lfutil.shortname, ''))
623 newpats.append(pat.replace(lfutil.shortname, ''))
624 else:
624 else:
625 newpats.append(pat)
625 newpats.append(pat)
626 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
626 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
627 m = copy.copy(match)
627 m = copy.copy(match)
628 lfile = lambda f: lfutil.standin(f) in manifest
628 lfile = lambda f: lfutil.standin(f) in manifest
629 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
629 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
630 m._fileroots = set(m._files)
630 m._fileroots = set(m._files)
631 origmatchfn = m.matchfn
631 origmatchfn = m.matchfn
632 m.matchfn = lambda f: (lfutil.isstandin(f) and
632 m.matchfn = lambda f: (lfutil.isstandin(f) and
633 (f in manifest) and
633 (f in manifest) and
634 origmatchfn(lfutil.splitstandin(f)) or
634 origmatchfn(lfutil.splitstandin(f)) or
635 None)
635 None)
636 return m
636 return m
637 oldmatch = installmatchfn(overridematch)
637 oldmatch = installmatchfn(overridematch)
638 listpats = []
638 listpats = []
639 for pat in pats:
639 for pat in pats:
640 if match_.patkind(pat) is not None:
640 if match_.patkind(pat) is not None:
641 listpats.append(pat)
641 listpats.append(pat)
642 else:
642 else:
643 listpats.append(makestandin(pat))
643 listpats.append(makestandin(pat))
644
644
645 try:
645 try:
646 origcopyfile = util.copyfile
646 origcopyfile = util.copyfile
647 copiedfiles = []
647 copiedfiles = []
648 def overridecopyfile(src, dest):
648 def overridecopyfile(src, dest):
649 if (lfutil.shortname in src and
649 if (lfutil.shortname in src and
650 dest.startswith(repo.wjoin(lfutil.shortname))):
650 dest.startswith(repo.wjoin(lfutil.shortname))):
651 destlfile = dest.replace(lfutil.shortname, '')
651 destlfile = dest.replace(lfutil.shortname, '')
652 if not opts['force'] and os.path.exists(destlfile):
652 if not opts['force'] and os.path.exists(destlfile):
653 raise IOError('',
653 raise IOError('',
654 _('destination largefile already exists'))
654 _('destination largefile already exists'))
655 copiedfiles.append((src, dest))
655 copiedfiles.append((src, dest))
656 origcopyfile(src, dest)
656 origcopyfile(src, dest)
657
657
658 util.copyfile = overridecopyfile
658 util.copyfile = overridecopyfile
659 result += orig(ui, repo, listpats, opts, rename)
659 result += orig(ui, repo, listpats, opts, rename)
660 finally:
660 finally:
661 util.copyfile = origcopyfile
661 util.copyfile = origcopyfile
662
662
663 lfdirstate = lfutil.openlfdirstate(ui, repo)
663 lfdirstate = lfutil.openlfdirstate(ui, repo)
664 for (src, dest) in copiedfiles:
664 for (src, dest) in copiedfiles:
665 if (lfutil.shortname in src and
665 if (lfutil.shortname in src and
666 dest.startswith(repo.wjoin(lfutil.shortname))):
666 dest.startswith(repo.wjoin(lfutil.shortname))):
667 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
667 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
668 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
668 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
669 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
669 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
670 if not os.path.isdir(destlfiledir):
670 if not os.path.isdir(destlfiledir):
671 os.makedirs(destlfiledir)
671 os.makedirs(destlfiledir)
672 if rename:
672 if rename:
673 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
673 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
674
674
675 # The file is gone, but this deletes any empty parent
675 # The file is gone, but this deletes any empty parent
676 # directories as a side-effect.
676 # directories as a side-effect.
677 util.unlinkpath(repo.wjoin(srclfile), True)
677 util.unlinkpath(repo.wjoin(srclfile), True)
678 lfdirstate.remove(srclfile)
678 lfdirstate.remove(srclfile)
679 else:
679 else:
680 util.copyfile(repo.wjoin(srclfile),
680 util.copyfile(repo.wjoin(srclfile),
681 repo.wjoin(destlfile))
681 repo.wjoin(destlfile))
682
682
683 lfdirstate.add(destlfile)
683 lfdirstate.add(destlfile)
684 lfdirstate.write()
684 lfdirstate.write()
685 except util.Abort, e:
685 except util.Abort, e:
686 if str(e) != _('no files to copy'):
686 if str(e) != _('no files to copy'):
687 raise e
687 raise e
688 else:
688 else:
689 nolfiles = True
689 nolfiles = True
690 finally:
690 finally:
691 restorematchfn()
691 restorematchfn()
692 wlock.release()
692 wlock.release()
693
693
694 if nolfiles and nonormalfiles:
694 if nolfiles and nonormalfiles:
695 raise util.Abort(_('no files to copy'))
695 raise util.Abort(_('no files to copy'))
696
696
697 return result
697 return result
698
698
699 # When the user calls revert, we have to be careful to not revert any
699 # When the user calls revert, we have to be careful to not revert any
700 # changes to other largefiles accidentally. This means we have to keep
700 # changes to other largefiles accidentally. This means we have to keep
701 # track of the largefiles that are being reverted so we only pull down
701 # track of the largefiles that are being reverted so we only pull down
702 # the necessary largefiles.
702 # the necessary largefiles.
703 #
703 #
704 # Standins are only updated (to match the hash of largefiles) before
704 # Standins are only updated (to match the hash of largefiles) before
705 # commits. Update the standins then run the original revert, changing
705 # commits. Update the standins then run the original revert, changing
706 # the matcher to hit standins instead of largefiles. Based on the
706 # the matcher to hit standins instead of largefiles. Based on the
707 # resulting standins update the largefiles.
707 # resulting standins update the largefiles.
708 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
708 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
709 # Because we put the standins in a bad state (by updating them)
709 # Because we put the standins in a bad state (by updating them)
710 # and then return them to a correct state we need to lock to
710 # and then return them to a correct state we need to lock to
711 # prevent others from changing them in their incorrect state.
711 # prevent others from changing them in their incorrect state.
712 wlock = repo.wlock()
712 wlock = repo.wlock()
713 try:
713 try:
714 lfdirstate = lfutil.openlfdirstate(ui, repo)
714 lfdirstate = lfutil.openlfdirstate(ui, repo)
715 s = lfutil.lfdirstatestatus(lfdirstate, repo)
715 s = lfutil.lfdirstatestatus(lfdirstate, repo)
716 lfdirstate.write()
716 lfdirstate.write()
717 for lfile in s.modified:
717 for lfile in s.modified:
718 lfutil.updatestandin(repo, lfutil.standin(lfile))
718 lfutil.updatestandin(repo, lfutil.standin(lfile))
719 for lfile in s.deleted:
719 for lfile in s.deleted:
720 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
720 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
721 os.unlink(repo.wjoin(lfutil.standin(lfile)))
721 os.unlink(repo.wjoin(lfutil.standin(lfile)))
722
722
723 oldstandins = lfutil.getstandinsstate(repo)
723 oldstandins = lfutil.getstandinsstate(repo)
724
724
725 def overridematch(mctx, pats=[], opts={}, globbed=False,
725 def overridematch(mctx, pats=[], opts={}, globbed=False,
726 default='relpath', badfn=None):
726 default='relpath', badfn=None):
727 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
727 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
728 m = copy.copy(match)
728 m = copy.copy(match)
729
729
730 # revert supports recursing into subrepos, and though largefiles
730 # revert supports recursing into subrepos, and though largefiles
731 # currently doesn't work correctly in that case, this match is
731 # currently doesn't work correctly in that case, this match is
732 # called, so the lfdirstate above may not be the correct one for
732 # called, so the lfdirstate above may not be the correct one for
733 # this invocation of match.
733 # this invocation of match.
734 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
734 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
735 False)
735 False)
736
736
737 def tostandin(f):
737 def tostandin(f):
738 standin = lfutil.standin(f)
738 standin = lfutil.standin(f)
739 if standin in ctx or standin in mctx:
739 if standin in ctx or standin in mctx:
740 return standin
740 return standin
741 elif standin in repo[None] or lfdirstate[f] == 'r':
741 elif standin in repo[None] or lfdirstate[f] == 'r':
742 return None
742 return None
743 return f
743 return f
744 m._files = [tostandin(f) for f in m._files]
744 m._files = [tostandin(f) for f in m._files]
745 m._files = [f for f in m._files if f is not None]
745 m._files = [f for f in m._files if f is not None]
746 m._fileroots = set(m._files)
746 m._fileroots = set(m._files)
747 origmatchfn = m.matchfn
747 origmatchfn = m.matchfn
748 def matchfn(f):
748 def matchfn(f):
749 if lfutil.isstandin(f):
749 if lfutil.isstandin(f):
750 return (origmatchfn(lfutil.splitstandin(f)) and
750 return (origmatchfn(lfutil.splitstandin(f)) and
751 (f in ctx or f in mctx))
751 (f in ctx or f in mctx))
752 return origmatchfn(f)
752 return origmatchfn(f)
753 m.matchfn = matchfn
753 m.matchfn = matchfn
754 return m
754 return m
755 oldmatch = installmatchfn(overridematch)
755 oldmatch = installmatchfn(overridematch)
756 try:
756 try:
757 orig(ui, repo, ctx, parents, *pats, **opts)
757 orig(ui, repo, ctx, parents, *pats, **opts)
758 finally:
758 finally:
759 restorematchfn()
759 restorematchfn()
760
760
761 newstandins = lfutil.getstandinsstate(repo)
761 newstandins = lfutil.getstandinsstate(repo)
762 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
762 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
763 # lfdirstate should be 'normallookup'-ed for updated files,
763 # lfdirstate should be 'normallookup'-ed for updated files,
764 # because reverting doesn't touch dirstate for 'normal' files
764 # because reverting doesn't touch dirstate for 'normal' files
765 # when the target revision is explicitly specified: in that case,
765 # when the target revision is explicitly specified: in that case,
766 # an 'n' state and a valid timestamp in the dirstate don't ensure
766 # an 'n' state and a valid timestamp in the dirstate don't ensure
767 # that the target (standin) file is 'clean'.
767 # that the target (standin) file is 'clean'.
768 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
768 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
769 normallookup=True)
769 normallookup=True)
770
770
771 finally:
771 finally:
772 wlock.release()
772 wlock.release()
773
773
774 # after pulling changesets, we need to take some extra care to get
774 # after pulling changesets, we need to take some extra care to get
775 # largefiles updated remotely
775 # largefiles updated remotely
776 def overridepull(orig, ui, repo, source=None, **opts):
776 def overridepull(orig, ui, repo, source=None, **opts):
777 revsprepull = len(repo)
777 revsprepull = len(repo)
778 if not source:
778 if not source:
779 source = 'default'
779 source = 'default'
780 repo.lfpullsource = source
780 repo.lfpullsource = source
781 result = orig(ui, repo, source, **opts)
781 result = orig(ui, repo, source, **opts)
782 revspostpull = len(repo)
782 revspostpull = len(repo)
783 lfrevs = opts.get('lfrev', [])
783 lfrevs = opts.get('lfrev', [])
784 if opts.get('all_largefiles'):
784 if opts.get('all_largefiles'):
785 lfrevs.append('pulled()')
785 lfrevs.append('pulled()')
786 if lfrevs and revspostpull > revsprepull:
786 if lfrevs and revspostpull > revsprepull:
787 numcached = 0
787 numcached = 0
788 repo.firstpulled = revsprepull # for pulled() revset expression
788 repo.firstpulled = revsprepull # for pulled() revset expression
789 try:
789 try:
790 for rev in scmutil.revrange(repo, lfrevs):
790 for rev in scmutil.revrange(repo, lfrevs):
791 ui.note(_('pulling largefiles for revision %s\n') % rev)
791 ui.note(_('pulling largefiles for revision %s\n') % rev)
792 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
792 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
793 numcached += len(cached)
793 numcached += len(cached)
794 finally:
794 finally:
795 del repo.firstpulled
795 del repo.firstpulled
796 ui.status(_("%d largefiles cached\n") % numcached)
796 ui.status(_("%d largefiles cached\n") % numcached)
797 return result
797 return result
798
798
799 def pulledrevsetsymbol(repo, subset, x):
799 def pulledrevsetsymbol(repo, subset, x):
800 """``pulled()``
800 """``pulled()``
801 Changesets that have just been pulled.
801 Changesets that have just been pulled.
802
802
803 Only available with largefiles from pull --lfrev expressions.
803 Only available with largefiles from pull --lfrev expressions.
804
804
805 .. container:: verbose
805 .. container:: verbose
806
806
807 Some examples:
807 Some examples:
808
808
809 - pull largefiles for all new changesets::
809 - pull largefiles for all new changesets::
810
810
811 hg pull --lfrev "pulled()"
811 hg pull --lfrev "pulled()"
812
812
813 - pull largefiles for all new branch heads::
813 - pull largefiles for all new branch heads::
814
814
815 hg pull --lfrev "head(pulled()) and not closed()"
815 hg pull --lfrev "head(pulled()) and not closed()"
816
816
817 """
817 """
818
818
819 try:
819 try:
820 firstpulled = repo.firstpulled
820 firstpulled = repo.firstpulled
821 except AttributeError:
821 except AttributeError:
822 raise util.Abort(_("pulled() only available in --lfrev"))
822 raise util.Abort(_("pulled() only available in --lfrev"))
823 return revset.baseset([r for r in subset if r >= firstpulled])
823 return revset.baseset([r for r in subset if r >= firstpulled])
824
824
825 def overrideclone(orig, ui, source, dest=None, **opts):
825 def overrideclone(orig, ui, source, dest=None, **opts):
826 d = dest
826 d = dest
827 if d is None:
827 if d is None:
828 d = hg.defaultdest(source)
828 d = hg.defaultdest(source)
829 if opts.get('all_largefiles') and not hg.islocal(d):
829 if opts.get('all_largefiles') and not hg.islocal(d):
830 raise util.Abort(_(
830 raise util.Abort(_(
831 '--all-largefiles is incompatible with non-local destination %s') %
831 '--all-largefiles is incompatible with non-local destination %s') %
832 d)
832 d)
833
833
834 return orig(ui, source, dest, **opts)
834 return orig(ui, source, dest, **opts)
835
835
836 def hgclone(orig, ui, opts, *args, **kwargs):
836 def hgclone(orig, ui, opts, *args, **kwargs):
837 result = orig(ui, opts, *args, **kwargs)
837 result = orig(ui, opts, *args, **kwargs)
838
838
839 if result is not None:
839 if result is not None:
840 sourcerepo, destrepo = result
840 sourcerepo, destrepo = result
841 repo = destrepo.local()
841 repo = destrepo.local()
842
842
843 # When cloning to a remote repo (like through SSH), no repo is available
843 # When cloning to a remote repo (like through SSH), no repo is available
844 # from the peer. Therefore the largefiles can't be downloaded and the
844 # from the peer. Therefore the largefiles can't be downloaded and the
845 # hgrc can't be updated.
845 # hgrc can't be updated.
846 if not repo:
846 if not repo:
847 return result
847 return result
848
848
849 # If largefiles is required for this repo, permanently enable it locally
849 # If largefiles is required for this repo, permanently enable it locally
850 if 'largefiles' in repo.requirements:
850 if 'largefiles' in repo.requirements:
851 fp = repo.vfs('hgrc', 'a', text=True)
851 fp = repo.vfs('hgrc', 'a', text=True)
852 try:
852 try:
853 fp.write('\n[extensions]\nlargefiles=\n')
853 fp.write('\n[extensions]\nlargefiles=\n')
854 finally:
854 finally:
855 fp.close()
855 fp.close()
856
856
857 # Caching is implicitly limited to 'rev' option, since the dest repo was
857 # Caching is implicitly limited to 'rev' option, since the dest repo was
858 # truncated at that point. The user may expect a download count with
858 # truncated at that point. The user may expect a download count with
859 # this option, so attempt it whether or not this is a largefile repo.
859 # this option, so attempt it whether or not this is a largefile repo.
860 if opts.get('all_largefiles'):
860 if opts.get('all_largefiles'):
861 success, missing = lfcommands.downloadlfiles(ui, repo, None)
861 success, missing = lfcommands.downloadlfiles(ui, repo, None)
862
862
863 if missing != 0:
863 if missing != 0:
864 return None
864 return None
865
865
866 return result
866 return result
867
867
868 def overriderebase(orig, ui, repo, **opts):
868 def overriderebase(orig, ui, repo, **opts):
869 if not util.safehasattr(repo, '_largefilesenabled'):
869 if not util.safehasattr(repo, '_largefilesenabled'):
870 return orig(ui, repo, **opts)
870 return orig(ui, repo, **opts)
871
871
872 resuming = opts.get('continue')
872 resuming = opts.get('continue')
873 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
873 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
874 repo._lfstatuswriters.append(lambda *msg, **opts: None)
874 repo._lfstatuswriters.append(lambda *msg, **opts: None)
875 try:
875 try:
876 return orig(ui, repo, **opts)
876 return orig(ui, repo, **opts)
877 finally:
877 finally:
878 repo._lfstatuswriters.pop()
878 repo._lfstatuswriters.pop()
879 repo._lfcommithooks.pop()
879 repo._lfcommithooks.pop()
880
880
881 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
881 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
882 prefix='', mtime=None, subrepos=None):
882 prefix='', mtime=None, subrepos=None):
883 # No need to lock because we are only reading history and
883 # No need to lock because we are only reading history and
884 # largefile caches, neither of which is modified.
884 # largefile caches, neither of which is modified.
885 if node is not None:
885 lfcommands.cachelfiles(repo.ui, repo, node)
886 lfcommands.cachelfiles(repo.ui, repo, node)
886
887
887 if kind not in archival.archivers:
888 if kind not in archival.archivers:
888 raise util.Abort(_("unknown archive type '%s'") % kind)
889 raise util.Abort(_("unknown archive type '%s'") % kind)
889
890
890 ctx = repo[node]
891 ctx = repo[node]
891
892
892 if kind == 'files':
893 if kind == 'files':
893 if prefix:
894 if prefix:
894 raise util.Abort(
895 raise util.Abort(
895 _('cannot give prefix when archiving to files'))
896 _('cannot give prefix when archiving to files'))
896 else:
897 else:
897 prefix = archival.tidyprefix(dest, kind, prefix)
898 prefix = archival.tidyprefix(dest, kind, prefix)
898
899
899 def write(name, mode, islink, getdata):
900 def write(name, mode, islink, getdata):
900 if matchfn and not matchfn(name):
901 if matchfn and not matchfn(name):
901 return
902 return
902 data = getdata()
903 data = getdata()
903 if decode:
904 if decode:
904 data = repo.wwritedata(name, data)
905 data = repo.wwritedata(name, data)
905 archiver.addfile(prefix + name, mode, islink, data)
906 archiver.addfile(prefix + name, mode, islink, data)
906
907
907 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
908 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
908
909
909 if repo.ui.configbool("ui", "archivemeta", True):
910 if repo.ui.configbool("ui", "archivemeta", True):
910 write('.hg_archival.txt', 0644, False,
911 write('.hg_archival.txt', 0644, False,
911 lambda: archival.buildmetadata(ctx))
912 lambda: archival.buildmetadata(ctx))
912
913
913 for f in ctx:
914 for f in ctx:
914 ff = ctx.flags(f)
915 ff = ctx.flags(f)
915 getdata = ctx[f].data
916 getdata = ctx[f].data
916 if lfutil.isstandin(f):
917 if lfutil.isstandin(f):
918 if node is not None:
917 path = lfutil.findfile(repo, getdata().strip())
919 path = lfutil.findfile(repo, getdata().strip())
920
918 if path is None:
921 if path is None:
919 raise util.Abort(
922 raise util.Abort(
920 _('largefile %s not found in repo store or system cache')
923 _('largefile %s not found in repo store or system cache')
921 % lfutil.splitstandin(f))
924 % lfutil.splitstandin(f))
925 else:
926 path = lfutil.splitstandin(f)
927
922 f = lfutil.splitstandin(f)
928 f = lfutil.splitstandin(f)
923
929
924 def getdatafn():
930 def getdatafn():
925 fd = None
931 fd = None
926 try:
932 try:
927 fd = open(path, 'rb')
933 fd = open(path, 'rb')
928 return fd.read()
934 return fd.read()
929 finally:
935 finally:
930 if fd:
936 if fd:
931 fd.close()
937 fd.close()
932
938
933 getdata = getdatafn
939 getdata = getdatafn
934 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
940 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
935
941
936 if subrepos:
942 if subrepos:
937 for subpath in sorted(ctx.substate):
943 for subpath in sorted(ctx.substate):
938 sub = ctx.sub(subpath)
944 sub = ctx.workingsub(subpath)
939 submatch = match_.narrowmatcher(subpath, matchfn)
945 submatch = match_.narrowmatcher(subpath, matchfn)
940 sub.archive(archiver, prefix, submatch)
946 sub.archive(archiver, prefix, submatch)
941
947
942 archiver.done()
948 archiver.done()
943
949
944 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
950 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
945 repo._get(repo._state + ('hg',))
951 repo._get(repo._state + ('hg',))
946 rev = repo._state[1]
952 rev = repo._state[1]
947 ctx = repo._repo[rev]
953 ctx = repo._repo[rev]
948
954
955 if ctx.node() is not None:
949 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
956 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
950
957
951 def write(name, mode, islink, getdata):
958 def write(name, mode, islink, getdata):
952 # At this point, the standin has been replaced with the largefile name,
959 # At this point, the standin has been replaced with the largefile name,
953 # so the normal matcher works here without the lfutil variants.
960 # so the normal matcher works here without the lfutil variants.
954 if match and not match(f):
961 if match and not match(f):
955 return
962 return
956 data = getdata()
963 data = getdata()
957
964
958 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
965 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
959
966
960 for f in ctx:
967 for f in ctx:
961 ff = ctx.flags(f)
968 ff = ctx.flags(f)
962 getdata = ctx[f].data
969 getdata = ctx[f].data
963 if lfutil.isstandin(f):
970 if lfutil.isstandin(f):
971 if ctx.node() is not None:
964 path = lfutil.findfile(repo._repo, getdata().strip())
972 path = lfutil.findfile(repo._repo, getdata().strip())
973
965 if path is None:
974 if path is None:
966 raise util.Abort(
975 raise util.Abort(
967 _('largefile %s not found in repo store or system cache')
976 _('largefile %s not found in repo store or system cache')
968 % lfutil.splitstandin(f))
977 % lfutil.splitstandin(f))
978 else:
979 path = lfutil.splitstandin(f)
980
969 f = lfutil.splitstandin(f)
981 f = lfutil.splitstandin(f)
970
982
971 def getdatafn():
983 def getdatafn():
972 fd = None
984 fd = None
973 try:
985 try:
974 fd = open(os.path.join(prefix, path), 'rb')
986 fd = open(os.path.join(prefix, path), 'rb')
975 return fd.read()
987 return fd.read()
976 finally:
988 finally:
977 if fd:
989 if fd:
978 fd.close()
990 fd.close()
979
991
980 getdata = getdatafn
992 getdata = getdatafn
981
993
982 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
994 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
983
995
984 for subpath in sorted(ctx.substate):
996 for subpath in sorted(ctx.substate):
985 sub = ctx.sub(subpath)
997 sub = ctx.workingsub(subpath)
986 submatch = match_.narrowmatcher(subpath, match)
998 submatch = match_.narrowmatcher(subpath, match)
987 sub.archive(archiver, prefix + repo._path + '/', submatch)
999 sub.archive(archiver, prefix + repo._path + '/', submatch)
988
1000
989 # If a largefile is modified, the change is not reflected in its
1001 # If a largefile is modified, the change is not reflected in its
990 # standin until a commit. cmdutil.bailifchanged() raises an exception
1002 # standin until a commit. cmdutil.bailifchanged() raises an exception
991 # if the repo has uncommitted changes. Wrap it to also check if
1003 # if the repo has uncommitted changes. Wrap it to also check if
992 # largefiles were changed. This is used by bisect, backout and fetch.
1004 # largefiles were changed. This is used by bisect, backout and fetch.
993 def overridebailifchanged(orig, repo, *args, **kwargs):
1005 def overridebailifchanged(orig, repo, *args, **kwargs):
994 orig(repo, *args, **kwargs)
1006 orig(repo, *args, **kwargs)
995 repo.lfstatus = True
1007 repo.lfstatus = True
996 s = repo.status()
1008 s = repo.status()
997 repo.lfstatus = False
1009 repo.lfstatus = False
998 if s.modified or s.added or s.removed or s.deleted:
1010 if s.modified or s.added or s.removed or s.deleted:
999 raise util.Abort(_('uncommitted changes'))
1011 raise util.Abort(_('uncommitted changes'))
1000
1012
1001 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1013 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly):
1002 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1014 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1003 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1015 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly)
1004 m = composelargefilematcher(match, repo[None].manifest())
1016 m = composelargefilematcher(match, repo[None].manifest())
1005
1017
1006 try:
1018 try:
1007 repo.lfstatus = True
1019 repo.lfstatus = True
1008 s = repo.status(match=m, clean=True)
1020 s = repo.status(match=m, clean=True)
1009 finally:
1021 finally:
1010 repo.lfstatus = False
1022 repo.lfstatus = False
1011 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1023 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1012 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1024 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
1013
1025
1014 for f in forget:
1026 for f in forget:
1015 if lfutil.standin(f) not in repo.dirstate and not \
1027 if lfutil.standin(f) not in repo.dirstate and not \
1016 repo.wvfs.isdir(lfutil.standin(f)):
1028 repo.wvfs.isdir(lfutil.standin(f)):
1017 ui.warn(_('not removing %s: file is already untracked\n')
1029 ui.warn(_('not removing %s: file is already untracked\n')
1018 % m.rel(f))
1030 % m.rel(f))
1019 bad.append(f)
1031 bad.append(f)
1020
1032
1021 for f in forget:
1033 for f in forget:
1022 if ui.verbose or not m.exact(f):
1034 if ui.verbose or not m.exact(f):
1023 ui.status(_('removing %s\n') % m.rel(f))
1035 ui.status(_('removing %s\n') % m.rel(f))
1024
1036
1025 # Need to lock because standin files are deleted then removed from the
1037 # Need to lock because standin files are deleted then removed from the
1026 # repository and we could race in-between.
1038 # repository and we could race in-between.
1027 wlock = repo.wlock()
1039 wlock = repo.wlock()
1028 try:
1040 try:
1029 lfdirstate = lfutil.openlfdirstate(ui, repo)
1041 lfdirstate = lfutil.openlfdirstate(ui, repo)
1030 for f in forget:
1042 for f in forget:
1031 if lfdirstate[f] == 'a':
1043 if lfdirstate[f] == 'a':
1032 lfdirstate.drop(f)
1044 lfdirstate.drop(f)
1033 else:
1045 else:
1034 lfdirstate.remove(f)
1046 lfdirstate.remove(f)
1035 lfdirstate.write()
1047 lfdirstate.write()
1036 standins = [lfutil.standin(f) for f in forget]
1048 standins = [lfutil.standin(f) for f in forget]
1037 for f in standins:
1049 for f in standins:
1038 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1050 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1039 rejected = repo[None].forget(standins)
1051 rejected = repo[None].forget(standins)
1040 finally:
1052 finally:
1041 wlock.release()
1053 wlock.release()
1042
1054
1043 bad.extend(f for f in rejected if f in m.files())
1055 bad.extend(f for f in rejected if f in m.files())
1044 forgot.extend(f for f in forget if f not in rejected)
1056 forgot.extend(f for f in forget if f not in rejected)
1045 return bad, forgot
1057 return bad, forgot
1046
1058
1047 def _getoutgoings(repo, other, missing, addfunc):
1059 def _getoutgoings(repo, other, missing, addfunc):
1048 """get pairs of filename and largefile hash in outgoing revisions
1060 """get pairs of filename and largefile hash in outgoing revisions
1049 in 'missing'.
1061 in 'missing'.
1050
1062
1051 largefiles already existing on 'other' repository are ignored.
1063 largefiles already existing on 'other' repository are ignored.
1052
1064
1053 'addfunc' is invoked with each unique pair of filename and
1065 'addfunc' is invoked with each unique pair of filename and
1054 largefile hash value.
1066 largefile hash value.
1055 """
1067 """
1056 knowns = set()
1068 knowns = set()
1057 lfhashes = set()
1069 lfhashes = set()
1058 def dedup(fn, lfhash):
1070 def dedup(fn, lfhash):
1059 k = (fn, lfhash)
1071 k = (fn, lfhash)
1060 if k not in knowns:
1072 if k not in knowns:
1061 knowns.add(k)
1073 knowns.add(k)
1062 lfhashes.add(lfhash)
1074 lfhashes.add(lfhash)
1063 lfutil.getlfilestoupload(repo, missing, dedup)
1075 lfutil.getlfilestoupload(repo, missing, dedup)
1064 if lfhashes:
1076 if lfhashes:
1065 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1077 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1066 for fn, lfhash in knowns:
1078 for fn, lfhash in knowns:
1067 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1079 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1068 addfunc(fn, lfhash)
1080 addfunc(fn, lfhash)
1069
1081
1070 def outgoinghook(ui, repo, other, opts, missing):
1082 def outgoinghook(ui, repo, other, opts, missing):
1071 if opts.pop('large', None):
1083 if opts.pop('large', None):
1072 lfhashes = set()
1084 lfhashes = set()
1073 if ui.debugflag:
1085 if ui.debugflag:
1074 toupload = {}
1086 toupload = {}
1075 def addfunc(fn, lfhash):
1087 def addfunc(fn, lfhash):
1076 if fn not in toupload:
1088 if fn not in toupload:
1077 toupload[fn] = []
1089 toupload[fn] = []
1078 toupload[fn].append(lfhash)
1090 toupload[fn].append(lfhash)
1079 lfhashes.add(lfhash)
1091 lfhashes.add(lfhash)
1080 def showhashes(fn):
1092 def showhashes(fn):
1081 for lfhash in sorted(toupload[fn]):
1093 for lfhash in sorted(toupload[fn]):
1082 ui.debug(' %s\n' % (lfhash))
1094 ui.debug(' %s\n' % (lfhash))
1083 else:
1095 else:
1084 toupload = set()
1096 toupload = set()
1085 def addfunc(fn, lfhash):
1097 def addfunc(fn, lfhash):
1086 toupload.add(fn)
1098 toupload.add(fn)
1087 lfhashes.add(lfhash)
1099 lfhashes.add(lfhash)
1088 def showhashes(fn):
1100 def showhashes(fn):
1089 pass
1101 pass
1090 _getoutgoings(repo, other, missing, addfunc)
1102 _getoutgoings(repo, other, missing, addfunc)
1091
1103
1092 if not toupload:
1104 if not toupload:
1093 ui.status(_('largefiles: no files to upload\n'))
1105 ui.status(_('largefiles: no files to upload\n'))
1094 else:
1106 else:
1095 ui.status(_('largefiles to upload (%d entities):\n')
1107 ui.status(_('largefiles to upload (%d entities):\n')
1096 % (len(lfhashes)))
1108 % (len(lfhashes)))
1097 for file in sorted(toupload):
1109 for file in sorted(toupload):
1098 ui.status(lfutil.splitstandin(file) + '\n')
1110 ui.status(lfutil.splitstandin(file) + '\n')
1099 showhashes(file)
1111 showhashes(file)
1100 ui.status('\n')
1112 ui.status('\n')
1101
1113
1102 def summaryremotehook(ui, repo, opts, changes):
1114 def summaryremotehook(ui, repo, opts, changes):
1103 largeopt = opts.get('large', False)
1115 largeopt = opts.get('large', False)
1104 if changes is None:
1116 if changes is None:
1105 if largeopt:
1117 if largeopt:
1106 return (False, True) # only outgoing check is needed
1118 return (False, True) # only outgoing check is needed
1107 else:
1119 else:
1108 return (False, False)
1120 return (False, False)
1109 elif largeopt:
1121 elif largeopt:
1110 url, branch, peer, outgoing = changes[1]
1122 url, branch, peer, outgoing = changes[1]
1111 if peer is None:
1123 if peer is None:
1112 # i18n: column positioning for "hg summary"
1124 # i18n: column positioning for "hg summary"
1113 ui.status(_('largefiles: (no remote repo)\n'))
1125 ui.status(_('largefiles: (no remote repo)\n'))
1114 return
1126 return
1115
1127
1116 toupload = set()
1128 toupload = set()
1117 lfhashes = set()
1129 lfhashes = set()
1118 def addfunc(fn, lfhash):
1130 def addfunc(fn, lfhash):
1119 toupload.add(fn)
1131 toupload.add(fn)
1120 lfhashes.add(lfhash)
1132 lfhashes.add(lfhash)
1121 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1133 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1122
1134
1123 if not toupload:
1135 if not toupload:
1124 # i18n: column positioning for "hg summary"
1136 # i18n: column positioning for "hg summary"
1125 ui.status(_('largefiles: (no files to upload)\n'))
1137 ui.status(_('largefiles: (no files to upload)\n'))
1126 else:
1138 else:
1127 # i18n: column positioning for "hg summary"
1139 # i18n: column positioning for "hg summary"
1128 ui.status(_('largefiles: %d entities for %d files to upload\n')
1140 ui.status(_('largefiles: %d entities for %d files to upload\n')
1129 % (len(lfhashes), len(toupload)))
1141 % (len(lfhashes), len(toupload)))
1130
1142
1131 def overridesummary(orig, ui, repo, *pats, **opts):
1143 def overridesummary(orig, ui, repo, *pats, **opts):
1132 try:
1144 try:
1133 repo.lfstatus = True
1145 repo.lfstatus = True
1134 orig(ui, repo, *pats, **opts)
1146 orig(ui, repo, *pats, **opts)
1135 finally:
1147 finally:
1136 repo.lfstatus = False
1148 repo.lfstatus = False
1137
1149
1138 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1150 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1139 similarity=None):
1151 similarity=None):
1140 if not lfutil.islfilesrepo(repo):
1152 if not lfutil.islfilesrepo(repo):
1141 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1153 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1142 # Get the list of missing largefiles so we can remove them
1154 # Get the list of missing largefiles so we can remove them
1143 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1155 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1144 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1156 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1145 False, False, False)
1157 False, False, False)
1146
1158
1147 # Call into the normal remove code, but let the original addremove handle
1159 # Call into the normal remove code, but let the original addremove handle
1148 # removing the standin. Monkey patching here makes sure
1160 # removing the standin. Monkey patching here makes sure
1149 # we don't remove the standin in the largefiles code, preventing a very
1161 # we don't remove the standin in the largefiles code, preventing a very
1150 # confused state later.
1162 # confused state later.
1151 if s.deleted:
1163 if s.deleted:
1152 m = copy.copy(matcher)
1164 m = copy.copy(matcher)
1153
1165
1154 # The m._files and m._map attributes are not changed to the deleted list
1166 # The m._files and m._map attributes are not changed to the deleted list
1155 # because that affects the m.exact() test, which in turn governs whether
1167 # because that affects the m.exact() test, which in turn governs whether
1156 # or not the file name is printed, and how. Simply limit the original
1168 # or not the file name is printed, and how. Simply limit the original
1157 # matches to those in the deleted status list.
1169 # matches to those in the deleted status list.
1158 matchfn = m.matchfn
1170 matchfn = m.matchfn
1159 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1171 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1160
1172
1161 removelargefiles(repo.ui, repo, True, m, **opts)
1173 removelargefiles(repo.ui, repo, True, m, **opts)
1162 # Call into the normal add code, and any files that *should* be added as
1174 # Call into the normal add code, and any files that *should* be added as
1163 # largefiles will be
1175 # largefiles will be
1164 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1176 added, bad = addlargefiles(repo.ui, repo, True, matcher, **opts)
1165 # Now that we've handled largefiles, hand off to the original addremove
1177 # Now that we've handled largefiles, hand off to the original addremove
1166 # function to take care of the rest. Make sure it doesn't do anything with
1178 # function to take care of the rest. Make sure it doesn't do anything with
1167 # largefiles by passing a matcher that will ignore them.
1179 # largefiles by passing a matcher that will ignore them.
1168 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1180 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1169 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1181 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1170
1182
1171 # Calling purge with --all will cause the largefiles to be deleted.
1183 # Calling purge with --all will cause the largefiles to be deleted.
1172 # Override repo.status to prevent this from happening.
1184 # Override repo.status to prevent this from happening.
1173 def overridepurge(orig, ui, repo, *dirs, **opts):
1185 def overridepurge(orig, ui, repo, *dirs, **opts):
1174 # XXX Monkey patching a repoview will not work. The assigned attribute will
1186 # XXX Monkey patching a repoview will not work. The assigned attribute will
1175 # be set on the unfiltered repo, but we will only lookup attributes in the
1187 # be set on the unfiltered repo, but we will only lookup attributes in the
1176 # unfiltered repo if the lookup in the repoview object itself fails. As the
1188 # unfiltered repo if the lookup in the repoview object itself fails. As the
1177 # monkey patched method exists on the repoview class the lookup will not
1189 # monkey patched method exists on the repoview class the lookup will not
1178 # fail. As a result, the original version will shadow the monkey patched
1190 # fail. As a result, the original version will shadow the monkey patched
1179 # one, defeating the monkey patch.
1191 # one, defeating the monkey patch.
1180 #
1192 #
1181 # As a workaround we use an unfiltered repo here. We should do something
1193 # As a workaround we use an unfiltered repo here. We should do something
1182 # cleaner instead.
1194 # cleaner instead.
1183 repo = repo.unfiltered()
1195 repo = repo.unfiltered()
1184 oldstatus = repo.status
1196 oldstatus = repo.status
1185 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1197 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1186 clean=False, unknown=False, listsubrepos=False):
1198 clean=False, unknown=False, listsubrepos=False):
1187 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1199 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1188 listsubrepos)
1200 listsubrepos)
1189 lfdirstate = lfutil.openlfdirstate(ui, repo)
1201 lfdirstate = lfutil.openlfdirstate(ui, repo)
1190 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1202 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1191 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1203 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1192 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1204 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1193 unknown, ignored, r.clean)
1205 unknown, ignored, r.clean)
1194 repo.status = overridestatus
1206 repo.status = overridestatus
1195 orig(ui, repo, *dirs, **opts)
1207 orig(ui, repo, *dirs, **opts)
1196 repo.status = oldstatus
1208 repo.status = oldstatus
1197 def overriderollback(orig, ui, repo, **opts):
1209 def overriderollback(orig, ui, repo, **opts):
1198 wlock = repo.wlock()
1210 wlock = repo.wlock()
1199 try:
1211 try:
1200 before = repo.dirstate.parents()
1212 before = repo.dirstate.parents()
1201 orphans = set(f for f in repo.dirstate
1213 orphans = set(f for f in repo.dirstate
1202 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1214 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1203 result = orig(ui, repo, **opts)
1215 result = orig(ui, repo, **opts)
1204 after = repo.dirstate.parents()
1216 after = repo.dirstate.parents()
1205 if before == after:
1217 if before == after:
1206 return result # no need to restore standins
1218 return result # no need to restore standins
1207
1219
1208 pctx = repo['.']
1220 pctx = repo['.']
1209 for f in repo.dirstate:
1221 for f in repo.dirstate:
1210 if lfutil.isstandin(f):
1222 if lfutil.isstandin(f):
1211 orphans.discard(f)
1223 orphans.discard(f)
1212 if repo.dirstate[f] == 'r':
1224 if repo.dirstate[f] == 'r':
1213 repo.wvfs.unlinkpath(f, ignoremissing=True)
1225 repo.wvfs.unlinkpath(f, ignoremissing=True)
1214 elif f in pctx:
1226 elif f in pctx:
1215 fctx = pctx[f]
1227 fctx = pctx[f]
1216 repo.wwrite(f, fctx.data(), fctx.flags())
1228 repo.wwrite(f, fctx.data(), fctx.flags())
1217 else:
1229 else:
1218 # content of standin is not so important in 'a',
1230 # content of standin is not so important in 'a',
1219 # 'm' or 'n' (coming from the 2nd parent) cases
1231 # 'm' or 'n' (coming from the 2nd parent) cases
1220 lfutil.writestandin(repo, f, '', False)
1232 lfutil.writestandin(repo, f, '', False)
1221 for standin in orphans:
1233 for standin in orphans:
1222 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1234 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1223
1235
1224 lfdirstate = lfutil.openlfdirstate(ui, repo)
1236 lfdirstate = lfutil.openlfdirstate(ui, repo)
1225 orphans = set(lfdirstate)
1237 orphans = set(lfdirstate)
1226 lfiles = lfutil.listlfiles(repo)
1238 lfiles = lfutil.listlfiles(repo)
1227 for file in lfiles:
1239 for file in lfiles:
1228 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1240 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1229 orphans.discard(file)
1241 orphans.discard(file)
1230 for lfile in orphans:
1242 for lfile in orphans:
1231 lfdirstate.drop(lfile)
1243 lfdirstate.drop(lfile)
1232 lfdirstate.write()
1244 lfdirstate.write()
1233 finally:
1245 finally:
1234 wlock.release()
1246 wlock.release()
1235 return result
1247 return result
1236
1248
1237 def overridetransplant(orig, ui, repo, *revs, **opts):
1249 def overridetransplant(orig, ui, repo, *revs, **opts):
1238 resuming = opts.get('continue')
1250 resuming = opts.get('continue')
1239 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1251 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1240 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1252 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1241 try:
1253 try:
1242 result = orig(ui, repo, *revs, **opts)
1254 result = orig(ui, repo, *revs, **opts)
1243 finally:
1255 finally:
1244 repo._lfstatuswriters.pop()
1256 repo._lfstatuswriters.pop()
1245 repo._lfcommithooks.pop()
1257 repo._lfcommithooks.pop()
1246 return result
1258 return result
1247
1259
1248 def overridecat(orig, ui, repo, file1, *pats, **opts):
1260 def overridecat(orig, ui, repo, file1, *pats, **opts):
1249 ctx = scmutil.revsingle(repo, opts.get('rev'))
1261 ctx = scmutil.revsingle(repo, opts.get('rev'))
1250 err = 1
1262 err = 1
1251 notbad = set()
1263 notbad = set()
1252 m = scmutil.match(ctx, (file1,) + pats, opts)
1264 m = scmutil.match(ctx, (file1,) + pats, opts)
1253 origmatchfn = m.matchfn
1265 origmatchfn = m.matchfn
1254 def lfmatchfn(f):
1266 def lfmatchfn(f):
1255 if origmatchfn(f):
1267 if origmatchfn(f):
1256 return True
1268 return True
1257 lf = lfutil.splitstandin(f)
1269 lf = lfutil.splitstandin(f)
1258 if lf is None:
1270 if lf is None:
1259 return False
1271 return False
1260 notbad.add(lf)
1272 notbad.add(lf)
1261 return origmatchfn(lf)
1273 return origmatchfn(lf)
1262 m.matchfn = lfmatchfn
1274 m.matchfn = lfmatchfn
1263 origbadfn = m.bad
1275 origbadfn = m.bad
1264 def lfbadfn(f, msg):
1276 def lfbadfn(f, msg):
1265 if not f in notbad:
1277 if not f in notbad:
1266 origbadfn(f, msg)
1278 origbadfn(f, msg)
1267 m.bad = lfbadfn
1279 m.bad = lfbadfn
1268
1280
1269 origvisitdirfn = m.visitdir
1281 origvisitdirfn = m.visitdir
1270 def lfvisitdirfn(dir):
1282 def lfvisitdirfn(dir):
1271 if dir == lfutil.shortname:
1283 if dir == lfutil.shortname:
1272 return True
1284 return True
1273 ret = origvisitdirfn(dir)
1285 ret = origvisitdirfn(dir)
1274 if ret:
1286 if ret:
1275 return ret
1287 return ret
1276 lf = lfutil.splitstandin(dir)
1288 lf = lfutil.splitstandin(dir)
1277 if lf is None:
1289 if lf is None:
1278 return False
1290 return False
1279 return origvisitdirfn(lf)
1291 return origvisitdirfn(lf)
1280 m.visitdir = lfvisitdirfn
1292 m.visitdir = lfvisitdirfn
1281
1293
1282 for f in ctx.walk(m):
1294 for f in ctx.walk(m):
1283 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1295 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1284 pathname=f)
1296 pathname=f)
1285 lf = lfutil.splitstandin(f)
1297 lf = lfutil.splitstandin(f)
1286 if lf is None or origmatchfn(f):
1298 if lf is None or origmatchfn(f):
1287 # duplicating unreachable code from commands.cat
1299 # duplicating unreachable code from commands.cat
1288 data = ctx[f].data()
1300 data = ctx[f].data()
1289 if opts.get('decode'):
1301 if opts.get('decode'):
1290 data = repo.wwritedata(f, data)
1302 data = repo.wwritedata(f, data)
1291 fp.write(data)
1303 fp.write(data)
1292 else:
1304 else:
1293 hash = lfutil.readstandin(repo, lf, ctx.rev())
1305 hash = lfutil.readstandin(repo, lf, ctx.rev())
1294 if not lfutil.inusercache(repo.ui, hash):
1306 if not lfutil.inusercache(repo.ui, hash):
1295 store = basestore._openstore(repo)
1307 store = basestore._openstore(repo)
1296 success, missing = store.get([(lf, hash)])
1308 success, missing = store.get([(lf, hash)])
1297 if len(success) != 1:
1309 if len(success) != 1:
1298 raise util.Abort(
1310 raise util.Abort(
1299 _('largefile %s is not in cache and could not be '
1311 _('largefile %s is not in cache and could not be '
1300 'downloaded') % lf)
1312 'downloaded') % lf)
1301 path = lfutil.usercachepath(repo.ui, hash)
1313 path = lfutil.usercachepath(repo.ui, hash)
1302 fpin = open(path, "rb")
1314 fpin = open(path, "rb")
1303 for chunk in util.filechunkiter(fpin, 128 * 1024):
1315 for chunk in util.filechunkiter(fpin, 128 * 1024):
1304 fp.write(chunk)
1316 fp.write(chunk)
1305 fpin.close()
1317 fpin.close()
1306 fp.close()
1318 fp.close()
1307 err = 0
1319 err = 0
1308 return err
1320 return err
1309
1321
1310 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1322 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1311 *args, **kwargs):
1323 *args, **kwargs):
1312 wlock = repo.wlock()
1324 wlock = repo.wlock()
1313 try:
1325 try:
1314 # branch | | |
1326 # branch | | |
1315 # merge | force | partial | action
1327 # merge | force | partial | action
1316 # -------+-------+---------+--------------
1328 # -------+-------+---------+--------------
1317 # x | x | x | linear-merge
1329 # x | x | x | linear-merge
1318 # o | x | x | branch-merge
1330 # o | x | x | branch-merge
1319 # x | o | x | overwrite (as clean update)
1331 # x | o | x | overwrite (as clean update)
1320 # o | o | x | force-branch-merge (*1)
1332 # o | o | x | force-branch-merge (*1)
1321 # x | x | o | (*)
1333 # x | x | o | (*)
1322 # o | x | o | (*)
1334 # o | x | o | (*)
1323 # x | o | o | overwrite (as revert)
1335 # x | o | o | overwrite (as revert)
1324 # o | o | o | (*)
1336 # o | o | o | (*)
1325 #
1337 #
1326 # (*) don't care
1338 # (*) don't care
1327 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1339 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1328
1340
1329 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1341 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1330 unsure, s = lfdirstate.status(match_.always(repo.root,
1342 unsure, s = lfdirstate.status(match_.always(repo.root,
1331 repo.getcwd()),
1343 repo.getcwd()),
1332 [], False, False, False)
1344 [], False, False, False)
1333 pctx = repo['.']
1345 pctx = repo['.']
1334 for lfile in unsure + s.modified:
1346 for lfile in unsure + s.modified:
1335 lfileabs = repo.wvfs.join(lfile)
1347 lfileabs = repo.wvfs.join(lfile)
1336 if not os.path.exists(lfileabs):
1348 if not os.path.exists(lfileabs):
1337 continue
1349 continue
1338 lfhash = lfutil.hashrepofile(repo, lfile)
1350 lfhash = lfutil.hashrepofile(repo, lfile)
1339 standin = lfutil.standin(lfile)
1351 standin = lfutil.standin(lfile)
1340 lfutil.writestandin(repo, standin, lfhash,
1352 lfutil.writestandin(repo, standin, lfhash,
1341 lfutil.getexecutable(lfileabs))
1353 lfutil.getexecutable(lfileabs))
1342 if (standin in pctx and
1354 if (standin in pctx and
1343 lfhash == lfutil.readstandin(repo, lfile, '.')):
1355 lfhash == lfutil.readstandin(repo, lfile, '.')):
1344 lfdirstate.normal(lfile)
1356 lfdirstate.normal(lfile)
1345 for lfile in s.added:
1357 for lfile in s.added:
1346 lfutil.updatestandin(repo, lfutil.standin(lfile))
1358 lfutil.updatestandin(repo, lfutil.standin(lfile))
1347 lfdirstate.write()
1359 lfdirstate.write()
1348
1360
1349 oldstandins = lfutil.getstandinsstate(repo)
1361 oldstandins = lfutil.getstandinsstate(repo)
1350
1362
1351 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1363 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1352
1364
1353 newstandins = lfutil.getstandinsstate(repo)
1365 newstandins = lfutil.getstandinsstate(repo)
1354 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1366 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1355 if branchmerge or force or partial:
1367 if branchmerge or force or partial:
1356 filelist.extend(s.deleted + s.removed)
1368 filelist.extend(s.deleted + s.removed)
1357
1369
1358 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1370 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1359 normallookup=partial)
1371 normallookup=partial)
1360
1372
1361 return result
1373 return result
1362 finally:
1374 finally:
1363 wlock.release()
1375 wlock.release()
1364
1376
1365 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1377 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1366 result = orig(repo, files, *args, **kwargs)
1378 result = orig(repo, files, *args, **kwargs)
1367
1379
1368 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1380 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1369 if filelist:
1381 if filelist:
1370 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1382 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1371 printmessage=False, normallookup=True)
1383 printmessage=False, normallookup=True)
1372
1384
1373 return result
1385 return result
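
The overrides.py hunks above let overridearchive() and hgsubrepoarchive() accept a None node (the working directory, which is what 'wdir()' resolves to): for committed revisions the largefile is still looked up by hash in the store or user cache, while for the working directory the checked-out file itself is used. A minimal sketch of that path-selection pattern, using only the lfutil helpers that already appear in the diff; largefilepath() is a hypothetical name for illustration, not part of the patch:

from hgext.largefiles import lfutil

def largefilepath(repo, f, getdata, node):
    # hypothetical helper, not in the patch: 'f' is a standin name such
    # as '.hglf/big.bin', 'getdata' returns the standin content (the
    # largefile hash) for the context being archived
    if node is not None:
        # committed revision: locate the blob by hash in the repo store
        # or the system cache
        return lfutil.findfile(repo, getdata().strip())
    # node is None (working directory / 'wdir()'): archive the
    # checked-out largefile at its normal working-copy path
    return lfutil.splitstandin(f)
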
@@ -1,326 +1,326 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import match as matchmod
9 import match as matchmod
10 import cmdutil
10 import cmdutil
11 import scmutil, util, encoding
11 import scmutil, util, encoding
12 import cStringIO, os, tarfile, time, zipfile
12 import cStringIO, os, tarfile, time, zipfile
13 import zlib, gzip
13 import zlib, gzip
14 import struct
14 import struct
15 import error
15 import error
16
16
17 # from unzip source code:
17 # from unzip source code:
18 _UNX_IFREG = 0x8000
18 _UNX_IFREG = 0x8000
19 _UNX_IFLNK = 0xa000
19 _UNX_IFLNK = 0xa000
20
20
21 def tidyprefix(dest, kind, prefix):
21 def tidyprefix(dest, kind, prefix):
22 '''choose prefix to use for names in archive. make sure prefix is
22 '''choose prefix to use for names in archive. make sure prefix is
23 safe for consumers.'''
23 safe for consumers.'''
24
24
25 if prefix:
25 if prefix:
26 prefix = util.normpath(prefix)
26 prefix = util.normpath(prefix)
27 else:
27 else:
28 if not isinstance(dest, str):
28 if not isinstance(dest, str):
29 raise ValueError('dest must be string if no prefix')
29 raise ValueError('dest must be string if no prefix')
30 prefix = os.path.basename(dest)
30 prefix = os.path.basename(dest)
31 lower = prefix.lower()
31 lower = prefix.lower()
32 for sfx in exts.get(kind, []):
32 for sfx in exts.get(kind, []):
33 if lower.endswith(sfx):
33 if lower.endswith(sfx):
34 prefix = prefix[:-len(sfx)]
34 prefix = prefix[:-len(sfx)]
35 break
35 break
36 lpfx = os.path.normpath(util.localpath(prefix))
36 lpfx = os.path.normpath(util.localpath(prefix))
37 prefix = util.pconvert(lpfx)
37 prefix = util.pconvert(lpfx)
38 if not prefix.endswith('/'):
38 if not prefix.endswith('/'):
39 prefix += '/'
39 prefix += '/'
40 # Drop the leading '.' path component if present, so Windows can read the
40 # Drop the leading '.' path component if present, so Windows can read the
41 # zip files (issue4634)
41 # zip files (issue4634)
42 if prefix.startswith('./'):
42 if prefix.startswith('./'):
43 prefix = prefix[2:]
43 prefix = prefix[2:]
44 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
44 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
45 raise util.Abort(_('archive prefix contains illegal components'))
45 raise util.Abort(_('archive prefix contains illegal components'))
46 return prefix
46 return prefix
47
47
48 exts = {
48 exts = {
49 'tar': ['.tar'],
49 'tar': ['.tar'],
50 'tbz2': ['.tbz2', '.tar.bz2'],
50 'tbz2': ['.tbz2', '.tar.bz2'],
51 'tgz': ['.tgz', '.tar.gz'],
51 'tgz': ['.tgz', '.tar.gz'],
52 'zip': ['.zip'],
52 'zip': ['.zip'],
53 }
53 }
54
54
55 def guesskind(dest):
55 def guesskind(dest):
56 for kind, extensions in exts.iteritems():
56 for kind, extensions in exts.iteritems():
57 if any(dest.endswith(ext) for ext in extensions):
57 if any(dest.endswith(ext) for ext in extensions):
58 return kind
58 return kind
59 return None
59 return None
60
60
61 def _rootctx(repo):
61 def _rootctx(repo):
62 # repo[0] may be hidden
62 # repo[0] may be hidden
63 for rev in repo:
63 for rev in repo:
64 return repo[rev]
64 return repo[rev]
65 return repo['null']
65 return repo['null']
66
66
67 def buildmetadata(ctx):
67 def buildmetadata(ctx):
68 '''build content of .hg_archival.txt'''
68 '''build content of .hg_archival.txt'''
69 repo = ctx.repo()
69 repo = ctx.repo()
70 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
70 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
71 _rootctx(repo).hex(), ctx.hex(), encoding.fromlocal(ctx.branch()))
71 _rootctx(repo).hex(), ctx.hex(), encoding.fromlocal(ctx.branch()))
72
72
73 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
73 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
74 if repo.tagtype(t) == 'global')
74 if repo.tagtype(t) == 'global')
75 if not tags:
75 if not tags:
76 repo.ui.pushbuffer()
76 repo.ui.pushbuffer()
77 opts = {'template': '{latesttag}\n{latesttagdistance}',
77 opts = {'template': '{latesttag}\n{latesttagdistance}',
78 'style': '', 'patch': None, 'git': None}
78 'style': '', 'patch': None, 'git': None}
79 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
79 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
80 ltags, dist = repo.ui.popbuffer().split('\n')
80 ltags, dist = repo.ui.popbuffer().split('\n')
81 ltags = ltags.split(':')
81 ltags = ltags.split(':')
82 changessince = len(repo.revs('only(.,%s)', ltags[0]))
82 changessince = len(repo.revs('only(.,%s)', ltags[0]))
83 tags = ''.join('latesttag: %s\n' % t for t in ltags)
83 tags = ''.join('latesttag: %s\n' % t for t in ltags)
84 tags += 'latesttagdistance: %s\n' % dist
84 tags += 'latesttagdistance: %s\n' % dist
85 tags += 'changessincelatesttag: %s\n' % changessince
85 tags += 'changessincelatesttag: %s\n' % changessince
86
86
87 return base + tags
87 return base + tags
88
88
89 class tarit(object):
89 class tarit(object):
90 '''write archive to tar file or stream. can write uncompressed,
90 '''write archive to tar file or stream. can write uncompressed,
91 or compress with gzip or bzip2.'''
91 or compress with gzip or bzip2.'''
92
92
93 class GzipFileWithTime(gzip.GzipFile):
93 class GzipFileWithTime(gzip.GzipFile):
94
94
95 def __init__(self, *args, **kw):
95 def __init__(self, *args, **kw):
96 timestamp = None
96 timestamp = None
97 if 'timestamp' in kw:
97 if 'timestamp' in kw:
98 timestamp = kw.pop('timestamp')
98 timestamp = kw.pop('timestamp')
99 if timestamp is None:
99 if timestamp is None:
100 self.timestamp = time.time()
100 self.timestamp = time.time()
101 else:
101 else:
102 self.timestamp = timestamp
102 self.timestamp = timestamp
103 gzip.GzipFile.__init__(self, *args, **kw)
103 gzip.GzipFile.__init__(self, *args, **kw)
104
104
105 def _write_gzip_header(self):
105 def _write_gzip_header(self):
106 self.fileobj.write('\037\213') # magic header
106 self.fileobj.write('\037\213') # magic header
107 self.fileobj.write('\010') # compression method
107 self.fileobj.write('\010') # compression method
108 # Python 2.6 introduced self.name and deprecated self.filename
108 # Python 2.6 introduced self.name and deprecated self.filename
109 try:
109 try:
110 fname = self.name
110 fname = self.name
111 except AttributeError:
111 except AttributeError:
112 fname = self.filename
112 fname = self.filename
113 if fname and fname.endswith('.gz'):
113 if fname and fname.endswith('.gz'):
114 fname = fname[:-3]
114 fname = fname[:-3]
115 flags = 0
115 flags = 0
116 if fname:
116 if fname:
117 flags = gzip.FNAME
117 flags = gzip.FNAME
118 self.fileobj.write(chr(flags))
118 self.fileobj.write(chr(flags))
119 gzip.write32u(self.fileobj, long(self.timestamp))
119 gzip.write32u(self.fileobj, long(self.timestamp))
120 self.fileobj.write('\002')
120 self.fileobj.write('\002')
121 self.fileobj.write('\377')
121 self.fileobj.write('\377')
122 if fname:
122 if fname:
123 self.fileobj.write(fname + '\000')
123 self.fileobj.write(fname + '\000')
124
124
125 def __init__(self, dest, mtime, kind=''):
125 def __init__(self, dest, mtime, kind=''):
126 self.mtime = mtime
126 self.mtime = mtime
127 self.fileobj = None
127 self.fileobj = None
128
128
129 def taropen(name, mode, fileobj=None):
129 def taropen(name, mode, fileobj=None):
130 if kind == 'gz':
130 if kind == 'gz':
131 mode = mode[0]
131 mode = mode[0]
132 if not fileobj:
132 if not fileobj:
133 fileobj = open(name, mode + 'b')
133 fileobj = open(name, mode + 'b')
134 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
134 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
135 zlib.Z_BEST_COMPRESSION,
135 zlib.Z_BEST_COMPRESSION,
136 fileobj, timestamp=mtime)
136 fileobj, timestamp=mtime)
137 self.fileobj = gzfileobj
137 self.fileobj = gzfileobj
138 return tarfile.TarFile.taropen(name, mode, gzfileobj)
138 return tarfile.TarFile.taropen(name, mode, gzfileobj)
139 else:
139 else:
140 return tarfile.open(name, mode + kind, fileobj)
140 return tarfile.open(name, mode + kind, fileobj)
141
141
142 if isinstance(dest, str):
142 if isinstance(dest, str):
143 self.z = taropen(dest, mode='w:')
143 self.z = taropen(dest, mode='w:')
144 else:
144 else:
145 # Python 2.5-2.5.1 have a regression that requires a name arg
145 # Python 2.5-2.5.1 have a regression that requires a name arg
146 self.z = taropen(name='', mode='w|', fileobj=dest)
146 self.z = taropen(name='', mode='w|', fileobj=dest)
147
147
148 def addfile(self, name, mode, islink, data):
148 def addfile(self, name, mode, islink, data):
149 i = tarfile.TarInfo(name)
149 i = tarfile.TarInfo(name)
150 i.mtime = self.mtime
150 i.mtime = self.mtime
151 i.size = len(data)
151 i.size = len(data)
152 if islink:
152 if islink:
153 i.type = tarfile.SYMTYPE
153 i.type = tarfile.SYMTYPE
154 i.mode = 0777
154 i.mode = 0777
155 i.linkname = data
155 i.linkname = data
156 data = None
156 data = None
157 i.size = 0
157 i.size = 0
158 else:
158 else:
159 i.mode = mode
159 i.mode = mode
160 data = cStringIO.StringIO(data)
160 data = cStringIO.StringIO(data)
161 self.z.addfile(i, data)
161 self.z.addfile(i, data)
162
162
163 def done(self):
163 def done(self):
164 self.z.close()
164 self.z.close()
165 if self.fileobj:
165 if self.fileobj:
166 self.fileobj.close()
166 self.fileobj.close()
167
167
168 class tellable(object):
168 class tellable(object):
169 '''provide tell method for zipfile.ZipFile when writing to http
169 '''provide tell method for zipfile.ZipFile when writing to http
170 response file object.'''
170 response file object.'''
171
171
172 def __init__(self, fp):
172 def __init__(self, fp):
173 self.fp = fp
173 self.fp = fp
174 self.offset = 0
174 self.offset = 0
175
175
176 def __getattr__(self, key):
176 def __getattr__(self, key):
177 return getattr(self.fp, key)
177 return getattr(self.fp, key)
178
178
179 def write(self, s):
179 def write(self, s):
180 self.fp.write(s)
180 self.fp.write(s)
181 self.offset += len(s)
181 self.offset += len(s)
182
182
183 def tell(self):
183 def tell(self):
184 return self.offset
184 return self.offset
185
185
186 class zipit(object):
186 class zipit(object):
187 '''write archive to zip file or stream. can write uncompressed,
187 '''write archive to zip file or stream. can write uncompressed,
188 or compressed with deflate.'''
188 or compressed with deflate.'''
189
189
190 def __init__(self, dest, mtime, compress=True):
190 def __init__(self, dest, mtime, compress=True):
191 if not isinstance(dest, str):
191 if not isinstance(dest, str):
192 try:
192 try:
193 dest.tell()
193 dest.tell()
194 except (AttributeError, IOError):
194 except (AttributeError, IOError):
195 dest = tellable(dest)
195 dest = tellable(dest)
196 self.z = zipfile.ZipFile(dest, 'w',
196 self.z = zipfile.ZipFile(dest, 'w',
197 compress and zipfile.ZIP_DEFLATED or
197 compress and zipfile.ZIP_DEFLATED or
198 zipfile.ZIP_STORED)
198 zipfile.ZIP_STORED)
199
199
200 # Python's zipfile module emits deprecation warnings if we try
200 # Python's zipfile module emits deprecation warnings if we try
201 # to store files with a date before 1980.
201 # to store files with a date before 1980.
202 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
202 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
203 if mtime < epoch:
203 if mtime < epoch:
204 mtime = epoch
204 mtime = epoch
205
205
206 self.mtime = mtime
206 self.mtime = mtime
207 self.date_time = time.gmtime(mtime)[:6]
207 self.date_time = time.gmtime(mtime)[:6]
208
208
209 def addfile(self, name, mode, islink, data):
209 def addfile(self, name, mode, islink, data):
210 i = zipfile.ZipInfo(name, self.date_time)
210 i = zipfile.ZipInfo(name, self.date_time)
211 i.compress_type = self.z.compression
211 i.compress_type = self.z.compression
212 # unzip will not honor unix file modes unless file creator is
212 # unzip will not honor unix file modes unless file creator is
213 # set to unix (id 3).
213 # set to unix (id 3).
214 i.create_system = 3
214 i.create_system = 3
215 ftype = _UNX_IFREG
215 ftype = _UNX_IFREG
216 if islink:
216 if islink:
217 mode = 0777
217 mode = 0777
218 ftype = _UNX_IFLNK
218 ftype = _UNX_IFLNK
219 i.external_attr = (mode | ftype) << 16L
219 i.external_attr = (mode | ftype) << 16L
220 # add "extended-timestamp" extra block, because zip archives
220 # add "extended-timestamp" extra block, because zip archives
221 # without this will be extracted with an unexpected timestamp,
221 # without this will be extracted with an unexpected timestamp,
222 # if TZ is not configured as GMT
222 # if TZ is not configured as GMT
223 i.extra += struct.pack('<hhBl',
223 i.extra += struct.pack('<hhBl',
224 0x5455, # block type: "extended-timestamp"
224 0x5455, # block type: "extended-timestamp"
225 1 + 4, # size of this block
225 1 + 4, # size of this block
226 1, # "modification time is present"
226 1, # "modification time is present"
227 int(self.mtime)) # last modification (UTC)
227 int(self.mtime)) # last modification (UTC)
228 self.z.writestr(i, data)
228 self.z.writestr(i, data)
229
229
230 def done(self):
230 def done(self):
231 self.z.close()
231 self.z.close()
232
232
233 class fileit(object):
233 class fileit(object):
234 '''write archive as files in directory.'''
234 '''write archive as files in directory.'''
235
235
236 def __init__(self, name, mtime):
236 def __init__(self, name, mtime):
237 self.basedir = name
237 self.basedir = name
238 self.opener = scmutil.opener(self.basedir)
238 self.opener = scmutil.opener(self.basedir)
239
239
240 def addfile(self, name, mode, islink, data):
240 def addfile(self, name, mode, islink, data):
241 if islink:
241 if islink:
242 self.opener.symlink(data, name)
242 self.opener.symlink(data, name)
243 return
243 return
244 f = self.opener(name, "w", atomictemp=True)
244 f = self.opener(name, "w", atomictemp=True)
245 f.write(data)
245 f.write(data)
246 f.close()
246 f.close()
247 destfile = os.path.join(self.basedir, name)
247 destfile = os.path.join(self.basedir, name)
248 os.chmod(destfile, mode)
248 os.chmod(destfile, mode)
249
249
250 def done(self):
250 def done(self):
251 pass
251 pass
252
252
253 archivers = {
253 archivers = {
254 'files': fileit,
254 'files': fileit,
255 'tar': tarit,
255 'tar': tarit,
256 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
256 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
257 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
257 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
258 'uzip': lambda name, mtime: zipit(name, mtime, False),
258 'uzip': lambda name, mtime: zipit(name, mtime, False),
259 'zip': zipit,
259 'zip': zipit,
260 }
260 }
261
261
262 def archive(repo, dest, node, kind, decode=True, matchfn=None,
262 def archive(repo, dest, node, kind, decode=True, matchfn=None,
263 prefix='', mtime=None, subrepos=False):
263 prefix='', mtime=None, subrepos=False):
264 '''create archive of repo as it was at node.
264 '''create archive of repo as it was at node.
265
265
266 dest can be name of directory, name of archive file, or file
266 dest can be name of directory, name of archive file, or file
267 object to write archive to.
267 object to write archive to.
268
268
269 kind is type of archive to create.
269 kind is type of archive to create.
270
270
271 decode tells whether to put files through decode filters from
271 decode tells whether to put files through decode filters from
272 hgrc.
272 hgrc.
273
273
274 matchfn is function to filter names of files to write to archive.
274 matchfn is function to filter names of files to write to archive.
275
275
276 prefix is name of path to put before every archive member.'''
276 prefix is name of path to put before every archive member.'''
277
277
278 if kind == 'files':
278 if kind == 'files':
279 if prefix:
279 if prefix:
280 raise util.Abort(_('cannot give prefix when archiving to files'))
280 raise util.Abort(_('cannot give prefix when archiving to files'))
281 else:
281 else:
282 prefix = tidyprefix(dest, kind, prefix)
282 prefix = tidyprefix(dest, kind, prefix)
283
283
284 def write(name, mode, islink, getdata):
284 def write(name, mode, islink, getdata):
285 data = getdata()
285 data = getdata()
286 if decode:
286 if decode:
287 data = repo.wwritedata(name, data)
287 data = repo.wwritedata(name, data)
288 archiver.addfile(prefix + name, mode, islink, data)
288 archiver.addfile(prefix + name, mode, islink, data)
289
289
290 if kind not in archivers:
290 if kind not in archivers:
291 raise util.Abort(_("unknown archive type '%s'") % kind)
291 raise util.Abort(_("unknown archive type '%s'") % kind)
292
292
293 ctx = repo[node]
293 ctx = repo[node]
294 archiver = archivers[kind](dest, mtime or ctx.date()[0])
294 archiver = archivers[kind](dest, mtime or ctx.date()[0])
295
295
296 if repo.ui.configbool("ui", "archivemeta", True):
296 if repo.ui.configbool("ui", "archivemeta", True):
297 name = '.hg_archival.txt'
297 name = '.hg_archival.txt'
298 if not matchfn or matchfn(name):
298 if not matchfn or matchfn(name):
299 write(name, 0644, False, lambda: buildmetadata(ctx))
299 write(name, 0644, False, lambda: buildmetadata(ctx))
300
300
301 if matchfn:
301 if matchfn:
302 files = [f for f in ctx.manifest().keys() if matchfn(f)]
302 files = [f for f in ctx.manifest().keys() if matchfn(f)]
303 else:
303 else:
304 files = ctx.manifest().keys()
304 files = ctx.manifest().keys()
305 total = len(files)
305 total = len(files)
306 if total:
306 if total:
307 files.sort()
307 files.sort()
308 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
308 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
309 for i, f in enumerate(files):
309 for i, f in enumerate(files):
310 ff = ctx.flags(f)
310 ff = ctx.flags(f)
311 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
311 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
312 repo.ui.progress(_('archiving'), i + 1, item=f,
312 repo.ui.progress(_('archiving'), i + 1, item=f,
313 unit=_('files'), total=total)
313 unit=_('files'), total=total)
314 repo.ui.progress(_('archiving'), None)
314 repo.ui.progress(_('archiving'), None)
315
315
316 if subrepos:
316 if subrepos:
317 for subpath in sorted(ctx.substate):
317 for subpath in sorted(ctx.substate):
318 sub = ctx.sub(subpath)
318 sub = ctx.workingsub(subpath)
319 submatch = matchmod.narrowmatcher(subpath, matchfn)
319 submatch = matchmod.narrowmatcher(subpath, matchfn)
320 total += sub.archive(archiver, prefix, submatch)
320 total += sub.archive(archiver, prefix, submatch)
321
321
322 if total == 0:
322 if total == 0:
323 raise error.Abort(_('no files match the archive pattern'))
323 raise error.Abort(_('no files match the archive pattern'))
324
324
325 archiver.done()
325 archiver.done()
326 return total
326 return total
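
Editor's note: the following is a minimal usage sketch, not part of this changeset. Assuming a stock Mercurial of this vintage and a repository at 'path', calling archive() with the signature shown above and the 'zip' archiver from the archivers dict might look roughly like this; the return value is the number of files archived.

from mercurial import ui as uimod, hg, archival

def snapshot(path, rev='tip'):
    # open an existing repository and archive <rev> as a zip file,
    # prefixing every member with 'snapshot/'
    repo = hg.repository(uimod.ui(), path)
    return archival.archive(repo, 'snapshot.zip', rev, 'zip',
                            decode=True, matchfn=None,
                            prefix='snapshot/', subrepos=False)
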
@@ -1,1910 +1,1910 @@
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import copy
8 import copy
9 import errno, os, re, posixpath, sys
9 import errno, os, re, posixpath, sys
10 import xml.dom.minidom
10 import xml.dom.minidom
11 import stat, subprocess, tarfile
11 import stat, subprocess, tarfile
12 from i18n import _
12 from i18n import _
13 import config, util, node, error, cmdutil, scmutil, match as matchmod
13 import config, util, node, error, cmdutil, scmutil, match as matchmod
14 import phases
14 import phases
15 import pathutil
15 import pathutil
16 import exchange
16 import exchange
17 hg = None
17 hg = None
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 nullstate = ('', '', 'empty')
20 nullstate = ('', '', 'empty')
21
21
22 def _expandedabspath(path):
22 def _expandedabspath(path):
23 '''
23 '''
24 get a path or url and if it is a path expand it and return an absolute path
24 get a path or url and if it is a path expand it and return an absolute path
25 '''
25 '''
26 expandedpath = util.urllocalpath(util.expandpath(path))
26 expandedpath = util.urllocalpath(util.expandpath(path))
27 u = util.url(expandedpath)
27 u = util.url(expandedpath)
28 if not u.scheme:
28 if not u.scheme:
29 path = util.normpath(os.path.abspath(u.path))
29 path = util.normpath(os.path.abspath(u.path))
30 return path
30 return path
31
31
32 def _getstorehashcachename(remotepath):
32 def _getstorehashcachename(remotepath):
33 '''get a unique filename for the store hash cache of a remote repository'''
33 '''get a unique filename for the store hash cache of a remote repository'''
34 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
34 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
35
35
36 class SubrepoAbort(error.Abort):
36 class SubrepoAbort(error.Abort):
37 """Exception class used to avoid handling a subrepo error more than once"""
37 """Exception class used to avoid handling a subrepo error more than once"""
38 def __init__(self, *args, **kw):
38 def __init__(self, *args, **kw):
39 error.Abort.__init__(self, *args, **kw)
39 error.Abort.__init__(self, *args, **kw)
40 self.subrepo = kw.get('subrepo')
40 self.subrepo = kw.get('subrepo')
41 self.cause = kw.get('cause')
41 self.cause = kw.get('cause')
42
42
43 def annotatesubrepoerror(func):
43 def annotatesubrepoerror(func):
44 def decoratedmethod(self, *args, **kargs):
44 def decoratedmethod(self, *args, **kargs):
45 try:
45 try:
46 res = func(self, *args, **kargs)
46 res = func(self, *args, **kargs)
47 except SubrepoAbort, ex:
47 except SubrepoAbort, ex:
48 # This exception has already been handled
48 # This exception has already been handled
49 raise ex
49 raise ex
50 except error.Abort, ex:
50 except error.Abort, ex:
51 subrepo = subrelpath(self)
51 subrepo = subrelpath(self)
52 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
52 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
53 # avoid handling this exception by raising a SubrepoAbort exception
53 # avoid handling this exception by raising a SubrepoAbort exception
54 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
54 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
55 cause=sys.exc_info())
55 cause=sys.exc_info())
56 return res
56 return res
57 return decoratedmethod
57 return decoratedmethod
58
58
59 def state(ctx, ui):
59 def state(ctx, ui):
60 """return a state dict, mapping subrepo paths configured in .hgsub
60 """return a state dict, mapping subrepo paths configured in .hgsub
61 to tuple: (source from .hgsub, revision from .hgsubstate, kind
61 to tuple: (source from .hgsub, revision from .hgsubstate, kind
62 (key in types dict))
62 (key in types dict))
63 """
63 """
64 p = config.config()
64 p = config.config()
65 def read(f, sections=None, remap=None):
65 def read(f, sections=None, remap=None):
66 if f in ctx:
66 if f in ctx:
67 try:
67 try:
68 data = ctx[f].data()
68 data = ctx[f].data()
69 except IOError, err:
69 except IOError, err:
70 if err.errno != errno.ENOENT:
70 if err.errno != errno.ENOENT:
71 raise
71 raise
72 # handle missing subrepo spec files as removed
72 # handle missing subrepo spec files as removed
73 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
73 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
74 util.pathto(ctx.repo().root, ctx.repo().getcwd(), f))
74 util.pathto(ctx.repo().root, ctx.repo().getcwd(), f))
75 return
75 return
76 p.parse(f, data, sections, remap, read)
76 p.parse(f, data, sections, remap, read)
77 else:
77 else:
78 repo = ctx.repo()
78 repo = ctx.repo()
79 raise util.Abort(_("subrepo spec file \'%s\' not found") %
79 raise util.Abort(_("subrepo spec file \'%s\' not found") %
80 util.pathto(repo.root, repo.getcwd(), f))
80 util.pathto(repo.root, repo.getcwd(), f))
81
81
82 if '.hgsub' in ctx:
82 if '.hgsub' in ctx:
83 read('.hgsub')
83 read('.hgsub')
84
84
85 for path, src in ui.configitems('subpaths'):
85 for path, src in ui.configitems('subpaths'):
86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
87
87
88 rev = {}
88 rev = {}
89 if '.hgsubstate' in ctx:
89 if '.hgsubstate' in ctx:
90 try:
90 try:
91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
92 l = l.lstrip()
92 l = l.lstrip()
93 if not l:
93 if not l:
94 continue
94 continue
95 try:
95 try:
96 revision, path = l.split(" ", 1)
96 revision, path = l.split(" ", 1)
97 except ValueError:
97 except ValueError:
98 repo = ctx.repo()
98 repo = ctx.repo()
99 raise util.Abort(_("invalid subrepository revision "
99 raise util.Abort(_("invalid subrepository revision "
100 "specifier in \'%s\' line %d")
100 "specifier in \'%s\' line %d")
101 % (util.pathto(repo.root, repo.getcwd(),
101 % (util.pathto(repo.root, repo.getcwd(),
102 '.hgsubstate'), (i + 1)))
102 '.hgsubstate'), (i + 1)))
103 rev[path] = revision
103 rev[path] = revision
104 except IOError, err:
104 except IOError, err:
105 if err.errno != errno.ENOENT:
105 if err.errno != errno.ENOENT:
106 raise
106 raise
107
107
108 def remap(src):
108 def remap(src):
109 for pattern, repl in p.items('subpaths'):
109 for pattern, repl in p.items('subpaths'):
110 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
110 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
111 # does a string decode.
111 # does a string decode.
112 repl = repl.encode('string-escape')
112 repl = repl.encode('string-escape')
113 # However, we still want to allow back references to go
113 # However, we still want to allow back references to go
114 # through unharmed, so we turn r'\\1' into r'\1'. Again,
114 # through unharmed, so we turn r'\\1' into r'\1'. Again,
115 # extra escapes are needed because re.sub string decodes.
115 # extra escapes are needed because re.sub string decodes.
116 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
116 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
117 try:
117 try:
118 src = re.sub(pattern, repl, src, 1)
118 src = re.sub(pattern, repl, src, 1)
119 except re.error, e:
119 except re.error, e:
120 raise util.Abort(_("bad subrepository pattern in %s: %s")
120 raise util.Abort(_("bad subrepository pattern in %s: %s")
121 % (p.source('subpaths', pattern), e))
121 % (p.source('subpaths', pattern), e))
122 return src
122 return src
123
123
124 state = {}
124 state = {}
125 for path, src in p[''].items():
125 for path, src in p[''].items():
126 kind = 'hg'
126 kind = 'hg'
127 if src.startswith('['):
127 if src.startswith('['):
128 if ']' not in src:
128 if ']' not in src:
129 raise util.Abort(_('missing ] in subrepo source'))
129 raise util.Abort(_('missing ] in subrepo source'))
130 kind, src = src.split(']', 1)
130 kind, src = src.split(']', 1)
131 kind = kind[1:]
131 kind = kind[1:]
132 src = src.lstrip() # strip any extra whitespace after ']'
132 src = src.lstrip() # strip any extra whitespace after ']'
133
133
134 if not util.url(src).isabs():
134 if not util.url(src).isabs():
135 parent = _abssource(ctx.repo(), abort=False)
135 parent = _abssource(ctx.repo(), abort=False)
136 if parent:
136 if parent:
137 parent = util.url(parent)
137 parent = util.url(parent)
138 parent.path = posixpath.join(parent.path or '', src)
138 parent.path = posixpath.join(parent.path or '', src)
139 parent.path = posixpath.normpath(parent.path)
139 parent.path = posixpath.normpath(parent.path)
140 joined = str(parent)
140 joined = str(parent)
141 # Remap the full joined path and use it if it changes,
141 # Remap the full joined path and use it if it changes,
142 # else remap the original source.
142 # else remap the original source.
143 remapped = remap(joined)
143 remapped = remap(joined)
144 if remapped == joined:
144 if remapped == joined:
145 src = remap(src)
145 src = remap(src)
146 else:
146 else:
147 src = remapped
147 src = remapped
148
148
149 src = remap(src)
149 src = remap(src)
150 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
150 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
151
151
152 return state
152 return state
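
Editor's illustration (hypothetical path and URL, not from this changeset): given an .hgsub entry "libs/vendor = https://example.com/vendor" and a matching .hgsubstate line holding a 40-hex node for that path, state() above yields a mapping from subrepo path to a (source, revision, kind) tuple, roughly:

expected_state = {
    'libs/vendor': ('https://example.com/vendor',
                    '0123456789abcdef0123456789abcdef01234567',
                    'hg'),
}
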
153
153
154 def writestate(repo, state):
154 def writestate(repo, state):
155 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
155 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
156 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
156 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
157 if state[s][1] != nullstate[1]]
157 if state[s][1] != nullstate[1]]
158 repo.wwrite('.hgsubstate', ''.join(lines), '')
158 repo.wwrite('.hgsubstate', ''.join(lines), '')
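
Conversely, a small editor's sketch with a made-up state shows the .hgsubstate format written by writestate() above: one "<node> <path>" line per committed subrepo, sorted by path (entries still at the null revision are filtered out by the comprehension in the function).

state = {'libs/vendor': ('https://example.com/vendor', 'f' * 40, 'hg')}
lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
# lines == ['ffffffffffffffffffffffffffffffffffffffff libs/vendor\n']
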
159
159
160 def submerge(repo, wctx, mctx, actx, overwrite):
160 def submerge(repo, wctx, mctx, actx, overwrite):
161 """delegated from merge.applyupdates: merging of .hgsubstate file
161 """delegated from merge.applyupdates: merging of .hgsubstate file
162 in working context, merging context and ancestor context"""
162 in working context, merging context and ancestor context"""
163 if mctx == actx: # backwards?
163 if mctx == actx: # backwards?
164 actx = wctx.p1()
164 actx = wctx.p1()
165 s1 = wctx.substate
165 s1 = wctx.substate
166 s2 = mctx.substate
166 s2 = mctx.substate
167 sa = actx.substate
167 sa = actx.substate
168 sm = {}
168 sm = {}
169
169
170 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
170 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
171
171
172 def debug(s, msg, r=""):
172 def debug(s, msg, r=""):
173 if r:
173 if r:
174 r = "%s:%s:%s" % r
174 r = "%s:%s:%s" % r
175 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
175 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
176
176
177 for s, l in sorted(s1.iteritems()):
177 for s, l in sorted(s1.iteritems()):
178 a = sa.get(s, nullstate)
178 a = sa.get(s, nullstate)
179 ld = l # local state with possible dirty flag for compares
179 ld = l # local state with possible dirty flag for compares
180 if wctx.sub(s).dirty():
180 if wctx.sub(s).dirty():
181 ld = (l[0], l[1] + "+")
181 ld = (l[0], l[1] + "+")
182 if wctx == actx: # overwrite
182 if wctx == actx: # overwrite
183 a = ld
183 a = ld
184
184
185 if s in s2:
185 if s in s2:
186 r = s2[s]
186 r = s2[s]
187 if ld == r or r == a: # no change or local is newer
187 if ld == r or r == a: # no change or local is newer
188 sm[s] = l
188 sm[s] = l
189 continue
189 continue
190 elif ld == a: # other side changed
190 elif ld == a: # other side changed
191 debug(s, "other changed, get", r)
191 debug(s, "other changed, get", r)
192 wctx.sub(s).get(r, overwrite)
192 wctx.sub(s).get(r, overwrite)
193 sm[s] = r
193 sm[s] = r
194 elif ld[0] != r[0]: # sources differ
194 elif ld[0] != r[0]: # sources differ
195 if repo.ui.promptchoice(
195 if repo.ui.promptchoice(
196 _(' subrepository sources for %s differ\n'
196 _(' subrepository sources for %s differ\n'
197 'use (l)ocal source (%s) or (r)emote source (%s)?'
197 'use (l)ocal source (%s) or (r)emote source (%s)?'
198 '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
198 '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
199 debug(s, "prompt changed, get", r)
199 debug(s, "prompt changed, get", r)
200 wctx.sub(s).get(r, overwrite)
200 wctx.sub(s).get(r, overwrite)
201 sm[s] = r
201 sm[s] = r
202 elif ld[1] == a[1]: # local side is unchanged
202 elif ld[1] == a[1]: # local side is unchanged
203 debug(s, "other side changed, get", r)
203 debug(s, "other side changed, get", r)
204 wctx.sub(s).get(r, overwrite)
204 wctx.sub(s).get(r, overwrite)
205 sm[s] = r
205 sm[s] = r
206 else:
206 else:
207 debug(s, "both sides changed")
207 debug(s, "both sides changed")
208 srepo = wctx.sub(s)
208 srepo = wctx.sub(s)
209 option = repo.ui.promptchoice(
209 option = repo.ui.promptchoice(
210 _(' subrepository %s diverged (local revision: %s, '
210 _(' subrepository %s diverged (local revision: %s, '
211 'remote revision: %s)\n'
211 'remote revision: %s)\n'
212 '(M)erge, keep (l)ocal or keep (r)emote?'
212 '(M)erge, keep (l)ocal or keep (r)emote?'
213 '$$ &Merge $$ &Local $$ &Remote')
213 '$$ &Merge $$ &Local $$ &Remote')
214 % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0)
214 % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0)
215 if option == 0:
215 if option == 0:
216 wctx.sub(s).merge(r)
216 wctx.sub(s).merge(r)
217 sm[s] = l
217 sm[s] = l
218 debug(s, "merge with", r)
218 debug(s, "merge with", r)
219 elif option == 1:
219 elif option == 1:
220 sm[s] = l
220 sm[s] = l
221 debug(s, "keep local subrepo revision", l)
221 debug(s, "keep local subrepo revision", l)
222 else:
222 else:
223 wctx.sub(s).get(r, overwrite)
223 wctx.sub(s).get(r, overwrite)
224 sm[s] = r
224 sm[s] = r
225 debug(s, "get remote subrepo revision", r)
225 debug(s, "get remote subrepo revision", r)
226 elif ld == a: # remote removed, local unchanged
226 elif ld == a: # remote removed, local unchanged
227 debug(s, "remote removed, remove")
227 debug(s, "remote removed, remove")
228 wctx.sub(s).remove()
228 wctx.sub(s).remove()
229 elif a == nullstate: # not present in remote or ancestor
229 elif a == nullstate: # not present in remote or ancestor
230 debug(s, "local added, keep")
230 debug(s, "local added, keep")
231 sm[s] = l
231 sm[s] = l
232 continue
232 continue
233 else:
233 else:
234 if repo.ui.promptchoice(
234 if repo.ui.promptchoice(
235 _(' local changed subrepository %s which remote removed\n'
235 _(' local changed subrepository %s which remote removed\n'
236 'use (c)hanged version or (d)elete?'
236 'use (c)hanged version or (d)elete?'
237 '$$ &Changed $$ &Delete') % s, 0):
237 '$$ &Changed $$ &Delete') % s, 0):
238 debug(s, "prompt remove")
238 debug(s, "prompt remove")
239 wctx.sub(s).remove()
239 wctx.sub(s).remove()
240
240
241 for s, r in sorted(s2.items()):
241 for s, r in sorted(s2.items()):
242 if s in s1:
242 if s in s1:
243 continue
243 continue
244 elif s not in sa:
244 elif s not in sa:
245 debug(s, "remote added, get", r)
245 debug(s, "remote added, get", r)
246 mctx.sub(s).get(r)
246 mctx.sub(s).get(r)
247 sm[s] = r
247 sm[s] = r
248 elif r != sa[s]:
248 elif r != sa[s]:
249 if repo.ui.promptchoice(
249 if repo.ui.promptchoice(
250 _(' remote changed subrepository %s which local removed\n'
250 _(' remote changed subrepository %s which local removed\n'
251 'use (c)hanged version or (d)elete?'
251 'use (c)hanged version or (d)elete?'
252 '$$ &Changed $$ &Delete') % s, 0) == 0:
252 '$$ &Changed $$ &Delete') % s, 0) == 0:
253 debug(s, "prompt recreate", r)
253 debug(s, "prompt recreate", r)
254 mctx.sub(s).get(r)
254 mctx.sub(s).get(r)
255 sm[s] = r
255 sm[s] = r
256
256
257 # record merged .hgsubstate
257 # record merged .hgsubstate
258 writestate(repo, sm)
258 writestate(repo, sm)
259 return sm
259 return sm
260
260
261 def _updateprompt(ui, sub, dirty, local, remote):
261 def _updateprompt(ui, sub, dirty, local, remote):
262 if dirty:
262 if dirty:
263 msg = (_(' subrepository sources for %s differ\n'
263 msg = (_(' subrepository sources for %s differ\n'
264 'use (l)ocal source (%s) or (r)emote source (%s)?'
264 'use (l)ocal source (%s) or (r)emote source (%s)?'
265 '$$ &Local $$ &Remote')
265 '$$ &Local $$ &Remote')
266 % (subrelpath(sub), local, remote))
266 % (subrelpath(sub), local, remote))
267 else:
267 else:
268 msg = (_(' subrepository sources for %s differ (in checked out '
268 msg = (_(' subrepository sources for %s differ (in checked out '
269 'version)\n'
269 'version)\n'
270 'use (l)ocal source (%s) or (r)emote source (%s)?'
270 'use (l)ocal source (%s) or (r)emote source (%s)?'
271 '$$ &Local $$ &Remote')
271 '$$ &Local $$ &Remote')
272 % (subrelpath(sub), local, remote))
272 % (subrelpath(sub), local, remote))
273 return ui.promptchoice(msg, 0)
273 return ui.promptchoice(msg, 0)
274
274
275 def reporelpath(repo):
275 def reporelpath(repo):
276 """return path to this (sub)repo as seen from outermost repo"""
276 """return path to this (sub)repo as seen from outermost repo"""
277 parent = repo
277 parent = repo
278 while util.safehasattr(parent, '_subparent'):
278 while util.safehasattr(parent, '_subparent'):
279 parent = parent._subparent
279 parent = parent._subparent
280 return repo.root[len(pathutil.normasprefix(parent.root)):]
280 return repo.root[len(pathutil.normasprefix(parent.root)):]
281
281
282 def subrelpath(sub):
282 def subrelpath(sub):
283 """return path to this subrepo as seen from outermost repo"""
283 """return path to this subrepo as seen from outermost repo"""
284 return sub._relpath
284 return sub._relpath
285
285
286 def _abssource(repo, push=False, abort=True):
286 def _abssource(repo, push=False, abort=True):
287 """return pull/push path of repo - either based on parent repo .hgsub info
287 """return pull/push path of repo - either based on parent repo .hgsub info
288 or on the top repo config. Abort or return None if no source found."""
288 or on the top repo config. Abort or return None if no source found."""
289 if util.safehasattr(repo, '_subparent'):
289 if util.safehasattr(repo, '_subparent'):
290 source = util.url(repo._subsource)
290 source = util.url(repo._subsource)
291 if source.isabs():
291 if source.isabs():
292 return str(source)
292 return str(source)
293 source.path = posixpath.normpath(source.path)
293 source.path = posixpath.normpath(source.path)
294 parent = _abssource(repo._subparent, push, abort=False)
294 parent = _abssource(repo._subparent, push, abort=False)
295 if parent:
295 if parent:
296 parent = util.url(util.pconvert(parent))
296 parent = util.url(util.pconvert(parent))
297 parent.path = posixpath.join(parent.path or '', source.path)
297 parent.path = posixpath.join(parent.path or '', source.path)
298 parent.path = posixpath.normpath(parent.path)
298 parent.path = posixpath.normpath(parent.path)
299 return str(parent)
299 return str(parent)
300 else: # recursion reached top repo
300 else: # recursion reached top repo
301 if util.safehasattr(repo, '_subtoppath'):
301 if util.safehasattr(repo, '_subtoppath'):
302 return repo._subtoppath
302 return repo._subtoppath
303 if push and repo.ui.config('paths', 'default-push'):
303 if push and repo.ui.config('paths', 'default-push'):
304 return repo.ui.config('paths', 'default-push')
304 return repo.ui.config('paths', 'default-push')
305 if repo.ui.config('paths', 'default'):
305 if repo.ui.config('paths', 'default'):
306 return repo.ui.config('paths', 'default')
306 return repo.ui.config('paths', 'default')
307 if repo.shared():
307 if repo.shared():
308 # chop off the .hg component to get the default path form
308 # chop off the .hg component to get the default path form
309 return os.path.dirname(repo.sharedpath)
309 return os.path.dirname(repo.sharedpath)
310 if abort:
310 if abort:
311 raise util.Abort(_("default path for subrepository not found"))
311 raise util.Abort(_("default path for subrepository not found"))
312
312
313 def _sanitize(ui, vfs, ignore):
313 def _sanitize(ui, vfs, ignore):
314 for dirname, dirs, names in vfs.walk():
314 for dirname, dirs, names in vfs.walk():
315 for i, d in enumerate(dirs):
315 for i, d in enumerate(dirs):
316 if d.lower() == ignore:
316 if d.lower() == ignore:
317 del dirs[i]
317 del dirs[i]
318 break
318 break
319 if os.path.basename(dirname).lower() != '.hg':
319 if os.path.basename(dirname).lower() != '.hg':
320 continue
320 continue
321 for f in names:
321 for f in names:
322 if f.lower() == 'hgrc':
322 if f.lower() == 'hgrc':
323 ui.warn(_("warning: removing potentially hostile 'hgrc' "
323 ui.warn(_("warning: removing potentially hostile 'hgrc' "
324 "in '%s'\n") % vfs.join(dirname))
324 "in '%s'\n") % vfs.join(dirname))
325 vfs.unlink(vfs.reljoin(dirname, f))
325 vfs.unlink(vfs.reljoin(dirname, f))
326
326
327 def subrepo(ctx, path, allowwdir=False):
327 def subrepo(ctx, path, allowwdir=False):
328 """return instance of the right subrepo class for subrepo in path"""
328 """return instance of the right subrepo class for subrepo in path"""
329 # subrepo inherently violates our import layering rules
329 # subrepo inherently violates our import layering rules
330 # because it wants to make repo objects from deep inside the stack
330 # because it wants to make repo objects from deep inside the stack
331 # so we manually delay the circular imports to not break
331 # so we manually delay the circular imports to not break
332 # scripts that don't use our demand-loading
332 # scripts that don't use our demand-loading
333 global hg
333 global hg
334 import hg as h
334 import hg as h
335 hg = h
335 hg = h
336
336
337 pathutil.pathauditor(ctx.repo().root)(path)
337 pathutil.pathauditor(ctx.repo().root)(path)
338 state = ctx.substate[path]
338 state = ctx.substate[path]
339 if state[2] not in types:
339 if state[2] not in types:
340 raise util.Abort(_('unknown subrepo type %s') % state[2])
340 raise util.Abort(_('unknown subrepo type %s') % state[2])
341 if allowwdir:
341 if allowwdir:
342 state = (state[0], ctx.subrev(path), state[2])
342 state = (state[0], ctx.subrev(path), state[2])
343 return types[state[2]](ctx, path, state[:2])
343 return types[state[2]](ctx, path, state[:2])
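
A brief caller-side sketch (editor's addition; ctx and the subrepo path are assumed to exist): resolving the wrapper object returned by the factory above, in module-internal style, and optionally substituting the working-directory revision.

def getsub(ctx, path):
    # returns the hg/git/svn subrepo wrapper recorded for 'path' in ctx;
    # allowwdir=True swaps in ctx.subrev(path), i.e. the working directory state
    return subrepo(ctx, path, allowwdir=True)
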
344
344
345 def nullsubrepo(ctx, path, pctx):
345 def nullsubrepo(ctx, path, pctx):
346 """return an empty subrepo in pctx for the extant subrepo in ctx"""
346 """return an empty subrepo in pctx for the extant subrepo in ctx"""
347 # subrepo inherently violates our import layering rules
347 # subrepo inherently violates our import layering rules
348 # because it wants to make repo objects from deep inside the stack
348 # because it wants to make repo objects from deep inside the stack
349 # so we manually delay the circular imports to not break
349 # so we manually delay the circular imports to not break
350 # scripts that don't use our demand-loading
350 # scripts that don't use our demand-loading
351 global hg
351 global hg
352 import hg as h
352 import hg as h
353 hg = h
353 hg = h
354
354
355 pathutil.pathauditor(ctx.repo().root)(path)
355 pathutil.pathauditor(ctx.repo().root)(path)
356 state = ctx.substate[path]
356 state = ctx.substate[path]
357 if state[2] not in types:
357 if state[2] not in types:
358 raise util.Abort(_('unknown subrepo type %s') % state[2])
358 raise util.Abort(_('unknown subrepo type %s') % state[2])
359 subrev = ''
359 subrev = ''
360 if state[2] == 'hg':
360 if state[2] == 'hg':
361 subrev = "0" * 40
361 subrev = "0" * 40
362 return types[state[2]](pctx, path, (state[0], subrev))
362 return types[state[2]](pctx, path, (state[0], subrev))
363
363
364 def newcommitphase(ui, ctx):
364 def newcommitphase(ui, ctx):
365 commitphase = phases.newcommitphase(ui)
365 commitphase = phases.newcommitphase(ui)
366 substate = getattr(ctx, "substate", None)
366 substate = getattr(ctx, "substate", None)
367 if not substate:
367 if not substate:
368 return commitphase
368 return commitphase
369 check = ui.config('phases', 'checksubrepos', 'follow')
369 check = ui.config('phases', 'checksubrepos', 'follow')
370 if check not in ('ignore', 'follow', 'abort'):
370 if check not in ('ignore', 'follow', 'abort'):
371 raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
371 raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
372 % (check))
372 % (check))
373 if check == 'ignore':
373 if check == 'ignore':
374 return commitphase
374 return commitphase
375 maxphase = phases.public
375 maxphase = phases.public
376 maxsub = None
376 maxsub = None
377 for s in sorted(substate):
377 for s in sorted(substate):
378 sub = ctx.sub(s)
378 sub = ctx.sub(s)
379 subphase = sub.phase(substate[s][1])
379 subphase = sub.phase(substate[s][1])
380 if maxphase < subphase:
380 if maxphase < subphase:
381 maxphase = subphase
381 maxphase = subphase
382 maxsub = s
382 maxsub = s
383 if commitphase < maxphase:
383 if commitphase < maxphase:
384 if check == 'abort':
384 if check == 'abort':
385 raise util.Abort(_("can't commit in %s phase"
385 raise util.Abort(_("can't commit in %s phase"
386 " conflicting %s from subrepository %s") %
386 " conflicting %s from subrepository %s") %
387 (phases.phasenames[commitphase],
387 (phases.phasenames[commitphase],
388 phases.phasenames[maxphase], maxsub))
388 phases.phasenames[maxphase], maxsub))
389 ui.warn(_("warning: changes are committed in"
389 ui.warn(_("warning: changes are committed in"
390 " %s phase from subrepository %s\n") %
390 " %s phase from subrepository %s\n") %
391 (phases.phasenames[maxphase], maxsub))
391 (phases.phasenames[maxphase], maxsub))
392 return maxphase
392 return maxphase
393 return commitphase
393 return commitphase
394
394
395 # subrepo classes need to implement the following abstract class:
395 # subrepo classes need to implement the following abstract class:
396
396
397 class abstractsubrepo(object):
397 class abstractsubrepo(object):
398
398
399 def __init__(self, ctx, path):
399 def __init__(self, ctx, path):
400 """Initialize abstractsubrepo part
400 """Initialize abstractsubrepo part
401
401
402 ``ctx`` is the context referring this subrepository in the
402 ``ctx`` is the context referring this subrepository in the
403 parent repository.
403 parent repository.
404
404
405 ``path`` is the path to this subrepository as seen from
405 ``path`` is the path to this subrepository as seen from
406 innermost repository.
406 innermost repository.
407 """
407 """
408 self.ui = ctx.repo().ui
408 self.ui = ctx.repo().ui
409 self._ctx = ctx
409 self._ctx = ctx
410 self._path = path
410 self._path = path
411
411
412 def storeclean(self, path):
412 def storeclean(self, path):
413 """
413 """
414 returns true if the repository has not changed since it was last
414 returns true if the repository has not changed since it was last
415 cloned from or pushed to a given repository.
415 cloned from or pushed to a given repository.
416 """
416 """
417 return False
417 return False
418
418
419 def dirty(self, ignoreupdate=False):
419 def dirty(self, ignoreupdate=False):
420 """returns true if the dirstate of the subrepo is dirty or does not
420 """returns true if the dirstate of the subrepo is dirty or does not
421 match current stored state. If ignoreupdate is true, only check
421 match current stored state. If ignoreupdate is true, only check
422 whether the subrepo has uncommitted changes in its dirstate.
422 whether the subrepo has uncommitted changes in its dirstate.
423 """
423 """
424 raise NotImplementedError
424 raise NotImplementedError
425
425
426 def dirtyreason(self, ignoreupdate=False):
426 def dirtyreason(self, ignoreupdate=False):
427 """return reason string if it is ``dirty()``
427 """return reason string if it is ``dirty()``
428
428
429 The returned string should have enough information for the exception
429 The returned string should have enough information for the exception
430 message.
430 message.
431
431
432 Otherwise, this returns None.
432 Otherwise, this returns None.
433 """
433 """
434 if self.dirty(ignoreupdate=ignoreupdate):
434 if self.dirty(ignoreupdate=ignoreupdate):
435 return _("uncommitted changes in subrepository '%s'"
435 return _("uncommitted changes in subrepository '%s'"
436 ) % subrelpath(self)
436 ) % subrelpath(self)
437
437
438 def bailifchanged(self, ignoreupdate=False):
438 def bailifchanged(self, ignoreupdate=False):
439 """raise Abort if subrepository is ``dirty()``
439 """raise Abort if subrepository is ``dirty()``
440 """
440 """
441 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
441 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
442 if dirtyreason:
442 if dirtyreason:
443 raise util.Abort(dirtyreason)
443 raise util.Abort(dirtyreason)
444
444
445 def basestate(self):
445 def basestate(self):
446 """current working directory base state, disregarding .hgsubstate
446 """current working directory base state, disregarding .hgsubstate
447 state and working directory modifications"""
447 state and working directory modifications"""
448 raise NotImplementedError
448 raise NotImplementedError
449
449
450 def checknested(self, path):
450 def checknested(self, path):
451 """check if path is a subrepository within this repository"""
451 """check if path is a subrepository within this repository"""
452 return False
452 return False
453
453
454 def commit(self, text, user, date):
454 def commit(self, text, user, date):
455 """commit the current changes to the subrepo with the given
455 """commit the current changes to the subrepo with the given
456 log message. Use given user and date if possible. Return the
456 log message. Use given user and date if possible. Return the
457 new state of the subrepo.
457 new state of the subrepo.
458 """
458 """
459 raise NotImplementedError
459 raise NotImplementedError
460
460
461 def phase(self, state):
461 def phase(self, state):
462 """returns phase of specified state in the subrepository.
462 """returns phase of specified state in the subrepository.
463 """
463 """
464 return phases.public
464 return phases.public
465
465
466 def remove(self):
466 def remove(self):
467 """remove the subrepo
467 """remove the subrepo
468
468
469 (should verify the dirstate is not dirty first)
469 (should verify the dirstate is not dirty first)
470 """
470 """
471 raise NotImplementedError
471 raise NotImplementedError
472
472
473 def get(self, state, overwrite=False):
473 def get(self, state, overwrite=False):
474 """run whatever commands are needed to put the subrepo into
474 """run whatever commands are needed to put the subrepo into
475 this state
475 this state
476 """
476 """
477 raise NotImplementedError
477 raise NotImplementedError
478
478
479 def merge(self, state):
479 def merge(self, state):
480 """merge currently-saved state with the new state."""
480 """merge currently-saved state with the new state."""
481 raise NotImplementedError
481 raise NotImplementedError
482
482
483 def push(self, opts):
483 def push(self, opts):
484 """perform whatever action is analogous to 'hg push'
484 """perform whatever action is analogous to 'hg push'
485
485
486 This may be a no-op on some systems.
486 This may be a no-op on some systems.
487 """
487 """
488 raise NotImplementedError
488 raise NotImplementedError
489
489
490 def add(self, ui, match, prefix, explicitonly, **opts):
490 def add(self, ui, match, prefix, explicitonly, **opts):
491 return []
491 return []
492
492
493 def addremove(self, matcher, prefix, opts, dry_run, similarity):
493 def addremove(self, matcher, prefix, opts, dry_run, similarity):
494 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
494 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
495 return 1
495 return 1
496
496
497 def cat(self, match, prefix, **opts):
497 def cat(self, match, prefix, **opts):
498 return 1
498 return 1
499
499
500 def status(self, rev2, **opts):
500 def status(self, rev2, **opts):
501 return scmutil.status([], [], [], [], [], [], [])
501 return scmutil.status([], [], [], [], [], [], [])
502
502
503 def diff(self, ui, diffopts, node2, match, prefix, **opts):
503 def diff(self, ui, diffopts, node2, match, prefix, **opts):
504 pass
504 pass
505
505
506 def outgoing(self, ui, dest, opts):
506 def outgoing(self, ui, dest, opts):
507 return 1
507 return 1
508
508
509 def incoming(self, ui, source, opts):
509 def incoming(self, ui, source, opts):
510 return 1
510 return 1
511
511
512 def files(self):
512 def files(self):
513 """return filename iterator"""
513 """return filename iterator"""
514 raise NotImplementedError
514 raise NotImplementedError
515
515
516 def filedata(self, name):
516 def filedata(self, name):
517 """return file data"""
517 """return file data"""
518 raise NotImplementedError
518 raise NotImplementedError
519
519
520 def fileflags(self, name):
520 def fileflags(self, name):
521 """return file flags"""
521 """return file flags"""
522 return ''
522 return ''
523
523
524 def getfileset(self, expr):
524 def getfileset(self, expr):
525 """Resolve the fileset expression for this repo"""
525 """Resolve the fileset expression for this repo"""
526 return set()
526 return set()
527
527
528 def printfiles(self, ui, m, fm, fmt, subrepos):
528 def printfiles(self, ui, m, fm, fmt, subrepos):
529 """handle the files command for this subrepo"""
529 """handle the files command for this subrepo"""
530 return 1
530 return 1
531
531
532 def archive(self, archiver, prefix, match=None):
532 def archive(self, archiver, prefix, match=None):
533 if match is not None:
533 if match is not None:
534 files = [f for f in self.files() if match(f)]
534 files = [f for f in self.files() if match(f)]
535 else:
535 else:
536 files = self.files()
536 files = self.files()
537 total = len(files)
537 total = len(files)
538 relpath = subrelpath(self)
538 relpath = subrelpath(self)
539 self.ui.progress(_('archiving (%s)') % relpath, 0,
539 self.ui.progress(_('archiving (%s)') % relpath, 0,
540 unit=_('files'), total=total)
540 unit=_('files'), total=total)
541 for i, name in enumerate(files):
541 for i, name in enumerate(files):
542 flags = self.fileflags(name)
542 flags = self.fileflags(name)
543 mode = 'x' in flags and 0755 or 0644
543 mode = 'x' in flags and 0755 or 0644
544 symlink = 'l' in flags
544 symlink = 'l' in flags
545 archiver.addfile(prefix + self._path + '/' + name,
545 archiver.addfile(prefix + self._path + '/' + name,
546 mode, symlink, self.filedata(name))
546 mode, symlink, self.filedata(name))
547 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
547 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
548 unit=_('files'), total=total)
548 unit=_('files'), total=total)
549 self.ui.progress(_('archiving (%s)') % relpath, None)
549 self.ui.progress(_('archiving (%s)') % relpath, None)
550 return total
550 return total
551
551
552 def walk(self, match):
552 def walk(self, match):
553 '''
553 '''
554 walk recursively through the directory tree, finding all files
554 walk recursively through the directory tree, finding all files
555 matched by the match function
555 matched by the match function
556 '''
556 '''
557 pass
557 pass
558
558
559 def forget(self, match, prefix):
559 def forget(self, match, prefix):
560 return ([], [])
560 return ([], [])
561
561
562 def removefiles(self, matcher, prefix, after, force, subrepos):
562 def removefiles(self, matcher, prefix, after, force, subrepos):
563 """remove the matched files from the subrepository and the filesystem,
563 """remove the matched files from the subrepository and the filesystem,
564 possibly by force and/or after the file has been removed from the
564 possibly by force and/or after the file has been removed from the
565 filesystem. Return 0 on success, 1 on any warning.
565 filesystem. Return 0 on success, 1 on any warning.
566 """
566 """
567 return 1
567 return 1
568
568
569 def revert(self, substate, *pats, **opts):
569 def revert(self, substate, *pats, **opts):
570 self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
570 self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
571 % (substate[0], substate[2]))
571 % (substate[0], substate[2]))
572 return []
572 return []
573
573
574 def shortid(self, revid):
574 def shortid(self, revid):
575 return revid
575 return revid
576
576
577 def verify(self):
577 def verify(self):
578 '''verify the integrity of the repository. Return 0 on success or
578 '''verify the integrity of the repository. Return 0 on success or
579 warning, 1 on any error.
579 warning, 1 on any error.
580 '''
580 '''
581 return 0
581 return 0
582
582
583 @propertycache
583 @propertycache
584 def wvfs(self):
584 def wvfs(self):
585 """return vfs to access the working directory of this subrepository
585 """return vfs to access the working directory of this subrepository
586 """
586 """
587 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
587 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
588
588
589 @propertycache
589 @propertycache
590 def _relpath(self):
590 def _relpath(self):
591 """return path to this subrepository as seen from outermost repository
591 """return path to this subrepository as seen from outermost repository
592 """
592 """
593 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
593 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
594
594
595 class hgsubrepo(abstractsubrepo):
595 class hgsubrepo(abstractsubrepo):
596 def __init__(self, ctx, path, state):
596 def __init__(self, ctx, path, state):
597 super(hgsubrepo, self).__init__(ctx, path)
597 super(hgsubrepo, self).__init__(ctx, path)
598 self._state = state
598 self._state = state
599 r = ctx.repo()
599 r = ctx.repo()
600 root = r.wjoin(path)
600 root = r.wjoin(path)
601 create = not r.wvfs.exists('%s/.hg' % path)
601 create = not r.wvfs.exists('%s/.hg' % path)
602 self._repo = hg.repository(r.baseui, root, create=create)
602 self._repo = hg.repository(r.baseui, root, create=create)
603
603
604 # Propagate the parent's --hidden option
604 # Propagate the parent's --hidden option
605 if r is r.unfiltered():
605 if r is r.unfiltered():
606 self._repo = self._repo.unfiltered()
606 self._repo = self._repo.unfiltered()
607
607
608 self.ui = self._repo.ui
608 self.ui = self._repo.ui
609 for s, k in [('ui', 'commitsubrepos')]:
609 for s, k in [('ui', 'commitsubrepos')]:
610 v = r.ui.config(s, k)
610 v = r.ui.config(s, k)
611 if v:
611 if v:
612 self.ui.setconfig(s, k, v, 'subrepo')
612 self.ui.setconfig(s, k, v, 'subrepo')
613 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
613 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
614 self._initrepo(r, state[0], create)
614 self._initrepo(r, state[0], create)
615
615
616 def storeclean(self, path):
616 def storeclean(self, path):
617 lock = self._repo.lock()
617 lock = self._repo.lock()
618 try:
618 try:
619 return self._storeclean(path)
619 return self._storeclean(path)
620 finally:
620 finally:
621 lock.release()
621 lock.release()
622
622
623 def _storeclean(self, path):
623 def _storeclean(self, path):
624 clean = True
624 clean = True
625 itercache = self._calcstorehash(path)
625 itercache = self._calcstorehash(path)
626 for filehash in self._readstorehashcache(path):
626 for filehash in self._readstorehashcache(path):
627 if filehash != next(itercache, None):
627 if filehash != next(itercache, None):
628 clean = False
628 clean = False
629 break
629 break
630 if clean:
630 if clean:
631 # if not empty:
631 # if not empty:
632 # the cached and current pull states have a different size
632 # the cached and current pull states have a different size
633 clean = next(itercache, None) is None
633 clean = next(itercache, None) is None
634 return clean
634 return clean
635
635
636 def _calcstorehash(self, remotepath):
636 def _calcstorehash(self, remotepath):
637 '''calculate a unique "store hash"
637 '''calculate a unique "store hash"
638
638
639 This method is used to detect when there are changes that may
639 This method is used to detect when there are changes that may
640 require a push to a given remote path.'''
640 require a push to a given remote path.'''
641 # sort the files that will be hashed in increasing (likely) file size
641 # sort the files that will be hashed in increasing (likely) file size
642 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
642 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
643 yield '# %s\n' % _expandedabspath(remotepath)
643 yield '# %s\n' % _expandedabspath(remotepath)
644 vfs = self._repo.vfs
644 vfs = self._repo.vfs
645 for relname in filelist:
645 for relname in filelist:
646 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
646 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
647 yield '%s = %s\n' % (relname, filehash)
647 yield '%s = %s\n' % (relname, filehash)
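
For intuition, here is an editor's self-contained approximation of the store hash above, using plain hashlib and filesystem paths instead of util.sha1 and the repository vfs; missing files hash as empty, mirroring vfs.tryread().

import hashlib
import os

def calcstorehash(hgdir, remotepath):
    # hgdir is the repository's .hg directory
    yield '# %s\n' % remotepath
    for relname in ('bookmarks', 'store/phaseroots', 'store/00changelog.i'):
        try:
            fp = open(os.path.join(hgdir, *relname.split('/')), 'rb')
            try:
                data = fp.read()
            finally:
                fp.close()
        except IOError:
            data = b''
        yield '%s = %s\n' % (relname, hashlib.sha1(data).hexdigest())
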
648
648
649 @propertycache
649 @propertycache
650 def _cachestorehashvfs(self):
650 def _cachestorehashvfs(self):
651 return scmutil.vfs(self._repo.join('cache/storehash'))
651 return scmutil.vfs(self._repo.join('cache/storehash'))
652
652
653 def _readstorehashcache(self, remotepath):
653 def _readstorehashcache(self, remotepath):
654 '''read the store hash cache for a given remote repository'''
654 '''read the store hash cache for a given remote repository'''
655 cachefile = _getstorehashcachename(remotepath)
655 cachefile = _getstorehashcachename(remotepath)
656 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
656 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
657
657
658 def _cachestorehash(self, remotepath):
658 def _cachestorehash(self, remotepath):
659 '''cache the current store hash
659 '''cache the current store hash
660
660
661 Each remote repo requires its own store hash cache, because a subrepo
661 Each remote repo requires its own store hash cache, because a subrepo
662 store may be "clean" versus a given remote repo, but not versus another
662 store may be "clean" versus a given remote repo, but not versus another
663 '''
663 '''
664 cachefile = _getstorehashcachename(remotepath)
664 cachefile = _getstorehashcachename(remotepath)
665 lock = self._repo.lock()
665 lock = self._repo.lock()
666 try:
666 try:
667 storehash = list(self._calcstorehash(remotepath))
667 storehash = list(self._calcstorehash(remotepath))
668 vfs = self._cachestorehashvfs
668 vfs = self._cachestorehashvfs
669 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
669 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
670 finally:
670 finally:
671 lock.release()
671 lock.release()
672
672
673 def _getctx(self):
673 def _getctx(self):
674 '''fetch the context for this subrepo revision, possibly a workingctx
674 '''fetch the context for this subrepo revision, possibly a workingctx
675 '''
675 '''
676 if self._ctx.rev() is None:
676 if self._ctx.rev() is None:
677 return self._repo[None] # workingctx if parent is workingctx
677 return self._repo[None] # workingctx if parent is workingctx
678 else:
678 else:
679 rev = self._state[1]
679 rev = self._state[1]
680 return self._repo[rev]
680 return self._repo[rev]
681
681
682 @annotatesubrepoerror
682 @annotatesubrepoerror
683 def _initrepo(self, parentrepo, source, create):
683 def _initrepo(self, parentrepo, source, create):
684 self._repo._subparent = parentrepo
684 self._repo._subparent = parentrepo
685 self._repo._subsource = source
685 self._repo._subsource = source
686
686
687 if create:
687 if create:
688 lines = ['[paths]\n']
688 lines = ['[paths]\n']
689
689
690 def addpathconfig(key, value):
690 def addpathconfig(key, value):
691 if value:
691 if value:
692 lines.append('%s = %s\n' % (key, value))
692 lines.append('%s = %s\n' % (key, value))
693 self.ui.setconfig('paths', key, value, 'subrepo')
693 self.ui.setconfig('paths', key, value, 'subrepo')
694
694
695 defpath = _abssource(self._repo, abort=False)
695 defpath = _abssource(self._repo, abort=False)
696 defpushpath = _abssource(self._repo, True, abort=False)
696 defpushpath = _abssource(self._repo, True, abort=False)
697 addpathconfig('default', defpath)
697 addpathconfig('default', defpath)
698 if defpath != defpushpath:
698 if defpath != defpushpath:
699 addpathconfig('default-push', defpushpath)
699 addpathconfig('default-push', defpushpath)
700
700
701 fp = self._repo.vfs("hgrc", "w", text=True)
701 fp = self._repo.vfs("hgrc", "w", text=True)
702 try:
702 try:
703 fp.write(''.join(lines))
703 fp.write(''.join(lines))
704 finally:
704 finally:
705 fp.close()
705 fp.close()
706
706
707 @annotatesubrepoerror
707 @annotatesubrepoerror
708 def add(self, ui, match, prefix, explicitonly, **opts):
708 def add(self, ui, match, prefix, explicitonly, **opts):
709 return cmdutil.add(ui, self._repo, match,
709 return cmdutil.add(ui, self._repo, match,
710 self.wvfs.reljoin(prefix, self._path),
710 self.wvfs.reljoin(prefix, self._path),
711 explicitonly, **opts)
711 explicitonly, **opts)
712
712
713 @annotatesubrepoerror
713 @annotatesubrepoerror
714 def addremove(self, m, prefix, opts, dry_run, similarity):
714 def addremove(self, m, prefix, opts, dry_run, similarity):
715 # In the same way as sub directories are processed, once in a subrepo,
715 # In the same way as sub directories are processed, once in a subrepo,
716 # always enter any of its subrepos. Don't corrupt the options that will
716 # always enter any of its subrepos. Don't corrupt the options that will
717 # be used to process sibling subrepos however.
717 # be used to process sibling subrepos however.
718 opts = copy.copy(opts)
718 opts = copy.copy(opts)
719 opts['subrepos'] = True
719 opts['subrepos'] = True
720 return scmutil.addremove(self._repo, m,
720 return scmutil.addremove(self._repo, m,
721 self.wvfs.reljoin(prefix, self._path), opts,
721 self.wvfs.reljoin(prefix, self._path), opts,
722 dry_run, similarity)
722 dry_run, similarity)
723
723
724 @annotatesubrepoerror
724 @annotatesubrepoerror
725 def cat(self, match, prefix, **opts):
725 def cat(self, match, prefix, **opts):
726 rev = self._state[1]
726 rev = self._state[1]
727 ctx = self._repo[rev]
727 ctx = self._repo[rev]
728 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
728 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
729
729
730 @annotatesubrepoerror
730 @annotatesubrepoerror
731 def status(self, rev2, **opts):
731 def status(self, rev2, **opts):
732 try:
732 try:
733 rev1 = self._state[1]
733 rev1 = self._state[1]
734 ctx1 = self._repo[rev1]
734 ctx1 = self._repo[rev1]
735 ctx2 = self._repo[rev2]
735 ctx2 = self._repo[rev2]
736 return self._repo.status(ctx1, ctx2, **opts)
736 return self._repo.status(ctx1, ctx2, **opts)
737 except error.RepoLookupError, inst:
737 except error.RepoLookupError, inst:
738 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
738 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
739 % (inst, subrelpath(self)))
739 % (inst, subrelpath(self)))
740 return scmutil.status([], [], [], [], [], [], [])
740 return scmutil.status([], [], [], [], [], [], [])
741
741
742 @annotatesubrepoerror
742 @annotatesubrepoerror
743 def diff(self, ui, diffopts, node2, match, prefix, **opts):
743 def diff(self, ui, diffopts, node2, match, prefix, **opts):
744 try:
744 try:
745 node1 = node.bin(self._state[1])
745 node1 = node.bin(self._state[1])
746 # We currently expect node2 to come from substate and be
746 # We currently expect node2 to come from substate and be
747 # in hex format
747 # in hex format
748 if node2 is not None:
748 if node2 is not None:
749 node2 = node.bin(node2)
749 node2 = node.bin(node2)
750 cmdutil.diffordiffstat(ui, self._repo, diffopts,
750 cmdutil.diffordiffstat(ui, self._repo, diffopts,
751 node1, node2, match,
751 node1, node2, match,
752 prefix=posixpath.join(prefix, self._path),
752 prefix=posixpath.join(prefix, self._path),
753 listsubrepos=True, **opts)
753 listsubrepos=True, **opts)
754 except error.RepoLookupError, inst:
754 except error.RepoLookupError, inst:
755 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
755 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
756 % (inst, subrelpath(self)))
756 % (inst, subrelpath(self)))
757
757
758 @annotatesubrepoerror
758 @annotatesubrepoerror
759 def archive(self, archiver, prefix, match=None):
759 def archive(self, archiver, prefix, match=None):
760 self._get(self._state + ('hg',))
760 self._get(self._state + ('hg',))
761 total = abstractsubrepo.archive(self, archiver, prefix, match)
761 total = abstractsubrepo.archive(self, archiver, prefix, match)
762 rev = self._state[1]
762 rev = self._state[1]
763 ctx = self._repo[rev]
763 ctx = self._repo[rev]
764 for subpath in ctx.substate:
764 for subpath in ctx.substate:
765 s = subrepo(ctx, subpath)
765 s = subrepo(ctx, subpath, True)
766 submatch = matchmod.narrowmatcher(subpath, match)
766 submatch = matchmod.narrowmatcher(subpath, match)
767 total += s.archive(archiver, prefix + self._path + '/', submatch)
767 total += s.archive(archiver, prefix + self._path + '/', submatch)
768 return total
768 return total
769
769
770 @annotatesubrepoerror
770 @annotatesubrepoerror
771 def dirty(self, ignoreupdate=False):
771 def dirty(self, ignoreupdate=False):
772 r = self._state[1]
772 r = self._state[1]
773 if r == '' and not ignoreupdate: # no state recorded
773 if r == '' and not ignoreupdate: # no state recorded
774 return True
774 return True
775 w = self._repo[None]
775 w = self._repo[None]
776 if r != w.p1().hex() and not ignoreupdate:
776 if r != w.p1().hex() and not ignoreupdate:
777 # different version checked out
777 # different version checked out
778 return True
778 return True
779 return w.dirty() # working directory changed
779 return w.dirty() # working directory changed
780
780
781 def basestate(self):
781 def basestate(self):
782 return self._repo['.'].hex()
782 return self._repo['.'].hex()
783
783
784 def checknested(self, path):
784 def checknested(self, path):
785 return self._repo._checknested(self._repo.wjoin(path))
785 return self._repo._checknested(self._repo.wjoin(path))
786
786
787 @annotatesubrepoerror
787 @annotatesubrepoerror
788 def commit(self, text, user, date):
788 def commit(self, text, user, date):
789 # don't bother committing in the subrepo if it's only been
789 # don't bother committing in the subrepo if it's only been
790 # updated
790 # updated
791 if not self.dirty(True):
791 if not self.dirty(True):
792 return self._repo['.'].hex()
792 return self._repo['.'].hex()
793 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
793 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
794 n = self._repo.commit(text, user, date)
794 n = self._repo.commit(text, user, date)
795 if not n:
795 if not n:
796 return self._repo['.'].hex() # different version checked out
796 return self._repo['.'].hex() # different version checked out
797 return node.hex(n)
797 return node.hex(n)
798
798
799 @annotatesubrepoerror
799 @annotatesubrepoerror
800 def phase(self, state):
800 def phase(self, state):
801 return self._repo[state].phase()
801 return self._repo[state].phase()
802
802
803 @annotatesubrepoerror
803 @annotatesubrepoerror
804 def remove(self):
804 def remove(self):
805 # we can't fully delete the repository as it may contain
805 # we can't fully delete the repository as it may contain
806 # local-only history
806 # local-only history
807 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
807 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
808 hg.clean(self._repo, node.nullid, False)
808 hg.clean(self._repo, node.nullid, False)
809
809
810 def _get(self, state):
810 def _get(self, state):
811 source, revision, kind = state
811 source, revision, kind = state
812 if revision in self._repo.unfiltered():
812 if revision in self._repo.unfiltered():
813 return True
813 return True
814 self._repo._subsource = source
814 self._repo._subsource = source
815 srcurl = _abssource(self._repo)
815 srcurl = _abssource(self._repo)
816 other = hg.peer(self._repo, {}, srcurl)
816 other = hg.peer(self._repo, {}, srcurl)
817 if len(self._repo) == 0:
817 if len(self._repo) == 0:
818 self.ui.status(_('cloning subrepo %s from %s\n')
818 self.ui.status(_('cloning subrepo %s from %s\n')
819 % (subrelpath(self), srcurl))
819 % (subrelpath(self), srcurl))
820 parentrepo = self._repo._subparent
820 parentrepo = self._repo._subparent
821 # use self._repo.vfs instead of self.wvfs to remove .hg only
821 # use self._repo.vfs instead of self.wvfs to remove .hg only
822 self._repo.vfs.rmtree()
822 self._repo.vfs.rmtree()
823 other, cloned = hg.clone(self._repo._subparent.baseui, {},
823 other, cloned = hg.clone(self._repo._subparent.baseui, {},
824 other, self._repo.root,
824 other, self._repo.root,
825 update=False)
825 update=False)
826 self._repo = cloned.local()
826 self._repo = cloned.local()
827 self._initrepo(parentrepo, source, create=True)
827 self._initrepo(parentrepo, source, create=True)
828 self._cachestorehash(srcurl)
828 self._cachestorehash(srcurl)
829 else:
829 else:
830 self.ui.status(_('pulling subrepo %s from %s\n')
830 self.ui.status(_('pulling subrepo %s from %s\n')
831 % (subrelpath(self), srcurl))
831 % (subrelpath(self), srcurl))
832 cleansub = self.storeclean(srcurl)
832 cleansub = self.storeclean(srcurl)
833 exchange.pull(self._repo, other)
833 exchange.pull(self._repo, other)
834 if cleansub:
834 if cleansub:
835 # keep the repo clean after pull
835 # keep the repo clean after pull
836 self._cachestorehash(srcurl)
836 self._cachestorehash(srcurl)
837 return False
837 return False
838
838
839 @annotatesubrepoerror
839 @annotatesubrepoerror
840 def get(self, state, overwrite=False):
840 def get(self, state, overwrite=False):
841 inrepo = self._get(state)
841 inrepo = self._get(state)
842 source, revision, kind = state
842 source, revision, kind = state
843 repo = self._repo
843 repo = self._repo
844 repo.ui.debug("getting subrepo %s\n" % self._path)
844 repo.ui.debug("getting subrepo %s\n" % self._path)
845 if inrepo:
845 if inrepo:
846 urepo = repo.unfiltered()
846 urepo = repo.unfiltered()
847 ctx = urepo[revision]
847 ctx = urepo[revision]
848 if ctx.hidden():
848 if ctx.hidden():
849 urepo.ui.warn(
849 urepo.ui.warn(
850 _('revision %s in subrepo %s is hidden\n') \
850 _('revision %s in subrepo %s is hidden\n') \
851 % (revision[0:12], self._path))
851 % (revision[0:12], self._path))
852 repo = urepo
852 repo = urepo
853 hg.updaterepo(repo, revision, overwrite)
853 hg.updaterepo(repo, revision, overwrite)
854
854
855 @annotatesubrepoerror
855 @annotatesubrepoerror
856 def merge(self, state):
856 def merge(self, state):
857 self._get(state)
857 self._get(state)
858 cur = self._repo['.']
858 cur = self._repo['.']
859 dst = self._repo[state[1]]
859 dst = self._repo[state[1]]
860 anc = dst.ancestor(cur)
860 anc = dst.ancestor(cur)
861
861
862 def mergefunc():
862 def mergefunc():
863 if anc == cur and dst.branch() == cur.branch():
863 if anc == cur and dst.branch() == cur.branch():
864 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
864 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
865 hg.update(self._repo, state[1])
865 hg.update(self._repo, state[1])
866 elif anc == dst:
866 elif anc == dst:
867 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
867 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
868 else:
868 else:
869 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
869 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
870 hg.merge(self._repo, state[1], remind=False)
870 hg.merge(self._repo, state[1], remind=False)
871
871
872 wctx = self._repo[None]
872 wctx = self._repo[None]
873 if self.dirty():
873 if self.dirty():
874 if anc != dst:
874 if anc != dst:
875 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
875 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
876 mergefunc()
876 mergefunc()
877 else:
877 else:
878 mergefunc()
878 mergefunc()
879 else:
879 else:
880 mergefunc()
880 mergefunc()
881
881
882 @annotatesubrepoerror
882 @annotatesubrepoerror
883 def push(self, opts):
883 def push(self, opts):
884 force = opts.get('force')
884 force = opts.get('force')
885 newbranch = opts.get('new_branch')
885 newbranch = opts.get('new_branch')
886 ssh = opts.get('ssh')
886 ssh = opts.get('ssh')
887
887
888 # push subrepos depth-first for coherent ordering
888 # push subrepos depth-first for coherent ordering
889 c = self._repo['']
889 c = self._repo['']
890 subs = c.substate # only repos that are committed
890 subs = c.substate # only repos that are committed
891 for s in sorted(subs):
891 for s in sorted(subs):
892 if c.sub(s).push(opts) == 0:
892 if c.sub(s).push(opts) == 0:
893 return False
893 return False
894
894
895 dsturl = _abssource(self._repo, True)
895 dsturl = _abssource(self._repo, True)
896 if not force:
896 if not force:
897 if self.storeclean(dsturl):
897 if self.storeclean(dsturl):
898 self.ui.status(
898 self.ui.status(
899 _('no changes made to subrepo %s since last push to %s\n')
899 _('no changes made to subrepo %s since last push to %s\n')
900 % (subrelpath(self), dsturl))
900 % (subrelpath(self), dsturl))
901 return None
901 return None
902 self.ui.status(_('pushing subrepo %s to %s\n') %
902 self.ui.status(_('pushing subrepo %s to %s\n') %
903 (subrelpath(self), dsturl))
903 (subrelpath(self), dsturl))
904 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
904 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
905 res = exchange.push(self._repo, other, force, newbranch=newbranch)
905 res = exchange.push(self._repo, other, force, newbranch=newbranch)
906
906
907 # the repo is now clean
907 # the repo is now clean
908 self._cachestorehash(dsturl)
908 self._cachestorehash(dsturl)
909 return res.cgresult
909 return res.cgresult
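    # Rough sketch of the store-hash bookkeeping above: once a push (or pull)
    # leaves the local store matching the remote at dsturl/srcurl,
    # _cachestorehash() records a hash of the store for that URL, so a later
    # storeclean() check can cheaply detect that nothing new needs to be
    # exchanged and short-circuit the operation.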
910
910
911 @annotatesubrepoerror
911 @annotatesubrepoerror
912 def outgoing(self, ui, dest, opts):
912 def outgoing(self, ui, dest, opts):
913 if 'rev' in opts or 'branch' in opts:
913 if 'rev' in opts or 'branch' in opts:
914 opts = copy.copy(opts)
914 opts = copy.copy(opts)
915 opts.pop('rev', None)
915 opts.pop('rev', None)
916 opts.pop('branch', None)
916 opts.pop('branch', None)
917 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
917 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
918
918
919 @annotatesubrepoerror
919 @annotatesubrepoerror
920 def incoming(self, ui, source, opts):
920 def incoming(self, ui, source, opts):
921 if 'rev' in opts or 'branch' in opts:
921 if 'rev' in opts or 'branch' in opts:
922 opts = copy.copy(opts)
922 opts = copy.copy(opts)
923 opts.pop('rev', None)
923 opts.pop('rev', None)
924 opts.pop('branch', None)
924 opts.pop('branch', None)
925 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
925 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
926
926
927 @annotatesubrepoerror
927 @annotatesubrepoerror
928 def files(self):
928 def files(self):
929 rev = self._state[1]
929 rev = self._state[1]
930 ctx = self._repo[rev]
930 ctx = self._repo[rev]
931 return ctx.manifest().keys()
931 return ctx.manifest().keys()
932
932
933 def filedata(self, name):
933 def filedata(self, name):
934 rev = self._state[1]
934 rev = self._state[1]
935 return self._repo[rev][name].data()
935 return self._repo[rev][name].data()
936
936
937 def fileflags(self, name):
937 def fileflags(self, name):
938 rev = self._state[1]
938 rev = self._state[1]
939 ctx = self._repo[rev]
939 ctx = self._repo[rev]
940 return ctx.flags(name)
940 return ctx.flags(name)
941
941
942 @annotatesubrepoerror
942 @annotatesubrepoerror
943 def printfiles(self, ui, m, fm, fmt, subrepos):
943 def printfiles(self, ui, m, fm, fmt, subrepos):
944 # If the parent context is a workingctx, use the workingctx here for
944 # If the parent context is a workingctx, use the workingctx here for
945 # consistency.
945 # consistency.
946 if self._ctx.rev() is None:
946 if self._ctx.rev() is None:
947 ctx = self._repo[None]
947 ctx = self._repo[None]
948 else:
948 else:
949 rev = self._state[1]
949 rev = self._state[1]
950 ctx = self._repo[rev]
950 ctx = self._repo[rev]
951 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
951 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
952
952
953 @annotatesubrepoerror
953 @annotatesubrepoerror
954 def getfileset(self, expr):
954 def getfileset(self, expr):
955 if self._ctx.rev() is None:
955 if self._ctx.rev() is None:
956 ctx = self._repo[None]
956 ctx = self._repo[None]
957 else:
957 else:
958 rev = self._state[1]
958 rev = self._state[1]
959 ctx = self._repo[rev]
959 ctx = self._repo[rev]
960
960
961 files = ctx.getfileset(expr)
961 files = ctx.getfileset(expr)
962
962
963 for subpath in ctx.substate:
963 for subpath in ctx.substate:
964 sub = ctx.sub(subpath)
964 sub = ctx.sub(subpath)
965
965
966 try:
966 try:
967 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
967 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
968 except error.LookupError:
968 except error.LookupError:
969 self.ui.status(_("skipping missing subrepository: %s\n")
969 self.ui.status(_("skipping missing subrepository: %s\n")
970 % self.wvfs.reljoin(reporelpath(self), subpath))
970 % self.wvfs.reljoin(reporelpath(self), subpath))
971 return files
971 return files
972
972
973 def walk(self, match):
973 def walk(self, match):
974 ctx = self._repo[None]
974 ctx = self._repo[None]
975 return ctx.walk(match)
975 return ctx.walk(match)
976
976
977 @annotatesubrepoerror
977 @annotatesubrepoerror
978 def forget(self, match, prefix):
978 def forget(self, match, prefix):
979 return cmdutil.forget(self.ui, self._repo, match,
979 return cmdutil.forget(self.ui, self._repo, match,
980 self.wvfs.reljoin(prefix, self._path), True)
980 self.wvfs.reljoin(prefix, self._path), True)
981
981
982 @annotatesubrepoerror
982 @annotatesubrepoerror
983 def removefiles(self, matcher, prefix, after, force, subrepos):
983 def removefiles(self, matcher, prefix, after, force, subrepos):
984 return cmdutil.remove(self.ui, self._repo, matcher,
984 return cmdutil.remove(self.ui, self._repo, matcher,
985 self.wvfs.reljoin(prefix, self._path),
985 self.wvfs.reljoin(prefix, self._path),
986 after, force, subrepos)
986 after, force, subrepos)
987
987
988 @annotatesubrepoerror
988 @annotatesubrepoerror
989 def revert(self, substate, *pats, **opts):
989 def revert(self, substate, *pats, **opts):
990 # reverting a subrepo is a 2 step process:
990 # reverting a subrepo is a 2 step process:
991 # 1. if the no_backup is not set, revert all modified
991 # 1. if the no_backup is not set, revert all modified
992 # files inside the subrepo
992 # files inside the subrepo
993 # 2. update the subrepo to the revision specified in
993 # 2. update the subrepo to the revision specified in
994 # the corresponding substate dictionary
994 # the corresponding substate dictionary
995 self.ui.status(_('reverting subrepo %s\n') % substate[0])
995 self.ui.status(_('reverting subrepo %s\n') % substate[0])
996 if not opts.get('no_backup'):
996 if not opts.get('no_backup'):
997 # Revert all files on the subrepo, creating backups
997 # Revert all files on the subrepo, creating backups
998 # Note that this will not recursively revert subrepos
998 # Note that this will not recursively revert subrepos
999 # We could do it if there was a set:subrepos() predicate
999 # We could do it if there was a set:subrepos() predicate
1000 opts = opts.copy()
1000 opts = opts.copy()
1001 opts['date'] = None
1001 opts['date'] = None
1002 opts['rev'] = substate[1]
1002 opts['rev'] = substate[1]
1003
1003
1004 self.filerevert(*pats, **opts)
1004 self.filerevert(*pats, **opts)
1005
1005
1006 # Update the repo to the revision specified in the given substate
1006 # Update the repo to the revision specified in the given substate
1007 if not opts.get('dry_run'):
1007 if not opts.get('dry_run'):
1008 self.get(substate, overwrite=True)
1008 self.get(substate, overwrite=True)
1009
1009
1010 def filerevert(self, *pats, **opts):
1010 def filerevert(self, *pats, **opts):
1011 ctx = self._repo[opts['rev']]
1011 ctx = self._repo[opts['rev']]
1012 parents = self._repo.dirstate.parents()
1012 parents = self._repo.dirstate.parents()
1013 if opts.get('all'):
1013 if opts.get('all'):
1014 pats = ['set:modified()']
1014 pats = ['set:modified()']
1015 else:
1015 else:
1016 pats = []
1016 pats = []
1017 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1017 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1018
1018
1019 def shortid(self, revid):
1019 def shortid(self, revid):
1020 return revid[:12]
1020 return revid[:12]
1021
1021
1022 def verify(self):
1022 def verify(self):
1023 try:
1023 try:
1024 rev = self._state[1]
1024 rev = self._state[1]
1025 ctx = self._repo.unfiltered()[rev]
1025 ctx = self._repo.unfiltered()[rev]
1026 if ctx.hidden():
1026 if ctx.hidden():
1027 # Since hidden revisions aren't pushed/pulled, it seems worth an
1027 # Since hidden revisions aren't pushed/pulled, it seems worth an
1028 # explicit warning.
1028 # explicit warning.
1029 ui = self._repo.ui
1029 ui = self._repo.ui
1030 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1030 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1031 (self._relpath, node.short(self._ctx.node())))
1031 (self._relpath, node.short(self._ctx.node())))
1032 return 0
1032 return 0
1033 except error.RepoLookupError:
1033 except error.RepoLookupError:
1034 # A missing subrepo revision may be a case of needing to pull it, so
1034 # A missing subrepo revision may be a case of needing to pull it, so
1035 # don't treat this as an error.
1035 # don't treat this as an error.
1036 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1036 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1037 (self._relpath, node.short(self._ctx.node())))
1037 (self._relpath, node.short(self._ctx.node())))
1038 return 0
1038 return 0
1039
1039
    @propertycache
    def wvfs(self):
        """return own wvfs for efficiency and consistency
        """
        return self._repo.wvfs
1045
1045
    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        # Keep consistent dir separators by avoiding vfs.join(self._path)
        return reporelpath(self._repo)
1052
1052
1053 class svnsubrepo(abstractsubrepo):
1053 class svnsubrepo(abstractsubrepo):
1054 def __init__(self, ctx, path, state):
1054 def __init__(self, ctx, path, state):
1055 super(svnsubrepo, self).__init__(ctx, path)
1055 super(svnsubrepo, self).__init__(ctx, path)
1056 self._state = state
1056 self._state = state
1057 self._exe = util.findexe('svn')
1057 self._exe = util.findexe('svn')
1058 if not self._exe:
1058 if not self._exe:
1059 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
1059 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
1060 % self._path)
1060 % self._path)
1061
1061
1062 def _svncommand(self, commands, filename='', failok=False):
1062 def _svncommand(self, commands, filename='', failok=False):
1063 cmd = [self._exe]
1063 cmd = [self._exe]
1064 extrakw = {}
1064 extrakw = {}
1065 if not self.ui.interactive():
1065 if not self.ui.interactive():
1066 # Making stdin be a pipe should prevent svn from behaving
1066 # Making stdin be a pipe should prevent svn from behaving
1067 # interactively even if we can't pass --non-interactive.
1067 # interactively even if we can't pass --non-interactive.
1068 extrakw['stdin'] = subprocess.PIPE
1068 extrakw['stdin'] = subprocess.PIPE
1069 # Starting in svn 1.5 --non-interactive is a global flag
1069 # Starting in svn 1.5 --non-interactive is a global flag
1070 # instead of being per-command, but we need to support 1.4 so
1070 # instead of being per-command, but we need to support 1.4 so
1071 # we have to be intelligent about what commands take
1071 # we have to be intelligent about what commands take
1072 # --non-interactive.
1072 # --non-interactive.
1073 if commands[0] in ('update', 'checkout', 'commit'):
1073 if commands[0] in ('update', 'checkout', 'commit'):
1074 cmd.append('--non-interactive')
1074 cmd.append('--non-interactive')
1075 cmd.extend(commands)
1075 cmd.extend(commands)
1076 if filename is not None:
1076 if filename is not None:
1077 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1077 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1078 self._path, filename)
1078 self._path, filename)
1079 cmd.append(path)
1079 cmd.append(path)
1080 env = dict(os.environ)
1080 env = dict(os.environ)
1081 # Avoid localized output, preserve current locale for everything else.
1081 # Avoid localized output, preserve current locale for everything else.
1082 lc_all = env.get('LC_ALL')
1082 lc_all = env.get('LC_ALL')
1083 if lc_all:
1083 if lc_all:
1084 env['LANG'] = lc_all
1084 env['LANG'] = lc_all
1085 del env['LC_ALL']
1085 del env['LC_ALL']
1086 env['LC_MESSAGES'] = 'C'
1086 env['LC_MESSAGES'] = 'C'
1087 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1087 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1088 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1088 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1089 universal_newlines=True, env=env, **extrakw)
1089 universal_newlines=True, env=env, **extrakw)
1090 stdout, stderr = p.communicate()
1090 stdout, stderr = p.communicate()
1091 stderr = stderr.strip()
1091 stderr = stderr.strip()
1092 if not failok:
1092 if not failok:
1093 if p.returncode:
1093 if p.returncode:
1094 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
1094 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
1095 if stderr:
1095 if stderr:
1096 self.ui.warn(stderr + '\n')
1096 self.ui.warn(stderr + '\n')
1097 return stdout, stderr
1097 return stdout, stderr
1098
1098
1099 @propertycache
1099 @propertycache
1100 def _svnversion(self):
1100 def _svnversion(self):
1101 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1101 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1102 m = re.search(r'^(\d+)\.(\d+)', output)
1102 m = re.search(r'^(\d+)\.(\d+)', output)
1103 if not m:
1103 if not m:
1104 raise util.Abort(_('cannot retrieve svn tool version'))
1104 raise util.Abort(_('cannot retrieve svn tool version'))
1105 return (int(m.group(1)), int(m.group(2)))
1105 return (int(m.group(1)), int(m.group(2)))
1106
1106
1107 def _wcrevs(self):
1107 def _wcrevs(self):
1108 # Get the working directory revision as well as the last
1108 # Get the working directory revision as well as the last
1109 # commit revision so we can compare the subrepo state with
1109 # commit revision so we can compare the subrepo state with
1110 # both. We used to store the working directory one.
1110 # both. We used to store the working directory one.
1111 output, err = self._svncommand(['info', '--xml'])
1111 output, err = self._svncommand(['info', '--xml'])
1112 doc = xml.dom.minidom.parseString(output)
1112 doc = xml.dom.minidom.parseString(output)
1113 entries = doc.getElementsByTagName('entry')
1113 entries = doc.getElementsByTagName('entry')
1114 lastrev, rev = '0', '0'
1114 lastrev, rev = '0', '0'
1115 if entries:
1115 if entries:
1116 rev = str(entries[0].getAttribute('revision')) or '0'
1116 rev = str(entries[0].getAttribute('revision')) or '0'
1117 commits = entries[0].getElementsByTagName('commit')
1117 commits = entries[0].getElementsByTagName('commit')
1118 if commits:
1118 if commits:
1119 lastrev = str(commits[0].getAttribute('revision')) or '0'
1119 lastrev = str(commits[0].getAttribute('revision')) or '0'
1120 return (lastrev, rev)
1120 return (lastrev, rev)
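    # For illustration (typical `svn info --xml` output, not taken from this
    # repository): an <entry revision="42"> element wrapping a
    # <commit revision="40"> element makes _wcrevs() return ('40', '42'),
    # i.e. (last committed revision, working copy revision).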
1121
1121
1122 def _wcrev(self):
1122 def _wcrev(self):
1123 return self._wcrevs()[0]
1123 return self._wcrevs()[0]
1124
1124
1125 def _wcchanged(self):
1125 def _wcchanged(self):
1126 """Return (changes, extchanges, missing) where changes is True
1126 """Return (changes, extchanges, missing) where changes is True
1127 if the working directory was changed, extchanges is
1127 if the working directory was changed, extchanges is
1128 True if any of these changes concern an external entry and missing
1128 True if any of these changes concern an external entry and missing
1129 is True if any change is a missing entry.
1129 is True if any change is a missing entry.
1130 """
1130 """
1131 output, err = self._svncommand(['status', '--xml'])
1131 output, err = self._svncommand(['status', '--xml'])
1132 externals, changes, missing = [], [], []
1132 externals, changes, missing = [], [], []
1133 doc = xml.dom.minidom.parseString(output)
1133 doc = xml.dom.minidom.parseString(output)
1134 for e in doc.getElementsByTagName('entry'):
1134 for e in doc.getElementsByTagName('entry'):
1135 s = e.getElementsByTagName('wc-status')
1135 s = e.getElementsByTagName('wc-status')
1136 if not s:
1136 if not s:
1137 continue
1137 continue
1138 item = s[0].getAttribute('item')
1138 item = s[0].getAttribute('item')
1139 props = s[0].getAttribute('props')
1139 props = s[0].getAttribute('props')
1140 path = e.getAttribute('path')
1140 path = e.getAttribute('path')
1141 if item == 'external':
1141 if item == 'external':
1142 externals.append(path)
1142 externals.append(path)
1143 elif item == 'missing':
1143 elif item == 'missing':
1144 missing.append(path)
1144 missing.append(path)
1145 if (item not in ('', 'normal', 'unversioned', 'external')
1145 if (item not in ('', 'normal', 'unversioned', 'external')
1146 or props not in ('', 'none', 'normal')):
1146 or props not in ('', 'none', 'normal')):
1147 changes.append(path)
1147 changes.append(path)
1148 for path in changes:
1148 for path in changes:
1149 for ext in externals:
1149 for ext in externals:
1150 if path == ext or path.startswith(ext + os.sep):
1150 if path == ext or path.startswith(ext + os.sep):
1151 return True, True, bool(missing)
1151 return True, True, bool(missing)
1152 return bool(changes), False, bool(missing)
1152 return bool(changes), False, bool(missing)
1153
1153
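    # How the helpers above combine: dirty() below treats the checkout as
    # modified when _wcchanged() reports local changes, and as out of date
    # when the revision recorded in the parent's .hgsubstate (self._state[1])
    # matches neither value returned by _wcrevs().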
1154 def dirty(self, ignoreupdate=False):
1154 def dirty(self, ignoreupdate=False):
1155 if not self._wcchanged()[0]:
1155 if not self._wcchanged()[0]:
1156 if self._state[1] in self._wcrevs() or ignoreupdate:
1156 if self._state[1] in self._wcrevs() or ignoreupdate:
1157 return False
1157 return False
1158 return True
1158 return True
1159
1159
    def basestate(self):
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # The last committed rev is not the same as rev. We would
            # like to take lastrev, but we do not know if the subrepo
            # URL exists at lastrev. Test it, and fall back to rev if
            # it is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev
1173
1173
1174 @annotatesubrepoerror
1174 @annotatesubrepoerror
1175 def commit(self, text, user, date):
1175 def commit(self, text, user, date):
1176 # user and date are out of our hands since svn is centralized
1176 # user and date are out of our hands since svn is centralized
1177 changed, extchanged, missing = self._wcchanged()
1177 changed, extchanged, missing = self._wcchanged()
1178 if not changed:
1178 if not changed:
1179 return self.basestate()
1179 return self.basestate()
1180 if extchanged:
1180 if extchanged:
1181 # Do not try to commit externals
1181 # Do not try to commit externals
1182 raise util.Abort(_('cannot commit svn externals'))
1182 raise util.Abort(_('cannot commit svn externals'))
1183 if missing:
1183 if missing:
1184 # svn can commit with missing entries but aborting like hg
1184 # svn can commit with missing entries but aborting like hg
1185 # seems a better approach.
1185 # seems a better approach.
1186 raise util.Abort(_('cannot commit missing svn entries'))
1186 raise util.Abort(_('cannot commit missing svn entries'))
1187 commitinfo, err = self._svncommand(['commit', '-m', text])
1187 commitinfo, err = self._svncommand(['commit', '-m', text])
1188 self.ui.status(commitinfo)
1188 self.ui.status(commitinfo)
1189 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1189 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn's. For instance, svn ignores missing files when
                # committing. If there are only missing files, no
                # commit is made, there is no output and no error code.
                raise util.Abort(_('failed to commit svn changes'))
            raise util.Abort(commitinfo.splitlines()[-1])
1198 newrev = newrev.groups()[0]
1198 newrev = newrev.groups()[0]
1199 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1199 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1200 return newrev
1200 return newrev
1201
1201
1202 @annotatesubrepoerror
1202 @annotatesubrepoerror
1203 def remove(self):
1203 def remove(self):
1204 if self.dirty():
1204 if self.dirty():
1205 self.ui.warn(_('not removing repo %s because '
1205 self.ui.warn(_('not removing repo %s because '
1206 'it has changes.\n') % self._path)
1206 'it has changes.\n') % self._path)
1207 return
1207 return
1208 self.ui.note(_('removing subrepo %s\n') % self._path)
1208 self.ui.note(_('removing subrepo %s\n') % self._path)
1209
1209
1210 self.wvfs.rmtree(forcibly=True)
1210 self.wvfs.rmtree(forcibly=True)
1211 try:
1211 try:
1212 self._ctx.repo().wvfs.removedirs(os.path.dirname(self._path))
1212 self._ctx.repo().wvfs.removedirs(os.path.dirname(self._path))
1213 except OSError:
1213 except OSError:
1214 pass
1214 pass
1215
1215
1216 @annotatesubrepoerror
1216 @annotatesubrepoerror
1217 def get(self, state, overwrite=False):
1217 def get(self, state, overwrite=False):
1218 if overwrite:
1218 if overwrite:
1219 self._svncommand(['revert', '--recursive'])
1219 self._svncommand(['revert', '--recursive'])
1220 args = ['checkout']
1220 args = ['checkout']
1221 if self._svnversion >= (1, 5):
1221 if self._svnversion >= (1, 5):
1222 args.append('--force')
1222 args.append('--force')
1223 # The revision must be specified at the end of the URL to properly
1223 # The revision must be specified at the end of the URL to properly
1224 # update to a directory which has since been deleted and recreated.
1224 # update to a directory which has since been deleted and recreated.
1225 args.append('%s@%s' % (state[0], state[1]))
1225 args.append('%s@%s' % (state[0], state[1]))
1226 status, err = self._svncommand(args, failok=True)
1226 status, err = self._svncommand(args, failok=True)
1227 _sanitize(self.ui, self.wvfs, '.svn')
1227 _sanitize(self.ui, self.wvfs, '.svn')
1228 if not re.search('Checked out revision [0-9]+.', status):
1228 if not re.search('Checked out revision [0-9]+.', status):
1229 if ('is already a working copy for a different URL' in err
1229 if ('is already a working copy for a different URL' in err
1230 and (self._wcchanged()[:2] == (False, False))):
1230 and (self._wcchanged()[:2] == (False, False))):
1231 # obstructed but clean working copy, so just blow it away.
1231 # obstructed but clean working copy, so just blow it away.
1232 self.remove()
1232 self.remove()
1233 self.get(state, overwrite=False)
1233 self.get(state, overwrite=False)
1234 return
1234 return
1235 raise util.Abort((status or err).splitlines()[-1])
1235 raise util.Abort((status or err).splitlines()[-1])
1236 self.ui.status(status)
1236 self.ui.status(status)
1237
1237
1238 @annotatesubrepoerror
1238 @annotatesubrepoerror
1239 def merge(self, state):
1239 def merge(self, state):
1240 old = self._state[1]
1240 old = self._state[1]
1241 new = state[1]
1241 new = state[1]
1242 wcrev = self._wcrev()
1242 wcrev = self._wcrev()
1243 if new != wcrev:
1243 if new != wcrev:
1244 dirty = old == wcrev or self._wcchanged()[0]
1244 dirty = old == wcrev or self._wcchanged()[0]
1245 if _updateprompt(self.ui, self, dirty, wcrev, new):
1245 if _updateprompt(self.ui, self, dirty, wcrev, new):
1246 self.get(state, False)
1246 self.get(state, False)
1247
1247
1248 def push(self, opts):
1248 def push(self, opts):
1249 # push is a no-op for SVN
1249 # push is a no-op for SVN
1250 return True
1250 return True
1251
1251
1252 @annotatesubrepoerror
1252 @annotatesubrepoerror
1253 def files(self):
1253 def files(self):
1254 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1254 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1255 doc = xml.dom.minidom.parseString(output)
1255 doc = xml.dom.minidom.parseString(output)
1256 paths = []
1256 paths = []
1257 for e in doc.getElementsByTagName('entry'):
1257 for e in doc.getElementsByTagName('entry'):
1258 kind = str(e.getAttribute('kind'))
1258 kind = str(e.getAttribute('kind'))
1259 if kind != 'file':
1259 if kind != 'file':
1260 continue
1260 continue
1261 name = ''.join(c.data for c
1261 name = ''.join(c.data for c
1262 in e.getElementsByTagName('name')[0].childNodes
1262 in e.getElementsByTagName('name')[0].childNodes
1263 if c.nodeType == c.TEXT_NODE)
1263 if c.nodeType == c.TEXT_NODE)
1264 paths.append(name.encode('utf-8'))
1264 paths.append(name.encode('utf-8'))
1265 return paths
1265 return paths
1266
1266
1267 def filedata(self, name):
1267 def filedata(self, name):
1268 return self._svncommand(['cat'], name)[0]
1268 return self._svncommand(['cat'], name)[0]
1269
1269
1270
1270
1271 class gitsubrepo(abstractsubrepo):
1271 class gitsubrepo(abstractsubrepo):
1272 def __init__(self, ctx, path, state):
1272 def __init__(self, ctx, path, state):
1273 super(gitsubrepo, self).__init__(ctx, path)
1273 super(gitsubrepo, self).__init__(ctx, path)
1274 self._state = state
1274 self._state = state
1275 self._abspath = ctx.repo().wjoin(path)
1275 self._abspath = ctx.repo().wjoin(path)
1276 self._subparent = ctx.repo()
1276 self._subparent = ctx.repo()
1277 self._ensuregit()
1277 self._ensuregit()
1278
1278
1279 def _ensuregit(self):
1279 def _ensuregit(self):
1280 try:
1280 try:
1281 self._gitexecutable = 'git'
1281 self._gitexecutable = 'git'
1282 out, err = self._gitnodir(['--version'])
1282 out, err = self._gitnodir(['--version'])
1283 except OSError, e:
1283 except OSError, e:
1284 if e.errno != 2 or os.name != 'nt':
1284 if e.errno != 2 or os.name != 'nt':
1285 raise
1285 raise
1286 self._gitexecutable = 'git.cmd'
1286 self._gitexecutable = 'git.cmd'
1287 out, err = self._gitnodir(['--version'])
1287 out, err = self._gitnodir(['--version'])
1288 versionstatus = self._checkversion(out)
1288 versionstatus = self._checkversion(out)
1289 if versionstatus == 'unknown':
1289 if versionstatus == 'unknown':
1290 self.ui.warn(_('cannot retrieve git version\n'))
1290 self.ui.warn(_('cannot retrieve git version\n'))
1291 elif versionstatus == 'abort':
1291 elif versionstatus == 'abort':
1292 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1292 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1293 elif versionstatus == 'warning':
1293 elif versionstatus == 'warning':
1294 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1294 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1295
1295
1296 @staticmethod
1296 @staticmethod
1297 def _gitversion(out):
1297 def _gitversion(out):
1298 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1298 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1299 if m:
1299 if m:
1300 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1300 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1301
1301
1302 m = re.search(r'^git version (\d+)\.(\d+)', out)
1302 m = re.search(r'^git version (\d+)\.(\d+)', out)
1303 if m:
1303 if m:
1304 return (int(m.group(1)), int(m.group(2)), 0)
1304 return (int(m.group(1)), int(m.group(2)), 0)
1305
1305
1306 return -1
1306 return -1
1307
1307
1308 @staticmethod
1308 @staticmethod
1309 def _checkversion(out):
1309 def _checkversion(out):
1310 '''ensure git version is new enough
1310 '''ensure git version is new enough
1311
1311
1312 >>> _checkversion = gitsubrepo._checkversion
1312 >>> _checkversion = gitsubrepo._checkversion
1313 >>> _checkversion('git version 1.6.0')
1313 >>> _checkversion('git version 1.6.0')
1314 'ok'
1314 'ok'
1315 >>> _checkversion('git version 1.8.5')
1315 >>> _checkversion('git version 1.8.5')
1316 'ok'
1316 'ok'
1317 >>> _checkversion('git version 1.4.0')
1317 >>> _checkversion('git version 1.4.0')
1318 'abort'
1318 'abort'
1319 >>> _checkversion('git version 1.5.0')
1319 >>> _checkversion('git version 1.5.0')
1320 'warning'
1320 'warning'
1321 >>> _checkversion('git version 1.9-rc0')
1321 >>> _checkversion('git version 1.9-rc0')
1322 'ok'
1322 'ok'
1323 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1323 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1324 'ok'
1324 'ok'
1325 >>> _checkversion('git version 1.9.0.GIT')
1325 >>> _checkversion('git version 1.9.0.GIT')
1326 'ok'
1326 'ok'
1327 >>> _checkversion('git version 12345')
1327 >>> _checkversion('git version 12345')
1328 'unknown'
1328 'unknown'
1329 >>> _checkversion('no')
1329 >>> _checkversion('no')
1330 'unknown'
1330 'unknown'
1331 '''
1331 '''
1332 version = gitsubrepo._gitversion(out)
1332 version = gitsubrepo._gitversion(out)
1333 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1333 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1334 # despite the docstring comment. For now, error on 1.4.0, warn on
1334 # despite the docstring comment. For now, error on 1.4.0, warn on
1335 # 1.5.0 but attempt to continue.
1335 # 1.5.0 but attempt to continue.
1336 if version == -1:
1336 if version == -1:
1337 return 'unknown'
1337 return 'unknown'
1338 if version < (1, 5, 0):
1338 if version < (1, 5, 0):
1339 return 'abort'
1339 return 'abort'
1340 elif version < (1, 6, 0):
1340 elif version < (1, 6, 0):
1341 return 'warning'
1341 return 'warning'
1342 return 'ok'
1342 return 'ok'
1343
1343
1344 def _gitcommand(self, commands, env=None, stream=False):
1344 def _gitcommand(self, commands, env=None, stream=False):
1345 return self._gitdir(commands, env=env, stream=stream)[0]
1345 return self._gitdir(commands, env=env, stream=stream)[0]
1346
1346
1347 def _gitdir(self, commands, env=None, stream=False):
1347 def _gitdir(self, commands, env=None, stream=False):
1348 return self._gitnodir(commands, env=env, stream=stream,
1348 return self._gitnodir(commands, env=env, stream=stream,
1349 cwd=self._abspath)
1349 cwd=self._abspath)
1350
1350
    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Calls the git command

        This method tries to call the git command. Versions prior to 1.6.0
        are not supported and will very probably fail.
        """
1357 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1357 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1358 # unless ui.quiet is set, print git's stderr,
1358 # unless ui.quiet is set, print git's stderr,
1359 # which is mostly progress and useful info
1359 # which is mostly progress and useful info
1360 errpipe = None
1360 errpipe = None
1361 if self.ui.quiet:
1361 if self.ui.quiet:
1362 errpipe = open(os.devnull, 'w')
1362 errpipe = open(os.devnull, 'w')
1363 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1363 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1364 cwd=cwd, env=env, close_fds=util.closefds,
1364 cwd=cwd, env=env, close_fds=util.closefds,
1365 stdout=subprocess.PIPE, stderr=errpipe)
1365 stdout=subprocess.PIPE, stderr=errpipe)
1366 if stream:
1366 if stream:
1367 return p.stdout, None
1367 return p.stdout, None
1368
1368
1369 retdata = p.stdout.read().strip()
1369 retdata = p.stdout.read().strip()
1370 # wait for the child to exit to avoid race condition.
1370 # wait for the child to exit to avoid race condition.
1371 p.wait()
1371 p.wait()
1372
1372
1373 if p.returncode != 0 and p.returncode != 1:
1373 if p.returncode != 0 and p.returncode != 1:
1374 # there are certain error codes that are ok
1374 # there are certain error codes that are ok
1375 command = commands[0]
1375 command = commands[0]
1376 if command in ('cat-file', 'symbolic-ref'):
1376 if command in ('cat-file', 'symbolic-ref'):
1377 return retdata, p.returncode
1377 return retdata, p.returncode
1378 # for all others, abort
1378 # for all others, abort
1379 raise util.Abort('git %s error %d in %s' %
1379 raise util.Abort('git %s error %d in %s' %
1380 (command, p.returncode, self._relpath))
1380 (command, p.returncode, self._relpath))
1381
1381
1382 return retdata, p.returncode
1382 return retdata, p.returncode
1383
1383
1384 def _gitmissing(self):
1384 def _gitmissing(self):
1385 return not self.wvfs.exists('.git')
1385 return not self.wvfs.exists('.git')
1386
1386
1387 def _gitstate(self):
1387 def _gitstate(self):
1388 return self._gitcommand(['rev-parse', 'HEAD'])
1388 return self._gitcommand(['rev-parse', 'HEAD'])
1389
1389
1390 def _gitcurrentbranch(self):
1390 def _gitcurrentbranch(self):
1391 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1391 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1392 if err:
1392 if err:
1393 current = None
1393 current = None
1394 return current
1394 return current
1395
1395
1396 def _gitremote(self, remote):
1396 def _gitremote(self, remote):
1397 out = self._gitcommand(['remote', 'show', '-n', remote])
1397 out = self._gitcommand(['remote', 'show', '-n', remote])
1398 line = out.split('\n')[1]
1398 line = out.split('\n')[1]
1399 i = line.index('URL: ') + len('URL: ')
1399 i = line.index('URL: ') + len('URL: ')
1400 return line[i:]
1400 return line[i:]
1401
1401
1402 def _githavelocally(self, revision):
1402 def _githavelocally(self, revision):
1403 out, code = self._gitdir(['cat-file', '-e', revision])
1403 out, code = self._gitdir(['cat-file', '-e', revision])
1404 return code == 0
1404 return code == 0
1405
1405
1406 def _gitisancestor(self, r1, r2):
1406 def _gitisancestor(self, r1, r2):
1407 base = self._gitcommand(['merge-base', r1, r2])
1407 base = self._gitcommand(['merge-base', r1, r2])
1408 return base == r1
1408 return base == r1
1409
1409
1410 def _gitisbare(self):
1410 def _gitisbare(self):
1411 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1411 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1412
1412
1413 def _gitupdatestat(self):
1413 def _gitupdatestat(self):
1414 """This must be run before git diff-index.
1414 """This must be run before git diff-index.
1415 diff-index only looks at changes to file stat;
1415 diff-index only looks at changes to file stat;
1416 this command looks at file contents and updates the stat."""
1416 this command looks at file contents and updates the stat."""
1417 self._gitcommand(['update-index', '-q', '--refresh'])
1417 self._gitcommand(['update-index', '-q', '--refresh'])
1418
1418
1419 def _gitbranchmap(self):
1419 def _gitbranchmap(self):
1420 '''returns 2 things:
1420 '''returns 2 things:
1421 a map from git branch to revision
1421 a map from git branch to revision
1422 a map from revision to branches'''
1422 a map from revision to branches'''
1423 branch2rev = {}
1423 branch2rev = {}
1424 rev2branch = {}
1424 rev2branch = {}
1425
1425
1426 out = self._gitcommand(['for-each-ref', '--format',
1426 out = self._gitcommand(['for-each-ref', '--format',
1427 '%(objectname) %(refname)'])
1427 '%(objectname) %(refname)'])
1428 for line in out.split('\n'):
1428 for line in out.split('\n'):
1429 revision, ref = line.split(' ')
1429 revision, ref = line.split(' ')
1430 if (not ref.startswith('refs/heads/') and
1430 if (not ref.startswith('refs/heads/') and
1431 not ref.startswith('refs/remotes/')):
1431 not ref.startswith('refs/remotes/')):
1432 continue
1432 continue
1433 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1433 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1434 continue # ignore remote/HEAD redirects
1434 continue # ignore remote/HEAD redirects
1435 branch2rev[ref] = revision
1435 branch2rev[ref] = revision
1436 rev2branch.setdefault(revision, []).append(ref)
1436 rev2branch.setdefault(revision, []).append(ref)
1437 return branch2rev, rev2branch
1437 return branch2rev, rev2branch
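    # For illustration (hypothetical output, not from a real repository): a
    # `git for-each-ref --format '%(objectname) %(refname)'` line such as
    #   1234abcd... refs/heads/master
    # yields branch2rev['refs/heads/master'] == '1234abcd...' and
    # rev2branch['1234abcd...'] == ['refs/heads/master'].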
1438
1438
1439 def _gittracking(self, branches):
1439 def _gittracking(self, branches):
1440 'return map of remote branch to local tracking branch'
1440 'return map of remote branch to local tracking branch'
1441 # assumes no more than one local tracking branch for each remote
1441 # assumes no more than one local tracking branch for each remote
1442 tracking = {}
1442 tracking = {}
1443 for b in branches:
1443 for b in branches:
1444 if b.startswith('refs/remotes/'):
1444 if b.startswith('refs/remotes/'):
1445 continue
1445 continue
1446 bname = b.split('/', 2)[2]
1446 bname = b.split('/', 2)[2]
1447 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1447 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1448 if remote:
1448 if remote:
1449 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1449 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1450 tracking['refs/remotes/%s/%s' %
1450 tracking['refs/remotes/%s/%s' %
1451 (remote, ref.split('/', 2)[2])] = b
1451 (remote, ref.split('/', 2)[2])] = b
1452 return tracking
1452 return tracking
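    # For illustration (hypothetical configuration): with
    # `branch.topic.remote = origin` and `branch.topic.merge = refs/heads/topic`,
    # _gittracking() maps 'refs/remotes/origin/topic' to the local
    # 'refs/heads/topic'.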
1453
1453
1454 def _abssource(self, source):
1454 def _abssource(self, source):
1455 if '://' not in source:
1455 if '://' not in source:
1456 # recognize the scp syntax as an absolute source
1456 # recognize the scp syntax as an absolute source
1457 colon = source.find(':')
1457 colon = source.find(':')
1458 if colon != -1 and '/' not in source[:colon]:
1458 if colon != -1 and '/' not in source[:colon]:
1459 return source
1459 return source
1460 self._subsource = source
1460 self._subsource = source
1461 return _abssource(self)
1461 return _abssource(self)
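    # For example (hypothetical URL): 'git@example.com:project.git' has a
    # colon before any slash, so it is treated as an absolute scp-style
    # source and returned unchanged; a relative path falls through to the
    # module-level _abssource() helper instead.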
1462
1462
1463 def _fetch(self, source, revision):
1463 def _fetch(self, source, revision):
1464 if self._gitmissing():
1464 if self._gitmissing():
1465 source = self._abssource(source)
1465 source = self._abssource(source)
1466 self.ui.status(_('cloning subrepo %s from %s\n') %
1466 self.ui.status(_('cloning subrepo %s from %s\n') %
1467 (self._relpath, source))
1467 (self._relpath, source))
1468 self._gitnodir(['clone', source, self._abspath])
1468 self._gitnodir(['clone', source, self._abspath])
1469 if self._githavelocally(revision):
1469 if self._githavelocally(revision):
1470 return
1470 return
1471 self.ui.status(_('pulling subrepo %s from %s\n') %
1471 self.ui.status(_('pulling subrepo %s from %s\n') %
1472 (self._relpath, self._gitremote('origin')))
1472 (self._relpath, self._gitremote('origin')))
1473 # try only origin: the originally cloned repo
1473 # try only origin: the originally cloned repo
1474 self._gitcommand(['fetch'])
1474 self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise util.Abort(_("revision %s does not exist in subrepo %s") %
                             (revision, self._relpath))
1478
1478
1479 @annotatesubrepoerror
1479 @annotatesubrepoerror
1480 def dirty(self, ignoreupdate=False):
1480 def dirty(self, ignoreupdate=False):
1481 if self._gitmissing():
1481 if self._gitmissing():
1482 return self._state[1] != ''
1482 return self._state[1] != ''
1483 if self._gitisbare():
1483 if self._gitisbare():
1484 return True
1484 return True
1485 if not ignoreupdate and self._state[1] != self._gitstate():
1485 if not ignoreupdate and self._state[1] != self._gitstate():
1486 # different version checked out
1486 # different version checked out
1487 return True
1487 return True
1488 # check for staged changes or modified files; ignore untracked files
1488 # check for staged changes or modified files; ignore untracked files
1489 self._gitupdatestat()
1489 self._gitupdatestat()
1490 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1490 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1491 return code == 1
1491 return code == 1
1492
1492
1493 def basestate(self):
1493 def basestate(self):
1494 return self._gitstate()
1494 return self._gitstate()
1495
1495
1496 @annotatesubrepoerror
1496 @annotatesubrepoerror
1497 def get(self, state, overwrite=False):
1497 def get(self, state, overwrite=False):
1498 source, revision, kind = state
1498 source, revision, kind = state
1499 if not revision:
1499 if not revision:
1500 self.remove()
1500 self.remove()
1501 return
1501 return
1502 self._fetch(source, revision)
1502 self._fetch(source, revision)
1503 # if the repo was set to be bare, unbare it
1503 # if the repo was set to be bare, unbare it
1504 if self._gitisbare():
1504 if self._gitisbare():
1505 self._gitcommand(['config', 'core.bare', 'false'])
1505 self._gitcommand(['config', 'core.bare', 'false'])
1506 if self._gitstate() == revision:
1506 if self._gitstate() == revision:
1507 self._gitcommand(['reset', '--hard', 'HEAD'])
1507 self._gitcommand(['reset', '--hard', 'HEAD'])
1508 return
1508 return
1509 elif self._gitstate() == revision:
1509 elif self._gitstate() == revision:
1510 if overwrite:
1510 if overwrite:
1511 # first reset the index to unmark new files for commit, because
1511 # first reset the index to unmark new files for commit, because
1512 # reset --hard will otherwise throw away files added for commit,
1512 # reset --hard will otherwise throw away files added for commit,
1513 # not just unmark them.
1513 # not just unmark them.
1514 self._gitcommand(['reset', 'HEAD'])
1514 self._gitcommand(['reset', 'HEAD'])
1515 self._gitcommand(['reset', '--hard', 'HEAD'])
1515 self._gitcommand(['reset', '--hard', 'HEAD'])
1516 return
1516 return
1517 branch2rev, rev2branch = self._gitbranchmap()
1517 branch2rev, rev2branch = self._gitbranchmap()
1518
1518
1519 def checkout(args):
1519 def checkout(args):
1520 cmd = ['checkout']
1520 cmd = ['checkout']
1521 if overwrite:
1521 if overwrite:
1522 # first reset the index to unmark new files for commit, because
1522 # first reset the index to unmark new files for commit, because
1523 # the -f option will otherwise throw away files added for
1523 # the -f option will otherwise throw away files added for
1524 # commit, not just unmark them.
1524 # commit, not just unmark them.
1525 self._gitcommand(['reset', 'HEAD'])
1525 self._gitcommand(['reset', 'HEAD'])
1526 cmd.append('-f')
1526 cmd.append('-f')
1527 self._gitcommand(cmd + args)
1527 self._gitcommand(cmd + args)
1528 _sanitize(self.ui, self.wvfs, '.git')
1528 _sanitize(self.ui, self.wvfs, '.git')
1529
1529
1530 def rawcheckout():
1530 def rawcheckout():
1531 # no branch to checkout, check it out with no branch
1531 # no branch to checkout, check it out with no branch
1532 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1532 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1533 self._relpath)
1533 self._relpath)
1534 self.ui.warn(_('check out a git branch if you intend '
1534 self.ui.warn(_('check out a git branch if you intend '
1535 'to make changes\n'))
1535 'to make changes\n'))
1536 checkout(['-q', revision])
1536 checkout(['-q', revision])
1537
1537
1538 if revision not in rev2branch:
1538 if revision not in rev2branch:
1539 rawcheckout()
1539 rawcheckout()
1540 return
1540 return
1541 branches = rev2branch[revision]
1541 branches = rev2branch[revision]
1542 firstlocalbranch = None
1542 firstlocalbranch = None
1543 for b in branches:
1543 for b in branches:
1544 if b == 'refs/heads/master':
1544 if b == 'refs/heads/master':
1545 # master trumps all other branches
1545 # master trumps all other branches
1546 checkout(['refs/heads/master'])
1546 checkout(['refs/heads/master'])
1547 return
1547 return
1548 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1548 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1549 firstlocalbranch = b
1549 firstlocalbranch = b
1550 if firstlocalbranch:
1550 if firstlocalbranch:
1551 checkout([firstlocalbranch])
1551 checkout([firstlocalbranch])
1552 return
1552 return
1553
1553
1554 tracking = self._gittracking(branch2rev.keys())
1554 tracking = self._gittracking(branch2rev.keys())
1555 # choose a remote branch already tracked if possible
1555 # choose a remote branch already tracked if possible
1556 remote = branches[0]
1556 remote = branches[0]
1557 if remote not in tracking:
1557 if remote not in tracking:
1558 for b in branches:
1558 for b in branches:
1559 if b in tracking:
1559 if b in tracking:
1560 remote = b
1560 remote = b
1561 break
1561 break
1562
1562
1563 if remote not in tracking:
1563 if remote not in tracking:
1564 # create a new local tracking branch
1564 # create a new local tracking branch
1565 local = remote.split('/', 3)[3]
1565 local = remote.split('/', 3)[3]
1566 checkout(['-b', local, remote])
1566 checkout(['-b', local, remote])
1567 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1567 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1568 # When updating to a tracked remote branch,
1568 # When updating to a tracked remote branch,
1569 # if the local tracking branch is downstream of it,
1569 # if the local tracking branch is downstream of it,
1570 # a normal `git pull` would have performed a "fast-forward merge"
1570 # a normal `git pull` would have performed a "fast-forward merge"
1571 # which is equivalent to updating the local branch to the remote.
1571 # which is equivalent to updating the local branch to the remote.
1572 # Since we are only looking at branching at update, we need to
1572 # Since we are only looking at branching at update, we need to
1573 # detect this situation and perform this action lazily.
1573 # detect this situation and perform this action lazily.
1574 if tracking[remote] != self._gitcurrentbranch():
1574 if tracking[remote] != self._gitcurrentbranch():
1575 checkout([tracking[remote]])
1575 checkout([tracking[remote]])
1576 self._gitcommand(['merge', '--ff', remote])
1576 self._gitcommand(['merge', '--ff', remote])
1577 _sanitize(self.ui, self.wvfs, '.git')
1577 _sanitize(self.ui, self.wvfs, '.git')
1578 else:
1578 else:
1579 # a real merge would be required, just checkout the revision
1579 # a real merge would be required, just checkout the revision
1580 rawcheckout()
1580 rawcheckout()
1581
1581
1582 @annotatesubrepoerror
1582 @annotatesubrepoerror
1583 def commit(self, text, user, date):
1583 def commit(self, text, user, date):
1584 if self._gitmissing():
1584 if self._gitmissing():
1585 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1585 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1586 cmd = ['commit', '-a', '-m', text]
1586 cmd = ['commit', '-a', '-m', text]
1587 env = os.environ.copy()
1587 env = os.environ.copy()
1588 if user:
1588 if user:
1589 cmd += ['--author', user]
1589 cmd += ['--author', user]
1590 if date:
1590 if date:
1591 # git's date parser silently ignores when seconds < 1e9
1591 # git's date parser silently ignores when seconds < 1e9
1592 # convert to ISO8601
1592 # convert to ISO8601
1593 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1593 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1594 '%Y-%m-%dT%H:%M:%S %1%2')
1594 '%Y-%m-%dT%H:%M:%S %1%2')
1595 self._gitcommand(cmd, env=env)
1595 self._gitcommand(cmd, env=env)
1596 # make sure commit works otherwise HEAD might not exist under certain
1596 # make sure commit works otherwise HEAD might not exist under certain
1597 # circumstances
1597 # circumstances
1598 return self._gitstate()
1598 return self._gitstate()
1599
1599
1600 @annotatesubrepoerror
1600 @annotatesubrepoerror
1601 def merge(self, state):
1601 def merge(self, state):
1602 source, revision, kind = state
1602 source, revision, kind = state
1603 self._fetch(source, revision)
1603 self._fetch(source, revision)
1604 base = self._gitcommand(['merge-base', revision, self._state[1]])
1604 base = self._gitcommand(['merge-base', revision, self._state[1]])
1605 self._gitupdatestat()
1605 self._gitupdatestat()
1606 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1606 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1607
1607
1608 def mergefunc():
1608 def mergefunc():
1609 if base == revision:
1609 if base == revision:
1610 self.get(state) # fast forward merge
1610 self.get(state) # fast forward merge
1611 elif base != self._state[1]:
1611 elif base != self._state[1]:
1612 self._gitcommand(['merge', '--no-commit', revision])
1612 self._gitcommand(['merge', '--no-commit', revision])
1613 _sanitize(self.ui, self.wvfs, '.git')
1613 _sanitize(self.ui, self.wvfs, '.git')
1614
1614
1615 if self.dirty():
1615 if self.dirty():
1616 if self._gitstate() != revision:
1616 if self._gitstate() != revision:
1617 dirty = self._gitstate() == self._state[1] or code != 0
1617 dirty = self._gitstate() == self._state[1] or code != 0
1618 if _updateprompt(self.ui, self, dirty,
1618 if _updateprompt(self.ui, self, dirty,
1619 self._state[1][:7], revision[:7]):
1619 self._state[1][:7], revision[:7]):
1620 mergefunc()
1620 mergefunc()
1621 else:
1621 else:
1622 mergefunc()
1622 mergefunc()
1623
1623
1624 @annotatesubrepoerror
1624 @annotatesubrepoerror
1625 def push(self, opts):
1625 def push(self, opts):
1626 force = opts.get('force')
1626 force = opts.get('force')
1627
1627
1628 if not self._state[1]:
1628 if not self._state[1]:
1629 return True
1629 return True
1630 if self._gitmissing():
1630 if self._gitmissing():
1631 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1631 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1632 # if a branch in origin contains the revision, nothing to do
1632 # if a branch in origin contains the revision, nothing to do
1633 branch2rev, rev2branch = self._gitbranchmap()
1633 branch2rev, rev2branch = self._gitbranchmap()
1634 if self._state[1] in rev2branch:
1634 if self._state[1] in rev2branch:
1635 for b in rev2branch[self._state[1]]:
1635 for b in rev2branch[self._state[1]]:
1636 if b.startswith('refs/remotes/origin/'):
1636 if b.startswith('refs/remotes/origin/'):
1637 return True
1637 return True
1638 for b, revision in branch2rev.iteritems():
1638 for b, revision in branch2rev.iteritems():
1639 if b.startswith('refs/remotes/origin/'):
1639 if b.startswith('refs/remotes/origin/'):
1640 if self._gitisancestor(self._state[1], revision):
1640 if self._gitisancestor(self._state[1], revision):
1641 return True
1641 return True
1642 # otherwise, try to push the currently checked out branch
1642 # otherwise, try to push the currently checked out branch
1643 cmd = ['push']
1643 cmd = ['push']
1644 if force:
1644 if force:
1645 cmd.append('--force')
1645 cmd.append('--force')
1646
1646
1647 current = self._gitcurrentbranch()
1647 current = self._gitcurrentbranch()
1648 if current:
1648 if current:
1649 # determine if the current branch is even useful
1649 # determine if the current branch is even useful
1650 if not self._gitisancestor(self._state[1], current):
1650 if not self._gitisancestor(self._state[1], current):
1651 self.ui.warn(_('unrelated git branch checked out '
1651 self.ui.warn(_('unrelated git branch checked out '
1652 'in subrepo %s\n') % self._relpath)
1652 'in subrepo %s\n') % self._relpath)
1653 return False
1653 return False
1654 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1654 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1655 (current.split('/', 2)[2], self._relpath))
1655 (current.split('/', 2)[2], self._relpath))
1656 ret = self._gitdir(cmd + ['origin', current])
1656 ret = self._gitdir(cmd + ['origin', current])
1657 return ret[1] == 0
1657 return ret[1] == 0
1658 else:
1658 else:
1659 self.ui.warn(_('no branch checked out in subrepo %s\n'
1659 self.ui.warn(_('no branch checked out in subrepo %s\n'
1660 'cannot push revision %s\n') %
1660 'cannot push revision %s\n') %
1661 (self._relpath, self._state[1]))
1661 (self._relpath, self._state[1]))
1662 return False
1662 return False
1663
1663
1664 @annotatesubrepoerror
1664 @annotatesubrepoerror
1665 def add(self, ui, match, prefix, explicitonly, **opts):
1665 def add(self, ui, match, prefix, explicitonly, **opts):
1666 if self._gitmissing():
1666 if self._gitmissing():
1667 return []
1667 return []
1668
1668
1669 (modified, added, removed,
1669 (modified, added, removed,
1670 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1670 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1671 clean=True)
1671 clean=True)
1672
1672
1673 tracked = set()
1673 tracked = set()
        # files already in dirstate states 'a', 'm' or 'n' just warn;
        # files in state 'r' (removed) are added again
1675 for l in (modified, added, deleted, clean):
1675 for l in (modified, added, deleted, clean):
1676 tracked.update(l)
1676 tracked.update(l)
1677
1677
1678 # Unknown files not of interest will be rejected by the matcher
1678 # Unknown files not of interest will be rejected by the matcher
1679 files = unknown
1679 files = unknown
1680 files.extend(match.files())
1680 files.extend(match.files())
1681
1681
1682 rejected = []
1682 rejected = []
1683
1683
1684 files = [f for f in sorted(set(files)) if match(f)]
1684 files = [f for f in sorted(set(files)) if match(f)]
1685 for f in files:
1685 for f in files:
1686 exact = match.exact(f)
1686 exact = match.exact(f)
1687 command = ["add"]
1687 command = ["add"]
1688 if exact:
1688 if exact:
1689 command.append("-f") #should be added, even if ignored
1689 command.append("-f") #should be added, even if ignored
1690 if ui.verbose or not exact:
1690 if ui.verbose or not exact:
1691 ui.status(_('adding %s\n') % match.rel(f))
1691 ui.status(_('adding %s\n') % match.rel(f))
1692
1692
1693 if f in tracked: # hg prints 'adding' even if already tracked
1693 if f in tracked: # hg prints 'adding' even if already tracked
1694 if exact:
1694 if exact:
1695 rejected.append(f)
1695 rejected.append(f)
1696 continue
1696 continue
1697 if not opts.get('dry_run'):
1697 if not opts.get('dry_run'):
1698 self._gitcommand(command + [f])
1698 self._gitcommand(command + [f])
1699
1699
1700 for f in rejected:
1700 for f in rejected:
1701 ui.warn(_("%s already tracked!\n") % match.abs(f))
1701 ui.warn(_("%s already tracked!\n") % match.abs(f))
1702
1702
1703 return rejected
1703 return rejected
1704
1704
1705 @annotatesubrepoerror
1705 @annotatesubrepoerror
1706 def remove(self):
1706 def remove(self):
1707 if self._gitmissing():
1707 if self._gitmissing():
1708 return
1708 return
1709 if self.dirty():
1709 if self.dirty():
1710 self.ui.warn(_('not removing repo %s because '
1710 self.ui.warn(_('not removing repo %s because '
1711 'it has changes.\n') % self._relpath)
1711 'it has changes.\n') % self._relpath)
1712 return
1712 return
1713 # we can't fully delete the repository as it may contain
1713 # we can't fully delete the repository as it may contain
1714 # local-only history
1714 # local-only history
1715 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1715 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1716 self._gitcommand(['config', 'core.bare', 'true'])
1716 self._gitcommand(['config', 'core.bare', 'true'])
1717 for f, kind in self.wvfs.readdir():
1717 for f, kind in self.wvfs.readdir():
1718 if f == '.git':
1718 if f == '.git':
1719 continue
1719 continue
1720 if kind == stat.S_IFDIR:
1720 if kind == stat.S_IFDIR:
1721 self.wvfs.rmtree(f)
1721 self.wvfs.rmtree(f)
1722 else:
1722 else:
1723 self.wvfs.unlink(f)
1723 self.wvfs.unlink(f)
1724
1724
1725 def archive(self, archiver, prefix, match=None):
1725 def archive(self, archiver, prefix, match=None):
1726 total = 0
1726 total = 0
1727 source, revision = self._state
1727 source, revision = self._state
1728 if not revision:
1728 if not revision:
1729 return total
1729 return total
1730 self._fetch(source, revision)
1730 self._fetch(source, revision)
1731
1731
1732 # Parse git's native archive command.
1732 # Parse git's native archive command.
1733 # This should be much faster than manually traversing the trees
1733 # This should be much faster than manually traversing the trees
1734 # and objects with many subprocess calls.
1734 # and objects with many subprocess calls.
1735 tarstream = self._gitcommand(['archive', revision], stream=True)
1735 tarstream = self._gitcommand(['archive', revision], stream=True)
1736 tar = tarfile.open(fileobj=tarstream, mode='r|')
1736 tar = tarfile.open(fileobj=tarstream, mode='r|')
1737 relpath = subrelpath(self)
1737 relpath = subrelpath(self)
1738 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1738 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1739 for i, info in enumerate(tar):
1739 for i, info in enumerate(tar):
1740 if info.isdir():
1740 if info.isdir():
1741 continue
1741 continue
1742 if match and not match(info.name):
1742 if match and not match(info.name):
1743 continue
1743 continue
1744 if info.issym():
1744 if info.issym():
1745 data = info.linkname
1745 data = info.linkname
1746 else:
1746 else:
1747 data = tar.extractfile(info).read()
1747 data = tar.extractfile(info).read()
1748 archiver.addfile(prefix + self._path + '/' + info.name,
1748 archiver.addfile(prefix + self._path + '/' + info.name,
1749 info.mode, info.issym(), data)
1749 info.mode, info.issym(), data)
1750 total += 1
1750 total += 1
1751 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1751 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1752 unit=_('files'))
1752 unit=_('files'))
1753 self.ui.progress(_('archiving (%s)') % relpath, None)
1753 self.ui.progress(_('archiving (%s)') % relpath, None)
1754 return total
1754 return total
1755
1755
1756
1756
1757 @annotatesubrepoerror
1757 @annotatesubrepoerror
1758 def cat(self, match, prefix, **opts):
1758 def cat(self, match, prefix, **opts):
1759 rev = self._state[1]
1759 rev = self._state[1]
1760 if match.anypats():
1760 if match.anypats():
1761 return 1 #No support for include/exclude yet
1761 return 1 #No support for include/exclude yet
1762
1762
1763 if not match.files():
1763 if not match.files():
1764 return 1
1764 return 1
1765
1765
1766 for f in match.files():
1766 for f in match.files():
1767 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1767 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1768 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1768 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1769 self._ctx.node(),
1769 self._ctx.node(),
1770 pathname=self.wvfs.reljoin(prefix, f))
1770 pathname=self.wvfs.reljoin(prefix, f))
1771 fp.write(output)
1771 fp.write(output)
1772 fp.close()
1772 fp.close()
1773 return 0
1773 return 0
1774
1774
1775
1775
1776 @annotatesubrepoerror
1776 @annotatesubrepoerror
1777 def status(self, rev2, **opts):
1777 def status(self, rev2, **opts):
1778 rev1 = self._state[1]
1778 rev1 = self._state[1]
1779 if self._gitmissing() or not rev1:
1779 if self._gitmissing() or not rev1:
1780 # if the repo is missing, return no results
1780 # if the repo is missing, return no results
1781 return scmutil.status([], [], [], [], [], [], [])
1781 return scmutil.status([], [], [], [], [], [], [])
1782 modified, added, removed = [], [], []
1782 modified, added, removed = [], [], []
1783 self._gitupdatestat()
1783 self._gitupdatestat()
1784 if rev2:
1784 if rev2:
1785 command = ['diff-tree', '-r', rev1, rev2]
1785 command = ['diff-tree', '-r', rev1, rev2]
1786 else:
1786 else:
1787 command = ['diff-index', rev1]
1787 command = ['diff-index', rev1]
1788 out = self._gitcommand(command)
1788 out = self._gitcommand(command)
1789 for line in out.split('\n'):
1789 for line in out.split('\n'):
1790 tab = line.find('\t')
1790 tab = line.find('\t')
1791 if tab == -1:
1791 if tab == -1:
1792 continue
1792 continue
1793 status, f = line[tab - 1], line[tab + 1:]
1793 status, f = line[tab - 1], line[tab + 1:]
1794 if status == 'M':
1794 if status == 'M':
1795 modified.append(f)
1795 modified.append(f)
1796 elif status == 'A':
1796 elif status == 'A':
1797 added.append(f)
1797 added.append(f)
1798 elif status == 'D':
1798 elif status == 'D':
1799 removed.append(f)
1799 removed.append(f)
1800
1800
1801 deleted, unknown, ignored, clean = [], [], [], []
1801 deleted, unknown, ignored, clean = [], [], [], []
1802
1802
1803 command = ['status', '--porcelain', '-z']
1803 command = ['status', '--porcelain', '-z']
1804 if opts.get('unknown'):
1804 if opts.get('unknown'):
1805 command += ['--untracked-files=all']
1805 command += ['--untracked-files=all']
1806 if opts.get('ignored'):
1806 if opts.get('ignored'):
1807 command += ['--ignored']
1807 command += ['--ignored']
1808 out = self._gitcommand(command)
1808 out = self._gitcommand(command)
1809
1809
1810 changedfiles = set()
1810 changedfiles = set()
1811 changedfiles.update(modified)
1811 changedfiles.update(modified)
1812 changedfiles.update(added)
1812 changedfiles.update(added)
1813 changedfiles.update(removed)
1813 changedfiles.update(removed)
1814 for line in out.split('\0'):
1814 for line in out.split('\0'):
1815 if not line:
1815 if not line:
1816 continue
1816 continue
1817 st = line[0:2]
1817 st = line[0:2]
1818 #moves and copies show 2 files on one line
1818 #moves and copies show 2 files on one line
1819 if line.find('\0') >= 0:
1819 if line.find('\0') >= 0:
1820 filename1, filename2 = line[3:].split('\0')
1820 filename1, filename2 = line[3:].split('\0')
1821 else:
1821 else:
1822 filename1 = line[3:]
1822 filename1 = line[3:]
1823 filename2 = None
1823 filename2 = None
1824
1824
1825 changedfiles.add(filename1)
1825 changedfiles.add(filename1)
1826 if filename2:
1826 if filename2:
1827 changedfiles.add(filename2)
1827 changedfiles.add(filename2)
1828
1828
1829 if st == '??':
1829 if st == '??':
1830 unknown.append(filename1)
1830 unknown.append(filename1)
1831 elif st == '!!':
1831 elif st == '!!':
1832 ignored.append(filename1)
1832 ignored.append(filename1)
1833
1833
1834 if opts.get('clean'):
1834 if opts.get('clean'):
1835 out = self._gitcommand(['ls-files'])
1835 out = self._gitcommand(['ls-files'])
1836 for f in out.split('\n'):
1836 for f in out.split('\n'):
1837 if not f in changedfiles:
1837 if not f in changedfiles:
1838 clean.append(f)
1838 clean.append(f)
1839
1839
1840 return scmutil.status(modified, added, removed, deleted,
1840 return scmutil.status(modified, added, removed, deleted,
1841 unknown, ignored, clean)
1841 unknown, ignored, clean)
1842
1842
1843 @annotatesubrepoerror
1843 @annotatesubrepoerror
1844 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1844 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1845 node1 = self._state[1]
1845 node1 = self._state[1]
1846 cmd = ['diff']
1846 cmd = ['diff']
1847 if opts['stat']:
1847 if opts['stat']:
1848 cmd.append('--stat')
1848 cmd.append('--stat')
1849 else:
1849 else:
1850 # for Git, this also implies '-p'
1850 # for Git, this also implies '-p'
1851 cmd.append('-U%d' % diffopts.context)
1851 cmd.append('-U%d' % diffopts.context)
1852
1852
1853 gitprefix = self.wvfs.reljoin(prefix, self._path)
1853 gitprefix = self.wvfs.reljoin(prefix, self._path)
1854
1854
1855 if diffopts.noprefix:
1855 if diffopts.noprefix:
1856 cmd.extend(['--src-prefix=%s/' % gitprefix,
1856 cmd.extend(['--src-prefix=%s/' % gitprefix,
1857 '--dst-prefix=%s/' % gitprefix])
1857 '--dst-prefix=%s/' % gitprefix])
1858 else:
1858 else:
1859 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1859 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1860 '--dst-prefix=b/%s/' % gitprefix])
1860 '--dst-prefix=b/%s/' % gitprefix])
1861
1861
1862 if diffopts.ignorews:
1862 if diffopts.ignorews:
1863 cmd.append('--ignore-all-space')
1863 cmd.append('--ignore-all-space')
1864 if diffopts.ignorewsamount:
1864 if diffopts.ignorewsamount:
1865 cmd.append('--ignore-space-change')
1865 cmd.append('--ignore-space-change')
1866 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1866 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1867 and diffopts.ignoreblanklines:
1867 and diffopts.ignoreblanklines:
1868 cmd.append('--ignore-blank-lines')
1868 cmd.append('--ignore-blank-lines')
1869
1869
1870 cmd.append(node1)
1870 cmd.append(node1)
1871 if node2:
1871 if node2:
1872 cmd.append(node2)
1872 cmd.append(node2)
1873
1873
1874 output = ""
1874 output = ""
1875 if match.always():
1875 if match.always():
1876 output += self._gitcommand(cmd) + '\n'
1876 output += self._gitcommand(cmd) + '\n'
1877 else:
1877 else:
1878 st = self.status(node2)[:3]
1878 st = self.status(node2)[:3]
1879 files = [f for sublist in st for f in sublist]
1879 files = [f for sublist in st for f in sublist]
1880 for f in files:
1880 for f in files:
1881 if match(f):
1881 if match(f):
1882 output += self._gitcommand(cmd + ['--', f]) + '\n'
1882 output += self._gitcommand(cmd + ['--', f]) + '\n'
1883
1883
1884 if output.strip():
1884 if output.strip():
1885 ui.write(output)
1885 ui.write(output)
1886
1886
1887 @annotatesubrepoerror
1887 @annotatesubrepoerror
1888 def revert(self, substate, *pats, **opts):
1888 def revert(self, substate, *pats, **opts):
1889 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1889 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1890 if not opts.get('no_backup'):
1890 if not opts.get('no_backup'):
1891 status = self.status(None)
1891 status = self.status(None)
1892 names = status.modified
1892 names = status.modified
1893 for name in names:
1893 for name in names:
1894 bakname = "%s.orig" % name
1894 bakname = "%s.orig" % name
1895 self.ui.note(_('saving current version of %s as %s\n') %
1895 self.ui.note(_('saving current version of %s as %s\n') %
1896 (name, bakname))
1896 (name, bakname))
1897 self.wvfs.rename(name, bakname)
1897 self.wvfs.rename(name, bakname)
1898
1898
1899 if not opts.get('dry_run'):
1899 if not opts.get('dry_run'):
1900 self.get(substate, overwrite=True)
1900 self.get(substate, overwrite=True)
1901 return []
1901 return []
1902
1902
1903 def shortid(self, revid):
1903 def shortid(self, revid):
1904 return revid[:7]
1904 return revid[:7]
1905
1905
1906 types = {
1906 types = {
1907 'hg': hgsubrepo,
1907 'hg': hgsubrepo,
1908 'svn': svnsubrepo,
1908 'svn': svnsubrepo,
1909 'git': gitsubrepo,
1909 'git': gitsubrepo,
1910 }
1910 }
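
As an aside, the `status --porcelain -z` output consumed by gitsubrepo.status() above is a NUL-separated, machine-readable format: each entry is a two-character status code, a space, and a path, with rename/copy entries carrying the original path as an extra NUL-terminated field. The following is only an illustrative, standalone sketch of parsing that format; the function name and the use of subprocess are assumptions made for the example and are not part of the Mercurial code, which drives git through its own _gitcommand() helper instead.

import subprocess

def parse_porcelain_z(repo_path):
    """Return (status, path, oldpath) tuples for a git working tree."""
    out = subprocess.check_output(
        ['git', 'status', '--porcelain', '-z'], cwd=repo_path)
    fields = out.decode('utf-8', 'replace').split('\0')
    entries = []
    i = 0
    while i < len(fields):
        entry = fields[i]
        i += 1
        if not entry:
            # trailing empty field after the final NUL
            continue
        st, path = entry[:2], entry[3:]
        oldpath = None
        if st[0] in 'RC':
            # renames/copies emit one more NUL-terminated field: the old path
            oldpath = fields[i]
            i += 1
        entries.append((st, path, oldpath))
    return entries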
@@ -1,598 +1,694 @@
Preparing the subrepository 'sub2'

  $ hg init sub2
  $ echo sub2 > sub2/sub2
  $ hg add -R sub2
  adding sub2/sub2 (glob)
  $ hg commit -R sub2 -m "sub2 import"

Preparing the 'sub1' repo which depends on the subrepo 'sub2'

  $ hg init sub1
  $ echo sub1 > sub1/sub1
  $ echo "sub2 = ../sub2" > sub1/.hgsub
  $ hg clone sub2 sub1/sub2
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg add -R sub1
  adding sub1/.hgsub (glob)
  adding sub1/sub1 (glob)
  $ hg commit -R sub1 -m "sub1 import"

Preparing the 'main' repo which depends on the subrepo 'sub1'

  $ hg init main
  $ echo main > main/main
  $ echo "sub1 = ../sub1" > main/.hgsub
  $ hg clone sub1 main/sub1
  updating to branch default
  cloning subrepo sub2 from $TESTTMP/sub2
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg add -R main
  adding main/.hgsub (glob)
  adding main/main (glob)
  $ hg commit -R main -m "main import"

Cleaning both repositories, just as a clone -U

  $ hg up -C -R sub2 null
  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ hg up -C -R sub1 null
  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
  $ hg up -C -R main null
  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
  $ rm -rf main/sub1
  $ rm -rf sub1/sub2

Clone main

  $ hg --config extensions.largefiles= clone main cloned
  updating to branch default
  cloning subrepo sub1 from $TESTTMP/sub1
  cloning subrepo sub1/sub2 from $TESTTMP/sub2 (glob)
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

Largefiles is NOT enabled in the clone if the source repo doesn't require it
  $ cat cloned/.hg/hgrc
  # example repository config (see "hg help config" for more info)
  [paths]
  default = $TESTTMP/main (glob)

  # path aliases to other clones of this repo in URLs or filesystem paths
  # (see "hg help config.paths" for more info)
  #
  # default-push = ssh://jdoe@example.net/hg/jdoes-fork
  # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
  # my-clone = /home/jdoe/jdoes-clone

  [ui]
  # name and email (local to this repository, optional), e.g.
  # username = Jane Doe <jdoe@example.com>

Checking cloned repo ids

  $ printf "cloned " ; hg id -R cloned
  cloned 7f491f53a367 tip
  $ printf "cloned/sub1 " ; hg id -R cloned/sub1
  cloned/sub1 fc3b4ce2696f tip
  $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
  cloned/sub1/sub2 c57a0840e3ba tip

debugsub output for main and sub1

  $ hg debugsub -R cloned
  path sub1
  source ../sub1
  revision fc3b4ce2696f7741438c79207583768f2ce6b0dd
  $ hg debugsub -R cloned/sub1
  path sub2
  source ../sub2
  revision c57a0840e3badd667ef3c3ef65471609acb2ba3c

Modifying deeply nested 'sub2'

  $ echo modified > cloned/sub1/sub2/sub2
  $ hg commit --subrepos -m "deep nested modif should trigger a commit" -R cloned
  committing subrepository sub1
  committing subrepository sub1/sub2 (glob)

Checking modified node ids

  $ printf "cloned " ; hg id -R cloned
  cloned ffe6649062fe tip
  $ printf "cloned/sub1 " ; hg id -R cloned/sub1
  cloned/sub1 2ecb03bf44a9 tip
  $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
  cloned/sub1/sub2 53dd3430bcaf tip

debugsub output for main and sub1

  $ hg debugsub -R cloned
  path sub1
  source ../sub1
  revision 2ecb03bf44a94e749e8669481dd9069526ce7cb9
  $ hg debugsub -R cloned/sub1
  path sub2
  source ../sub2
  revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487

Check that deep archiving works

  $ cd cloned
  $ echo 'test' > sub1/sub2/test.txt
  $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
  $ mkdir sub1/sub2/folder
  $ echo 'subfolder' > sub1/sub2/folder/test.txt
  $ hg ci -ASm "add test.txt"
  adding sub1/sub2/folder/test.txt
  committing subrepository sub1
  committing subrepository sub1/sub2 (glob)

.. but first take a detour through some deep removal testing

  $ hg remove -S -I 're:.*.txt' .
  removing sub1/sub2/folder/test.txt (glob)
  removing sub1/sub2/test.txt (glob)
  $ hg status -S
  R sub1/sub2/folder/test.txt
  R sub1/sub2/test.txt
  $ hg update -Cq
  $ hg remove -I 're:.*.txt' sub1
  $ hg status -S
  $ hg remove sub1/sub2/folder/test.txt
  $ hg remove sub1/.hgsubstate
  $ mv sub1/.hgsub sub1/x.hgsub
  $ hg status -S
  warning: subrepo spec file 'sub1/.hgsub' not found (glob)
  R sub1/.hgsubstate
  R sub1/sub2/folder/test.txt
  ! sub1/.hgsub
  ? sub1/x.hgsub
  $ mv sub1/x.hgsub sub1/.hgsub
  $ hg update -Cq
  $ touch sub1/foo
  $ hg forget sub1/sub2/folder/test.txt
  $ rm sub1/sub2/test.txt

Test relative path printing + subrepos
  $ mkdir -p foo/bar
  $ cd foo
  $ touch bar/abc
  $ hg addremove -S ..
  adding ../sub1/sub2/folder/test.txt (glob)
  removing ../sub1/sub2/test.txt (glob)
  adding ../sub1/foo (glob)
  adding bar/abc (glob)
  $ cd ..
  $ hg status -S
  A foo/bar/abc
  A sub1/foo
  R sub1/sub2/test.txt

Archive wdir() with subrepos
  $ hg rm main
  $ hg archive -S -r 'wdir()' ../wdir
  $ diff -r . ../wdir | grep -v '\.hg$'
  Only in ../wdir: .hg_archival.txt

  $ find ../wdir -type f | sort
  ../wdir/.hg_archival.txt
  ../wdir/.hgsub
  ../wdir/.hgsubstate
  ../wdir/foo/bar/abc
  ../wdir/sub1/.hgsub
  ../wdir/sub1/.hgsubstate
  ../wdir/sub1/foo
  ../wdir/sub1/sub1
  ../wdir/sub1/sub2/folder/test.txt
  ../wdir/sub1/sub2/sub2

Attempting to archive 'wdir()' with a missing file is handled gracefully
  $ rm sub1/sub1
  $ rm -r ../wdir
  $ hg archive -v -S -r 'wdir()' ../wdir
  $ find ../wdir -type f | sort
  ../wdir/.hg_archival.txt
  ../wdir/.hgsub
  ../wdir/.hgsubstate
  ../wdir/foo/bar/abc
  ../wdir/sub1/.hgsub
  ../wdir/sub1/.hgsubstate
  ../wdir/sub1/foo
  ../wdir/sub1/sub2/folder/test.txt
  ../wdir/sub1/sub2/sub2

Continue relative path printing + subrepos
  $ hg update -Cq
  $ touch sub1/sub2/folder/bar
  $ hg addremove sub1/sub2
  adding sub1/sub2/folder/bar (glob)
  $ hg status -S
  A sub1/sub2/folder/bar
  ? foo/bar/abc
  ? sub1/foo
  $ hg update -Cq
  $ hg addremove sub1
  adding sub1/sub2/folder/bar (glob)
  adding sub1/foo (glob)
  $ hg update -Cq
  $ rm sub1/sub2/folder/test.txt
  $ rm sub1/sub2/test.txt
  $ hg ci -ASm "remove test.txt"
  adding sub1/sub2/folder/bar
  removing sub1/sub2/folder/test.txt
  removing sub1/sub2/test.txt
  adding sub1/foo
  adding foo/bar/abc
  committing subrepository sub1
  committing subrepository sub1/sub2 (glob)

  $ hg forget sub1/sub2/sub2
  $ echo x > sub1/sub2/x.txt
  $ hg add sub1/sub2/x.txt

Files sees uncommitted adds and removes in subrepos
  $ hg files -S
  .hgsub
  .hgsubstate
  foo/bar/abc (glob)
  main
  sub1/.hgsub (glob)
  sub1/.hgsubstate (glob)
  sub1/foo (glob)
  sub1/sub1 (glob)
  sub1/sub2/folder/bar (glob)
  sub1/sub2/x.txt (glob)

  $ hg files -S "set:eol('dos') or eol('unix') or size('<= 0')"
  .hgsub
  .hgsubstate
  foo/bar/abc (glob)
  main
  sub1/.hgsub (glob)
  sub1/.hgsubstate (glob)
  sub1/foo (glob)
  sub1/sub1 (glob)
  sub1/sub2/folder/bar (glob)
  sub1/sub2/x.txt (glob)

  $ hg files -r '.^' -S "set:eol('dos') or eol('unix')"
  .hgsub
  .hgsubstate
  main
  sub1/.hgsub (glob)
  sub1/.hgsubstate (glob)
  sub1/sub1 (glob)
  sub1/sub2/folder/test.txt (glob)
  sub1/sub2/sub2 (glob)
  sub1/sub2/test.txt (glob)

  $ hg files sub1
  sub1/.hgsub (glob)
  sub1/.hgsubstate (glob)
  sub1/foo (glob)
  sub1/sub1 (glob)
  sub1/sub2/folder/bar (glob)
  sub1/sub2/x.txt (glob)

  $ hg files sub1/sub2
  sub1/sub2/folder/bar (glob)
  sub1/sub2/x.txt (glob)

  $ hg files -S -r '.^' sub1/sub2/folder
  sub1/sub2/folder/test.txt (glob)

  $ hg files -S -r '.^' sub1/sub2/missing
  sub1/sub2/missing: no such file in rev 78026e779ea6 (glob)
  [1]

  $ hg files -r '.^' sub1/
  sub1/.hgsub (glob)
  sub1/.hgsubstate (glob)
  sub1/sub1 (glob)
  sub1/sub2/folder/test.txt (glob)
  sub1/sub2/sub2 (glob)
  sub1/sub2/test.txt (glob)

  $ hg files -r '.^' sub1/sub2
  sub1/sub2/folder/test.txt (glob)
  sub1/sub2/sub2 (glob)
  sub1/sub2/test.txt (glob)

  $ hg rollback -q
  $ hg up -Cq

  $ hg --config extensions.largefiles=! archive -S ../archive_all
  $ find ../archive_all | sort
  ../archive_all
  ../archive_all/.hg_archival.txt
  ../archive_all/.hgsub
  ../archive_all/.hgsubstate
  ../archive_all/main
  ../archive_all/sub1
  ../archive_all/sub1/.hgsub
  ../archive_all/sub1/.hgsubstate
  ../archive_all/sub1/sub1
  ../archive_all/sub1/sub2
  ../archive_all/sub1/sub2/folder
  ../archive_all/sub1/sub2/folder/test.txt
  ../archive_all/sub1/sub2/sub2
  ../archive_all/sub1/sub2/test.txt

Check that archive -X works in deep subrepos

  $ hg --config extensions.largefiles=! archive -S -X '**test*' ../archive_exclude
  $ find ../archive_exclude | sort
  ../archive_exclude
  ../archive_exclude/.hg_archival.txt
  ../archive_exclude/.hgsub
  ../archive_exclude/.hgsubstate
  ../archive_exclude/main
  ../archive_exclude/sub1
  ../archive_exclude/sub1/.hgsub
  ../archive_exclude/sub1/.hgsubstate
  ../archive_exclude/sub1/sub1
  ../archive_exclude/sub1/sub2
  ../archive_exclude/sub1/sub2/sub2

  $ hg --config extensions.largefiles=! archive -S -I '**test*' ../archive_include
  $ find ../archive_include | sort
  ../archive_include
  ../archive_include/sub1
  ../archive_include/sub1/sub2
  ../archive_include/sub1/sub2/folder
  ../archive_include/sub1/sub2/folder/test.txt
  ../archive_include/sub1/sub2/test.txt

Check that deep archive works with largefiles (which overrides hgsubrepo impl)
This also tests the repo.ui regression in 43fb170a23bd, and that lf subrepo
subrepos are archived properly.
Note that add --large through a subrepo currently adds the file as a normal file

  $ echo "large" > sub1/sub2/large.bin
  $ hg --config extensions.largefiles= add --large -R sub1/sub2 sub1/sub2/large.bin
  $ echo "large" > large.bin
  $ hg --config extensions.largefiles= add --large large.bin
  $ hg --config extensions.largefiles= ci -S -m "add large files"
  committing subrepository sub1
  committing subrepository sub1/sub2 (glob)

  $ hg --config extensions.largefiles= archive -S ../archive_lf
  $ find ../archive_lf | sort
  ../archive_lf
  ../archive_lf/.hg_archival.txt
  ../archive_lf/.hgsub
  ../archive_lf/.hgsubstate
  ../archive_lf/large.bin
  ../archive_lf/main
  ../archive_lf/sub1
  ../archive_lf/sub1/.hgsub
  ../archive_lf/sub1/.hgsubstate
  ../archive_lf/sub1/sub1
  ../archive_lf/sub1/sub2
  ../archive_lf/sub1/sub2/folder
  ../archive_lf/sub1/sub2/folder/test.txt
  ../archive_lf/sub1/sub2/large.bin
  ../archive_lf/sub1/sub2/sub2
  ../archive_lf/sub1/sub2/test.txt
  $ rm -rf ../archive_lf

Exclude large files from main and sub-sub repo

  $ hg --config extensions.largefiles= archive -S -X '**.bin' ../archive_lf
  $ find ../archive_lf | sort
  ../archive_lf
  ../archive_lf/.hg_archival.txt
  ../archive_lf/.hgsub
  ../archive_lf/.hgsubstate
  ../archive_lf/main
  ../archive_lf/sub1
  ../archive_lf/sub1/.hgsub
  ../archive_lf/sub1/.hgsubstate
  ../archive_lf/sub1/sub1
  ../archive_lf/sub1/sub2
  ../archive_lf/sub1/sub2/folder
  ../archive_lf/sub1/sub2/folder/test.txt
  ../archive_lf/sub1/sub2/sub2
  ../archive_lf/sub1/sub2/test.txt
  $ rm -rf ../archive_lf

Exclude normal files from main and sub-sub repo

  $ hg --config extensions.largefiles= archive -S -X '**.txt' -p '.' ../archive_lf.tgz
  $ tar -tzf ../archive_lf.tgz | sort
  .hgsub
  .hgsubstate
  large.bin
  main
  sub1/.hgsub
  sub1/.hgsubstate
  sub1/sub1
  sub1/sub2/large.bin
  sub1/sub2/sub2

Include normal files from within a largefiles subrepo

  $ hg --config extensions.largefiles= archive -S -I '**.txt' ../archive_lf
  $ find ../archive_lf | sort
  ../archive_lf
  ../archive_lf/.hg_archival.txt
  ../archive_lf/sub1
  ../archive_lf/sub1/sub2
  ../archive_lf/sub1/sub2/folder
  ../archive_lf/sub1/sub2/folder/test.txt
  ../archive_lf/sub1/sub2/test.txt
  $ rm -rf ../archive_lf

Include large files from within a largefiles subrepo

  $ hg --config extensions.largefiles= archive -S -I '**.bin' ../archive_lf
  $ find ../archive_lf | sort
  ../archive_lf
  ../archive_lf/large.bin
  ../archive_lf/sub1
  ../archive_lf/sub1/sub2
  ../archive_lf/sub1/sub2/large.bin
  $ rm -rf ../archive_lf

Find an exact largefile match in a largefiles subrepo

  $ hg --config extensions.largefiles= archive -S -I 'sub1/sub2/large.bin' ../archive_lf
  $ find ../archive_lf | sort
  ../archive_lf
  ../archive_lf/sub1
  ../archive_lf/sub1/sub2
  ../archive_lf/sub1/sub2/large.bin
  $ rm -rf ../archive_lf

The local repo enables largefiles if a largefiles repo is cloned
  $ hg showconfig extensions
  abort: repository requires features unknown to this Mercurial: largefiles!
  (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
  [255]
  $ hg --config extensions.largefiles= clone -qU . ../lfclone
  $ cat ../lfclone/.hg/hgrc
  # example repository config (see "hg help config" for more info)
  [paths]
  default = $TESTTMP/cloned (glob)

  # path aliases to other clones of this repo in URLs or filesystem paths
  # (see "hg help config.paths" for more info)
  #
  # default-push = ssh://jdoe@example.net/hg/jdoes-fork
  # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
  # my-clone = /home/jdoe/jdoes-clone

  [ui]
  # name and email (local to this repository, optional), e.g.
  # username = Jane Doe <jdoe@example.com>

  [extensions]
  largefiles=

Find an exact match to a standin (should archive nothing)
  $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf
  $ find ../archive_lf 2> /dev/null | sort

  $ cat >> $HGRCPATH <<EOF
  > [extensions]
  > largefiles=
  > [largefiles]
  > patterns=glob:**.dat
  > EOF

Test forget through a deep subrepo with the largefiles extension, both a
largefile and a normal file. Then a largefile that hasn't been committed yet.
  $ touch sub1/sub2/untracked.txt
  $ touch sub1/sub2/large.dat
  $ hg forget sub1/sub2/large.bin sub1/sub2/test.txt sub1/sub2/untracked.txt
  not removing sub1/sub2/untracked.txt: file is already untracked (glob)
  [1]
  $ hg add --large --dry-run -v sub1/sub2/untracked.txt
  adding sub1/sub2/untracked.txt as a largefile (glob)
  $ hg add --large -v sub1/sub2/untracked.txt
  adding sub1/sub2/untracked.txt as a largefile (glob)
  $ hg add --normal -v sub1/sub2/large.dat
  adding sub1/sub2/large.dat (glob)
  $ hg forget -v sub1/sub2/untracked.txt
  removing sub1/sub2/untracked.txt (glob)
  $ hg status -S
  A sub1/sub2/large.dat
  R sub1/sub2/large.bin
  R sub1/sub2/test.txt
  ? foo/bar/abc
  ? sub1/sub2/untracked.txt
  ? sub1/sub2/x.txt
  $ hg add sub1/sub2

  $ hg archive -S -r 'wdir()' ../wdir2
  $ diff -r . ../wdir2 | grep -v '\.hg$'
  Only in ../wdir2: .hg_archival.txt
  Only in .: .hglf
  Only in .: foo
  Only in ./sub1/sub2: large.bin
  Only in ./sub1/sub2: test.txt
  Only in ./sub1/sub2: untracked.txt
  Only in ./sub1/sub2: x.txt
  $ find ../wdir2 -type f | sort
  ../wdir2/.hg_archival.txt
  ../wdir2/.hgsub
  ../wdir2/.hgsubstate
  ../wdir2/large.bin
  ../wdir2/main
  ../wdir2/sub1/.hgsub
  ../wdir2/sub1/.hgsubstate
  ../wdir2/sub1/sub1
  ../wdir2/sub1/sub2/folder/test.txt
  ../wdir2/sub1/sub2/large.dat
  ../wdir2/sub1/sub2/sub2
  $ hg status -S -mac -n | sort
  .hgsub
  .hgsubstate
  large.bin
  main
  sub1/.hgsub
  sub1/.hgsubstate
  sub1/sub1
  sub1/sub2/folder/test.txt
  sub1/sub2/large.dat
  sub1/sub2/sub2

  $ hg ci -Sqm 'forget testing'

Test 'wdir()' modified file archiving with largefiles
  $ echo 'mod' > main
  $ echo 'mod' > large.bin
  $ echo 'mod' > sub1/sub2/large.dat
  $ hg archive -S -r 'wdir()' ../wdir3
  $ diff -r . ../wdir3 | grep -v '\.hg$'
  Only in ../wdir3: .hg_archival.txt
  Only in .: .hglf
  Only in .: foo
  Only in ./sub1/sub2: large.bin
  Only in ./sub1/sub2: test.txt
  Only in ./sub1/sub2: untracked.txt
  Only in ./sub1/sub2: x.txt
  $ find ../wdir3 -type f | sort
  ../wdir3/.hg_archival.txt
  ../wdir3/.hgsub
  ../wdir3/.hgsubstate
  ../wdir3/large.bin
  ../wdir3/main
  ../wdir3/sub1/.hgsub
  ../wdir3/sub1/.hgsubstate
  ../wdir3/sub1/sub1
  ../wdir3/sub1/sub2/folder/test.txt
  ../wdir3/sub1/sub2/large.dat
  ../wdir3/sub1/sub2/sub2
  $ hg up -Cq

Test issue4330: commit a directory where only normal files have changed
  $ touch foo/bar/large.dat
  $ hg add --large foo/bar/large.dat
  $ hg ci -m 'add foo/bar/large.dat'
  $ touch a.txt
  $ touch a.dat
  $ hg add -v foo/bar/abc a.txt a.dat
  adding a.dat as a largefile
  adding a.txt
  adding foo/bar/abc (glob)
  $ hg ci -m 'dir commit with only normal file deltas' foo/bar
  $ hg status
  A a.dat
  A a.txt

Test a directory commit with a changed largefile and a changed normal file
  $ echo changed > foo/bar/large.dat
  $ echo changed > foo/bar/abc
  $ hg ci -m 'dir commit with normal and lf file deltas' foo
  $ hg status
  A a.dat
  A a.txt

  $ hg ci -m "add a.*"
  $ hg mv a.dat b.dat
  $ hg mv foo/bar/abc foo/bar/def
  $ hg status -C
  A b.dat
    a.dat
  A foo/bar/def
    foo/bar/abc
  R a.dat
  R foo/bar/abc

  $ hg ci -m "move large and normal"
  $ hg status -C --rev '.^' --rev .
  A b.dat
    a.dat
  A foo/bar/def
    foo/bar/abc
  R a.dat
  R foo/bar/abc


  $ echo foo > main
  $ hg ci -m "mod parent only"
  $ hg init sub3
  $ echo "sub3 = sub3" >> .hgsub
  $ echo xyz > sub3/a.txt
  $ hg add sub3/a.txt
  $ hg ci -Sm "add sub3"
  committing subrepository sub3
  $ cat .hgsub | grep -v sub3 > .hgsub1
  $ mv .hgsub1 .hgsub
  $ hg ci -m "remove sub3"

  $ hg log -r "subrepo()" --style compact
  0 7f491f53a367 1970-01-01 00:00 +0000 test
    main import

  1 ffe6649062fe 1970-01-01 00:00 +0000 test
    deep nested modif should trigger a commit

  2 9bb10eebee29 1970-01-01 00:00 +0000 test
    add test.txt

  3 7c64f035294f 1970-01-01 00:00 +0000 test
    add large files

  4 f734a59e2e35 1970-01-01 00:00 +0000 test
    forget testing

  11 9685a22af5db 1970-01-01 00:00 +0000 test
    add sub3

  12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
    remove sub3

  $ hg log -r "subrepo('sub3')" --style compact
  11 9685a22af5db 1970-01-01 00:00 +0000 test
    add sub3

  12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test
    remove sub3

  $ hg log -r "subrepo('bogus')" --style compact


Test .hgsubstate in the R state

  $ hg rm .hgsub .hgsubstate
  $ hg ci -m 'trash subrepo tracking'

  $ hg log -r "subrepo('re:sub\d+')" --style compact
  0 7f491f53a367 1970-01-01 00:00 +0000 test
    main import

  1 ffe6649062fe 1970-01-01 00:00 +0000 test
    deep nested modif should trigger a commit

  2 9bb10eebee29 1970-01-01 00:00 +0000 test
    add test.txt

  3 7c64f035294f 1970-01-01 00:00 +0000 test
    add large files

  4 f734a59e2e35 1970-01-01 00:00 +0000 test
    forget testing

  11 9685a22af5db 1970-01-01 00:00 +0000 test
    add sub3

  12 2e0485b475b9 1970-01-01 00:00 +0000 test
    remove sub3

  13[tip] a68b2c361653 1970-01-01 00:00 +0000 test
    trash subrepo tracking


Restore the trashed subrepo tracking

  $ hg rollback -q
  $ hg update -Cq .

  $ cd ..
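
For reference, the behaviour exercised above is what this changeset adds: 'hg archive' now accepts the 'wdir()' revset, so an uncommitted working copy can be snapshotted directly, recursing into subrepos with -S and, when the largefiles extension is active, archiving largefiles as their real content rather than their standins. The destination paths below are placeholders for illustration only:

  $ hg archive -S -r 'wdir()' /tmp/working-copy-snapshot
  $ hg archive -r 'wdir()' -t tgz /tmp/working-copy-snapshot.tgz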