##// END OF EJS Templates
subrepo: drop the 'ui' parameter to archive()...
Matt Harbison -
r23575:a2f139d2 default
parent child Browse files
Show More
@@ -1,1289 +1,1288 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
10
11 import os
11 import os
12 import copy
12 import copy
13
13
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 archival, pathutil, revset
15 archival, pathutil, revset
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.node import hex
17 from mercurial.node import hex
18
18
19 import lfutil
19 import lfutil
20 import lfcommands
20 import lfcommands
21 import basestore
21 import basestore
22
22
23 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23 # -- Utility functions: commonly/repeatedly needed functionality ---------------
24
24
def composenormalfilematcher(match, manifest):
    '''Return a copy of *match* that rejects largefiles and their standins.

    A file is filtered out when it is itself a standin, or when its
    standin is present in *manifest* (i.e. it is tracked as a largefile).
    '''
    matcher = copy.copy(match)

    def isnormal(f):
        # Not a standin, and not a file whose standin the manifest tracks.
        return not (lfutil.isstandin(f) or lfutil.standin(f) in manifest)

    matcher._files = [f for f in matcher._files if isnormal(f)]
    matcher._fmap = set(matcher._files)
    matcher._always = False
    oldmatchfn = matcher.matchfn
    matcher.matchfn = lambda f: isnormal(f) and oldmatchfn(f)
    return matcher
35
35
def installnormalfilesmatchfn(manifest):
    '''installmatchfn with a matchfn that ignores all largefiles'''
    def largefileskippingmatch(ctx, pats=[], opts={}, globbed=False,
                               default='relpath'):
        # Build the matcher the previous scmutil.match would have built,
        # then strip largefiles and standins out of it.
        plainmatch = oldmatch(ctx, pats, opts, globbed, default)
        return composenormalfilematcher(plainmatch, manifest)
    oldmatch = installmatchfn(largefileskippingmatch)
43
43
def installmatchfn(f):
    '''Replace scmutil.match with *f* and return the displaced function.

    The displaced function is also stashed on *f* as 'oldmatch' so that
    restorematchfn can undo the patch.
    Warning: this monkey patches the _module_ at runtime! Not thread safe!
    '''
    displaced = scmutil.match
    setattr(f, 'oldmatch', displaced)
    scmutil.match = f
    return displaced
51
51
def restorematchfn():
    '''restores scmutil.match to what it was before installmatchfn
    was called. no-op if scmutil.match is its original function.

    Note that n calls to installmatchfn will require n calls to
    restore the original matchfn.'''
    # Pass the current function as the getattr default: the original
    # scmutil.match has no 'oldmatch' attribute, and without the default
    # this raised AttributeError instead of being the documented no-op
    # (restorematchandpatsfn already does it this way).
    scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
59
59
def installmatchandpatsfn(f):
    # Swap scmutil.matchandpats for f, remembering the displaced function
    # on f itself so restorematchandpatsfn can undo the patch later.
    displaced = scmutil.matchandpats
    f.oldmatchandpats = displaced
    scmutil.matchandpats = f
    return displaced
65
65
def restorematchandpatsfn():
    '''restores scmutil.matchandpats to what it was before
    installmatchandpatsfn was called. No-op if scmutil.matchandpats
    is its original function.

    Note that n calls to installmatchandpatsfn will require n calls
    to restore the original matchfn.'''
    # The original function carries no 'oldmatchandpats' attribute, so the
    # getattr default makes this a genuine no-op in that case.
    current = scmutil.matchandpats
    scmutil.matchandpats = getattr(current, 'oldmatchandpats', current)
75
75
def addlargefiles(ui, repo, matcher, **opts):
    '''Add working-directory files as largefiles when they qualify.

    A file becomes a largefile when the --large flag was given, when its
    size is at least the configured minimum ('lfsize', in MB), or when it
    matches one of the configured largefiles.patterns. Standins are
    written and added to the repository (unless --dry-run).

    Returns the list of files that could not be added.
    '''
    large = opts.pop('large', None)
    lfsize = lfutil.getminsize(
        ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))

    # Optional pattern-based matcher from the largefiles.patterns config.
    lfmatcher = None
    if lfutil.islfilesrepo(repo):
        lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
        if lfpats:
            lfmatcher = match_.match(repo.root, '', list(lfpats))

    lfnames = []
    # Silence "file not found" complaints while walking; the caller's
    # matcher keeps its own bad-file reporting.
    m = copy.copy(matcher)
    m.bad = lambda x, y: None
    wctx = repo[None]
    for f in repo.walk(m):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_('%s already a largefile\n') % f)
            continue

        if (exact or not exists) and not lfutil.isstandin(f):
            wfile = repo.wjoin(f)

            # In case the file was removed previously, but not committed
            # (issue3507)
            if not os.path.exists(wfile):
                continue

            abovemin = (lfsize and
                        os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_('adding %s as a largefile\n') % m.rel(f))

    bad = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    wlock = repo.wlock()
    try:
        if not opts.get('dry_run'):
            standins = []
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                # An empty hash marks the standin as not-yet-committed.
                standinname = lfutil.standin(f)
                lfutil.writestandin(repo, standinname, hash='',
                    executable=lfutil.getexecutable(repo.wjoin(f)))
                standins.append(standinname)
                # A previously-removed largefile is re-added via
                # normallookup rather than add.
                if lfdirstate[f] == 'r':
                    lfdirstate.normallookup(f)
                else:
                    lfdirstate.add(f)
            lfdirstate.write()
            # Map standins that failed to add back to largefile names,
            # but only report the ones the user named explicitly.
            bad += [lfutil.splitstandin(f)
                    for f in repo[None].add(standins)
                    if f in m.files()]
    finally:
        wlock.release()
    return bad
144
144
def removelargefiles(ui, repo, isaddremove, *pats, **opts):
    '''Remove largefiles matching *pats* from the working directory.

    With --after, only files already deleted on disk are scheduled for
    removal; otherwise clean files are removed too and modified/added
    files produce a warning. Returns 1 if any file was skipped with a
    warning, else 0.
    '''
    after = opts.get('after')
    if not pats and not after:
        raise util.Abort(_('no files specified'))
    m = scmutil.match(repo[None], pats, opts)
    # Run status with largefile awareness so largefiles show up under
    # their own names rather than as standins.
    try:
        repo.lfstatus = True
        s = repo.status(match=m, clean=True)
    finally:
        repo.lfstatus = False
    manifest = repo[None].manifest()
    # Keep only files that actually are largefiles (their standin is in
    # the manifest); normal files are the caller's problem.
    modified, added, deleted, clean = [[f for f in list
                                        if lfutil.standin(f) in manifest]
                                       for list in (s.modified, s.added,
                                                    s.deleted, s.clean)]

    def warn(files, msg):
        # Emit msg per file; report whether anything was warned about.
        for f in files:
            ui.warn(msg % m.rel(f))
        return int(len(files) > 0)

    result = 0

    if after:
        remove = deleted
        result = warn(modified + added + clean,
                      _('not removing %s: file still exists\n'))
    else:
        remove = deleted + clean
        result = warn(modified, _('not removing %s: file is modified (use -f'
                                  ' to force removal)\n'))
        result = warn(added, _('not removing %s: file has been marked for add'
                               ' (use forget to undo)\n')) or result

    for f in sorted(remove):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in remove:
            if not after:
                # If this is being called by addremove, notify the user that we
                # are removing the file.
                if isaddremove:
                    ui.status(_('removing %s\n') % f)
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
            lfdirstate.remove(f)
        lfdirstate.write()
        # From here on we operate on the standins, not the largefiles.
        remove = [lfutil.standin(f) for f in remove]
        # If this is being called by addremove, let the original addremove
        # function handle this.
        if not isaddremove:
            for f in remove:
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(remove)
    finally:
        wlock.release()

    return result
208
208
209 # For overriding mercurial.hgweb.webcommands so that largefiles will
209 # For overriding mercurial.hgweb.webcommands so that largefiles will
210 # appear at their right place in the manifests.
210 # appear at their right place in the manifests.
def decodepath(orig, path):
    # Present a standin under the name of the largefile it stands for;
    # any other path is passed through untouched.
    largefile = lfutil.splitstandin(path)
    if largefile:
        return largefile
    return path
213
213
214 # -- Wrappers: modify existing commands --------------------------------
214 # -- Wrappers: modify existing commands --------------------------------
215
215
216 # Add works by going through the files that the user wanted to add and
216 # Add works by going through the files that the user wanted to add and
217 # checking if they should be added as largefiles. Then it makes a new
217 # checking if they should be added as largefiles. Then it makes a new
218 # matcher which matches only the normal files and runs the original
218 # matcher which matches only the normal files and runs the original
219 # version of add.
219 # version of add.
def overrideadd(orig, ui, repo, *pats, **opts):
    '''Wrapped 'add': route qualifying files through addlargefiles, then
    run the original add with largefiles masked out of the matcher.'''
    # --normal forces plain behaviour and is incompatible with --large.
    if opts.pop('normal'):
        if opts.get('large'):
            raise util.Abort(_('--normal cannot be used with --large'))
        return orig(ui, repo, *pats, **opts)
    matcher = scmutil.match(repo[None], pats, opts)
    bad = addlargefiles(ui, repo, matcher, **opts)
    installnormalfilesmatchfn(repo[None].manifest())
    result = orig(ui, repo, *pats, **opts)
    restorematchfn()

    # Fail (1) when either the normal add failed or a largefile add did.
    if result == 1 or bad:
        return 1
    return 0
233
233
def overrideremove(orig, ui, repo, *pats, **opts):
    # Remove normal files first (largefiles masked out of the matcher),
    # then let the largefiles machinery remove its own files.
    installnormalfilesmatchfn(repo[None].manifest())
    normalresult = orig(ui, repo, *pats, **opts)
    restorematchfn()
    lfresult = removelargefiles(ui, repo, False, *pats, **opts)
    return lfresult or normalresult
239
239
def overridestatusfn(orig, repo, rev2, **opts):
    # Run the subrepo status with largefile awareness enabled, restoring
    # the flag no matter how the wrapped call exits.
    repo._repo.lfstatus = True
    try:
        return orig(repo, rev2, **opts)
    finally:
        repo._repo.lfstatus = False
246
246
def overridestatus(orig, ui, repo, *pats, **opts):
    # Run 'status' with largefile awareness switched on; the finally
    # clause guarantees the flag is reset even on error.
    repo.lfstatus = True
    try:
        return orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False
253
253
def overridedirty(orig, repo, ignoreupdate=False):
    # Dirtiness checks on a subrepo must take largefiles into account.
    repo._repo.lfstatus = True
    try:
        return orig(repo, ignoreupdate)
    finally:
        repo._repo.lfstatus = False
260
260
def overridelog(orig, ui, repo, *pats, **opts):
    '''Wrapped 'log': make matchers see largefiles via their standins.

    Temporarily installs a matchandpats override (and a nofollow log file
    matcher override) for the duration of the wrapped call, restoring
    both in a finally clause.
    '''
    def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
            default='relpath'):
        """Matcher that merges root directory with .hglf, suitable for log.
        It is still possible to match .hglf directly.
        For any listed files run log on the standin too.
        matchfn tries both the given filename and with .hglf stripped.
        """
        matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default)
        m, p = copy.copy(matchandpats)

        if m.always():
            # We want to match everything anyway, so there's no benefit trying
            # to add standins.
            return matchandpats

        pats = set(p)
        # TODO: handling of patterns in both cases below
        if m._cwd:
            if os.path.isabs(m._cwd):
                # TODO: handle largefile magic when invoked from other cwd
                return matchandpats
            # Relative cwd: prefix enough '../' to reach the repo root
            # before appending the standin path.
            back = (m._cwd.count('/') + 1) * '../'
            pats.update(back + lfutil.standin(m._cwd + '/' + f) for f in p)
        else:
            pats.update(lfutil.standin(f) for f in p)

        for i in range(0, len(m._files)):
            standin = lfutil.standin(m._files[i])
            # Replace a file by its standin when the standin exists in the
            # context; otherwise also try matching the standin name.
            if standin in repo[ctx.node()]:
                m._files[i] = standin
            elif m._files[i] not in repo[ctx.node()]:
                m._files.append(standin)
                pats.add(standin)

        m._fmap = set(m._files)
        m._always = False
        origmatchfn = m.matchfn
        def lfmatchfn(f):
            # Match either the standin's largefile name or the file itself.
            lf = lfutil.splitstandin(f)
            if lf is not None and origmatchfn(lf):
                return True
            r = origmatchfn(f)
            return r
        m.matchfn = lfmatchfn

        return m, pats

    # For hg log --patch, the match object is used in two different senses:
    # (1) to determine what revisions should be printed out, and
    # (2) to determine what files to print out diffs for.
    # The magic matchandpats override should be used for case (1) but not for
    # case (2).
    def overridemakelogfilematcher(repo, pats, opts):
        pctx = repo[None]
        match, pats = oldmatchandpats(pctx, pats, opts)
        return lambda rev: match

    oldmatchandpats = installmatchandpatsfn(overridematchandpats)
    oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
    setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)

    try:
        return orig(ui, repo, *pats, **opts)
    finally:
        # Undo both monkey patches regardless of how log exits.
        restorematchandpatsfn()
        setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
328
328
def overrideverify(orig, ui, repo, *pats, **opts):
    # Pull the largefile-specific flags out before the vanilla verify
    # runs, then verify the largefile store if any of them was given.
    large = opts.pop('large', False)
    verifyall = opts.pop('lfa', False)
    contents = opts.pop('lfc', False)

    result = orig(ui, repo, *pats, **opts)
    if large or verifyall or contents:
        result = result or lfcommands.verifylfiles(ui, repo, verifyall,
                                                   contents)
    return result
338
338
def overridedebugstate(orig, ui, repo, *pats, **opts):
    # Without --large, debugstate behaves as usual.
    if not opts.pop('large', False):
        orig(ui, repo, *pats, **opts)
        return

    # With --large, hand debugstate an object whose dirstate is the
    # largefiles dirstate instead of the repo's own.
    class lfdirstaterepo(object):
        dirstate = lfutil.openlfdirstate(ui, repo)
    orig(ui, lfdirstaterepo, *pats, **opts)
347
347
348 # Override needs to refresh standins so that update's normal merge
348 # Override needs to refresh standins so that update's normal merge
349 # will go through properly. Then the other update hook (overriding repo.update)
349 # will go through properly. Then the other update hook (overriding repo.update)
350 # will get the new files. Filemerge is also overridden so that the merge
350 # will get the new files. Filemerge is also overridden so that the merge
351 # will merge standins correctly.
351 # will merge standins correctly.
def overrideupdate(orig, ui, repo, *pats, **opts):
    '''Wrapped 'update': with --check, first verify that no largefile has
    uncommitted changes (comparing working copies against the standin
    hashes recorded in '.') before delegating to the original update.'''
    # Need to lock between the standins getting updated and their
    # largefiles getting updated
    wlock = repo.wlock()
    try:
        if opts['check']:
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            unsure, s = lfdirstate.status(
                match_.always(repo.root, repo.getcwd()),
                [], False, False, False)

            mod = len(s.modified) > 0
            for lfile in unsure:
                # Hash the working copy and compare against the standin
                # content to settle files the dirstate is unsure about.
                standin = lfutil.standin(lfile)
                if repo['.'][standin].data().strip() != \
                        lfutil.hashfile(repo.wjoin(lfile)):
                    mod = True
                else:
                    lfdirstate.normal(lfile)
            lfdirstate.write()
            if mod:
                raise util.Abort(_('uncommitted changes'))
        return orig(ui, repo, *pats, **opts)
    finally:
        wlock.release()
377
377
378 # Before starting the manifest merge, merge.updates will call
378 # Before starting the manifest merge, merge.updates will call
379 # _checkunknownfile to check if there are any files in the merged-in
379 # _checkunknownfile to check if there are any files in the merged-in
380 # changeset that collide with unknown files in the working copy.
380 # changeset that collide with unknown files in the working copy.
381 #
381 #
382 # The largefiles are seen as unknown, so this prevents us from merging
382 # The largefiles are seen as unknown, so this prevents us from merging
383 # in a file 'foo' if we already have a largefile with the same name.
383 # in a file 'foo' if we already have a largefile with the same name.
384 #
384 #
385 # The overridden function filters the unknown files by removing any
385 # The overridden function filters the unknown files by removing any
386 # largefiles. This makes the merge proceed and we can then handle this
386 # largefiles. This makes the merge proceed and we can then handle this
387 # case further in the overridden calculateupdates function below.
387 # case further in the overridden calculateupdates function below.
def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
    # A file whose standin is tracked in the working context is a
    # largefile, not an unknown file, so it must never be reported as a
    # merge collision; everything else goes to the original check.
    standin = lfutil.standin(repo.dirstate.normalize(f))
    if standin in wctx:
        return False
    return origfn(repo, wctx, mctx, f)
392
392
393 # The manifest merge handles conflicts on the manifest level. We want
393 # The manifest merge handles conflicts on the manifest level. We want
394 # to handle changes in largefile-ness of files at this level too.
394 # to handle changes in largefile-ness of files at this level too.
395 #
395 #
396 # The strategy is to run the original calculateupdates and then process
396 # The strategy is to run the original calculateupdates and then process
397 # the action list it outputs. There are two cases we need to deal with:
397 # the action list it outputs. There are two cases we need to deal with:
398 #
398 #
399 # 1. Normal file in p1, largefile in p2. Here the largefile is
399 # 1. Normal file in p1, largefile in p2. Here the largefile is
400 # detected via its standin file, which will enter the working copy
400 # detected via its standin file, which will enter the working copy
401 # with a "get" action. It is not "merge" since the standin is all
401 # with a "get" action. It is not "merge" since the standin is all
402 # Mercurial is concerned with at this level -- the link to the
402 # Mercurial is concerned with at this level -- the link to the
403 # existing normal file is not relevant here.
403 # existing normal file is not relevant here.
404 #
404 #
405 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
405 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
406 # since the largefile will be present in the working copy and
406 # since the largefile will be present in the working copy and
407 # different from the normal file in p2. Mercurial therefore
407 # different from the normal file in p2. Mercurial therefore
408 # triggers a merge action.
408 # triggers a merge action.
409 #
409 #
410 # In both cases, we prompt the user and emit new actions to either
410 # In both cases, we prompt the user and emit new actions to either
411 # remove the standin (if the normal file was kept) or to remove the
411 # remove the standin (if the normal file was kept) or to remove the
412 # normal file and get the standin (if the largefile was kept). The
412 # normal file and get the standin (if the largefile was kept). The
413 # default prompt answer is to use the largefile version since it was
413 # default prompt answer is to use the largefile version since it was
414 # presumably changed on purpose.
414 # presumably changed on purpose.
415 #
415 #
416 # Finally, the merge.applyupdates function will then take care of
416 # Finally, the merge.applyupdates function will then take care of
417 # writing the files into the working copy and lfcommands.updatelfiles
417 # writing the files into the working copy and lfcommands.updatelfiles
418 # will update the largefiles.
418 # will update the largefiles.
419 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
419 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
420 partial, acceptremote, followcopies):
420 partial, acceptremote, followcopies):
421 overwrite = force and not branchmerge
421 overwrite = force and not branchmerge
422 actions, diverge, renamedelete = origfn(
422 actions, diverge, renamedelete = origfn(
423 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
423 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
424 followcopies)
424 followcopies)
425
425
426 if overwrite:
426 if overwrite:
427 return actions, diverge, renamedelete
427 return actions, diverge, renamedelete
428
428
429 # Convert to dictionary with filename as key and action as value.
429 # Convert to dictionary with filename as key and action as value.
430 lfiles = set()
430 lfiles = set()
431 actionbyfile = {}
431 actionbyfile = {}
432 for m, l in actions.iteritems():
432 for m, l in actions.iteritems():
433 for f, args, msg in l:
433 for f, args, msg in l:
434 actionbyfile[f] = m, args, msg
434 actionbyfile[f] = m, args, msg
435 splitstandin = f and lfutil.splitstandin(f)
435 splitstandin = f and lfutil.splitstandin(f)
436 if splitstandin in p1:
436 if splitstandin in p1:
437 lfiles.add(splitstandin)
437 lfiles.add(splitstandin)
438 elif lfutil.standin(f) in p1:
438 elif lfutil.standin(f) in p1:
439 lfiles.add(f)
439 lfiles.add(f)
440
440
441 for lfile in lfiles:
441 for lfile in lfiles:
442 standin = lfutil.standin(lfile)
442 standin = lfutil.standin(lfile)
443 (lm, largs, lmsg) = actionbyfile.get(lfile, (None, None, None))
443 (lm, largs, lmsg) = actionbyfile.get(lfile, (None, None, None))
444 (sm, sargs, smsg) = actionbyfile.get(standin, (None, None, None))
444 (sm, sargs, smsg) = actionbyfile.get(standin, (None, None, None))
445 if sm in ('g', 'dc') and lm != 'r':
445 if sm in ('g', 'dc') and lm != 'r':
446 # Case 1: normal file in the working copy, largefile in
446 # Case 1: normal file in the working copy, largefile in
447 # the second parent
447 # the second parent
448 usermsg = _('remote turned local normal file %s into a largefile\n'
448 usermsg = _('remote turned local normal file %s into a largefile\n'
449 'use (l)argefile or keep (n)ormal file?'
449 'use (l)argefile or keep (n)ormal file?'
450 '$$ &Largefile $$ &Normal file') % lfile
450 '$$ &Largefile $$ &Normal file') % lfile
451 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
451 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
452 actionbyfile[lfile] = ('r', None, 'replaced by standin')
452 actionbyfile[lfile] = ('r', None, 'replaced by standin')
453 actionbyfile[standin] = ('g', sargs, 'replaces standin')
453 actionbyfile[standin] = ('g', sargs, 'replaces standin')
454 else: # keep local normal file
454 else: # keep local normal file
455 actionbyfile[lfile] = ('k', None, 'replaces standin')
455 actionbyfile[lfile] = ('k', None, 'replaces standin')
456 if branchmerge:
456 if branchmerge:
457 actionbyfile[standin] = ('k', None,
457 actionbyfile[standin] = ('k', None,
458 'replaced by non-standin')
458 'replaced by non-standin')
459 else:
459 else:
460 actionbyfile[standin] = ('r', None,
460 actionbyfile[standin] = ('r', None,
461 'replaced by non-standin')
461 'replaced by non-standin')
462 elif lm in ('g', 'dc') and sm != 'r':
462 elif lm in ('g', 'dc') and sm != 'r':
463 # Case 2: largefile in the working copy, normal file in
463 # Case 2: largefile in the working copy, normal file in
464 # the second parent
464 # the second parent
465 usermsg = _('remote turned local largefile %s into a normal file\n'
465 usermsg = _('remote turned local largefile %s into a normal file\n'
466 'keep (l)argefile or use (n)ormal file?'
466 'keep (l)argefile or use (n)ormal file?'
467 '$$ &Largefile $$ &Normal file') % lfile
467 '$$ &Largefile $$ &Normal file') % lfile
468 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
468 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
469 if branchmerge:
469 if branchmerge:
470 # largefile can be restored from standin safely
470 # largefile can be restored from standin safely
471 actionbyfile[lfile] = ('k', None, 'replaced by standin')
471 actionbyfile[lfile] = ('k', None, 'replaced by standin')
472 actionbyfile[standin] = ('k', None, 'replaces standin')
472 actionbyfile[standin] = ('k', None, 'replaces standin')
473 else:
473 else:
474 # "lfile" should be marked as "removed" without
474 # "lfile" should be marked as "removed" without
475 # removal of itself
475 # removal of itself
476 actionbyfile[lfile] = ('lfmr', None,
476 actionbyfile[lfile] = ('lfmr', None,
477 'forget non-standin largefile')
477 'forget non-standin largefile')
478
478
479 # linear-merge should treat this largefile as 're-added'
479 # linear-merge should treat this largefile as 're-added'
480 actionbyfile[standin] = ('a', None, 'keep standin')
480 actionbyfile[standin] = ('a', None, 'keep standin')
481 else: # pick remote normal file
481 else: # pick remote normal file
482 actionbyfile[lfile] = ('g', largs, 'replaces standin')
482 actionbyfile[lfile] = ('g', largs, 'replaces standin')
483 actionbyfile[standin] = ('r', None, 'replaced by non-standin')
483 actionbyfile[standin] = ('r', None, 'replaced by non-standin')
484
484
485 # Convert back to dictionary-of-lists format
485 # Convert back to dictionary-of-lists format
486 for l in actions.itervalues():
486 for l in actions.itervalues():
487 l[:] = []
487 l[:] = []
488 actions['lfmr'] = []
488 actions['lfmr'] = []
489 for f, (m, args, msg) in actionbyfile.iteritems():
489 for f, (m, args, msg) in actionbyfile.iteritems():
490 actions[m].append((f, args, msg))
490 actions[m].append((f, args, msg))
491
491
492 return actions, diverge, renamedelete
492 return actions, diverge, renamedelete
493
493
def mergerecordupdates(orig, repo, actions, branchmerge):
    """Wrap merge.recordupdates to handle the largefiles 'lfmr' action.

    Entries under the 'lfmr' key ("largefile mark removed") are dropped
    from the dirstate *before* delegating to the original recordupdates,
    so that the removal is recorded ahead of all other actions.
    """
    lfmractions = actions.get('lfmr')
    if lfmractions:
        dirstate = repo.dirstate
        for lfile, unusedargs, unusedmsg in lfmractions:
            dirstate.remove(lfile)
    return orig(repo, actions, branchmerge)
502
502
503
503
# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits without prompting the user.
def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
    """Merge a largefile standin, prompting only on a real conflict.

    Non-standin files are handed straight to the original filemerge.
    For standins, the stored largefile hashes of the ancestor (fca),
    local (fcd) and other (fco) versions are compared: if only the
    other side changed the largefile it is taken silently; if both
    sides changed it differently the user is asked. Always returns 0.
    """
    if not lfutil.isstandin(orig):
        return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)

    ahash = fca.data().strip().lower()
    dhash = fcd.data().strip().lower()
    ohash = fco.data().strip().lower()
    if ohash != ahash and ohash != dhash:
        # The other side modified the largefile; decide whether to take it.
        if dhash == ahash:
            # Local side is unchanged from the ancestor: take other silently.
            takeother = True
        else:
            # Genuine conflict: both sides changed it in different ways.
            prompt = _('largefile %s has a merge conflict\nancestor was %s\n'
                       'keep (l)ocal %s or\ntake (o)ther %s?'
                       '$$ &Local $$ &Other') % (
                           lfutil.splitstandin(orig), ahash, dhash, ohash)
            takeother = repo.ui.promptchoice(prompt, 0) == 1
        if takeother:
            repo.wwrite(fcd.path(), fco.data(), fco.flags())
    return 0
524
524
# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile in that function it
# checks if the destination largefile already exists. It also keeps a
# list of copied files so that the largefiles can be copied and the
# dirstate updated.
def overridecopy(orig, ui, repo, pats, opts, rename=False):
    """Wrap cmdutil copy/rename so largefiles are copied/renamed too.

    Runs the original command twice: once restricted to normal files,
    then again with the matcher rewritten to operate on standins.  The
    largefiles themselves are copied/renamed by intercepting
    util.copyfile, and the largefiles dirstate is updated to match.
    Returns the combined result of both runs.
    """
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    def makestandin(relpath):
        # absolute working-directory path of the standin for relpath
        path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
        return os.path.join(repo.wjoin(lfutil.standin(path)))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))
    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    installnormalfilesmatchfn(repo[None].manifest())
    try:
        try:
            result = orig(ui, repo, pats, opts, rename)
        except util.Abort, e:
            # "no files to copy" here just means no *normal* files matched;
            # remember that and fall through to the largefiles pass.
            if str(e) != _('no files to copy'):
                raise e
            else:
                nonormalfiles = True
                result = 0
    finally:
        restorematchfn()

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    try:
        try:
            # When we call orig below it creates the standins but we don't add
            # them to the dir state until later so lock during that time.
            wlock = repo.wlock()

            manifest = repo[None].manifest()
            def overridematch(ctx, pats=[], opts={}, globbed=False,
                    default='relpath'):
                # Build a matcher that hits standins for tracked largefiles.
                newpats = []
                # The patterns were previously mangled to add the standin
                # directory; we need to remove that now
                for pat in pats:
                    if match_.patkind(pat) is None and lfutil.shortname in pat:
                        newpats.append(pat.replace(lfutil.shortname, ''))
                    else:
                        newpats.append(pat)
                match = oldmatch(ctx, newpats, opts, globbed, default)
                m = copy.copy(match)
                lfile = lambda f: lfutil.standin(f) in manifest
                m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
                m._fmap = set(m._files)
                origmatchfn = m.matchfn
                m.matchfn = lambda f: (lfutil.isstandin(f) and
                                       (f in manifest) and
                                       origmatchfn(lfutil.splitstandin(f)) or
                                       None)
                return m
            oldmatch = installmatchfn(overridematch)
            listpats = []
            for pat in pats:
                if match_.patkind(pat) is not None:
                    listpats.append(pat)
                else:
                    listpats.append(makestandin(pat))

            try:
                # Intercept util.copyfile so we can veto copies onto an
                # existing largefile and record what actually got copied.
                origcopyfile = util.copyfile
                copiedfiles = []
                def overridecopyfile(src, dest):
                    if (lfutil.shortname in src and
                        dest.startswith(repo.wjoin(lfutil.shortname))):
                        destlfile = dest.replace(lfutil.shortname, '')
                        if not opts['force'] and os.path.exists(destlfile):
                            raise IOError('',
                                _('destination largefile already exists'))
                    copiedfiles.append((src, dest))
                    origcopyfile(src, dest)

                util.copyfile = overridecopyfile
                result += orig(ui, repo, listpats, opts, rename)
            finally:
                util.copyfile = origcopyfile

            # Mirror every recorded standin copy onto the largefile itself
            # and record the change in the largefiles dirstate.
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for (src, dest) in copiedfiles:
                if (lfutil.shortname in src and
                    dest.startswith(repo.wjoin(lfutil.shortname))):
                    srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
                    destlfile = dest.replace(repo.wjoin(lfutil.standin('')),
                                             '')
                    destlfiledir = os.path.dirname(repo.wjoin(destlfile)) \
                        or '.'
                    if not os.path.isdir(destlfiledir):
                        os.makedirs(destlfiledir)
                    if rename:
                        os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))

                        # The file is gone, but this deletes any empty parent
                        # directories as a side-effect.
                        util.unlinkpath(repo.wjoin(srclfile), True)
                        lfdirstate.remove(srclfile)
                    else:
                        util.copyfile(repo.wjoin(srclfile),
                                      repo.wjoin(destlfile))

                    # destination largefile is tracked in both cases
                    lfdirstate.add(destlfile)
            lfdirstate.write()
        except util.Abort, e:
            if str(e) != _('no files to copy'):
                raise e
            else:
                nolfiles = True
    finally:
        restorematchfn()
        wlock.release()

    # Only abort when *neither* pass found anything to copy.
    if nolfiles and nonormalfiles:
        raise util.Abort(_('no files to copy'))

    return result
661
661
# When the user calls revert, we have to be careful to not revert any
# changes to other largefiles accidentally. This means we have to keep
# track of the largefiles that are being reverted so we only pull down
# the necessary largefiles.
#
# Standins are only updated (to match the hash of largefiles) before
# commits. Update the standins then run the original revert, changing
# the matcher to hit standins instead of largefiles. Based on the
# resulting standins update the largefiles.
def overriderevert(orig, ui, repo, *pats, **opts):
    """Wrap 'hg revert' so largefiles are reverted alongside standins."""
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    wlock = repo.wlock()
    try:
        # Sync standins with the current largefile contents so revert
        # sees accurate standin state.
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        s = lfutil.lfdirstatestatus(lfdirstate, repo)
        lfdirstate.write()
        for lfile in s.modified:
            lfutil.updatestandin(repo, lfutil.standin(lfile))
        for lfile in s.deleted:
            if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
                os.unlink(repo.wjoin(lfutil.standin(lfile)))

        oldstandins = lfutil.getstandinsstate(repo)

        def overridematch(ctx, pats=[], opts={}, globbed=False,
                default='relpath'):
            # Redirect the matcher from largefile names to standin names.
            match = oldmatch(ctx, pats, opts, globbed, default)
            m = copy.copy(match)
            def tostandin(f):
                if lfutil.standin(f) in ctx:
                    return lfutil.standin(f)
                elif lfutil.standin(f) in repo[None]:
                    # standin exists only in the working copy: drop it
                    return None
                return f
            m._files = [tostandin(f) for f in m._files]
            m._files = [f for f in m._files if f is not None]
            m._fmap = set(m._files)
            origmatchfn = m.matchfn
            def matchfn(f):
                if lfutil.isstandin(f):
                    return (origmatchfn(lfutil.splitstandin(f)) and
                            (f in repo[None] or f in ctx))
                return origmatchfn(f)
            m.matchfn = matchfn
            return m
        oldmatch = installmatchfn(overridematch)
        try:
            orig(ui, repo, *pats, **opts)
        finally:
            restorematchfn()

        # Standins changed by the revert tell us which largefiles to update.
        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
        # lfdirstate should be 'normallookup'-ed for updated files,
        # because reverting doesn't touch dirstate for 'normal' files
        # when target revision is explicitly specified: in such case,
        # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
        # of target (standin) file.
        lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
                                normallookup=True)

    finally:
        wlock.release()
727
727
# after pulling changesets, we need to take some extra care to get
# largefiles updated remotely
def overridepull(orig, ui, repo, source=None, **opts):
    """Wrap 'hg pull' to also cache largefiles for pulled revisions.

    After the normal pull, the revisions named by --lfrev (plus
    'pulled()' when --all-largefiles was given) have their largefiles
    downloaded into the cache. Returns the original pull's result.
    """
    numbefore = len(repo)
    source = source or 'default'
    repo.lfpullsource = source
    result = orig(ui, repo, source, **opts)
    numafter = len(repo)
    lfrevs = opts.get('lfrev', [])
    if opts.get('all_largefiles'):
        lfrevs.append('pulled()')
    if lfrevs and numafter > numbefore:
        numcached = 0
        # make the pulled() revset symbol resolvable while we cache
        repo.firstpulled = numbefore
        try:
            for rev in scmutil.revrange(repo, lfrevs):
                ui.note(_('pulling largefiles for revision %s\n') % rev)
                cached, missing = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            del repo.firstpulled
        ui.status(_("%d largefiles cached\n") % numcached)
    return result
752
752
def pulledrevsetsymbol(repo, subset, x):
    """``pulled()``
    Changesets that just has been pulled.

    Only available with largefiles from pull --lfrev expressions.

    .. container:: verbose

      Some examples:

      - pull largefiles for all new changesets::

          hg pull -lfrev "pulled()"

      - pull largefiles for all new branch heads::

          hg pull -lfrev "head(pulled()) and not closed()"

    """

    # repo.firstpulled is only set by overridepull while --lfrev
    # expressions are being evaluated; use a sentinel so we can tell
    # "attribute absent" apart from any stored value.
    sentinel = object()
    firstpulled = getattr(repo, 'firstpulled', sentinel)
    if firstpulled is sentinel:
        raise util.Abort(_("pulled() only available in --lfrev"))
    return revset.baseset([r for r in subset if r >= firstpulled])
778
778
def overrideclone(orig, ui, source, dest=None, **opts):
    """Reject --all-largefiles clones to non-local destinations.

    Caching every largefile only makes sense when the destination is a
    local repository; otherwise abort before any work is done.
    """
    resolveddest = dest if dest is not None else hg.defaultdest(source)
    if opts.get('all_largefiles') and not hg.islocal(resolveddest):
        raise util.Abort(
            _('--all-largefiles is incompatible with '
              'non-local destination %s') % resolveddest)
    # pass the caller's original dest (possibly None) straight through
    return orig(ui, source, dest, **opts)
789
789
def hgclone(orig, ui, opts, *args, **kwargs):
    """Wrap hg.clone to download largefiles when --all-largefiles is set.

    Returns the original (sourcerepo, destrepo) pair, or None when the
    clone failed or some largefiles could not be downloaded.
    """
    result = orig(ui, opts, *args, **kwargs)
    if result is None:
        return None

    unusedsrcrepo, destrepo = result
    localrepo = destrepo.local()

    # Caching is implicitly limited to 'rev' option, since the dest repo was
    # truncated at that point. The user may expect a download count with
    # this option, so attempt whether or not this is a largefile repo.
    if opts.get('all_largefiles'):
        unusedsuccess, missing = lfcommands.downloadlfiles(ui, localrepo,
                                                           None)
        if missing != 0:
            return None

    return result
807
807
def overriderebase(orig, ui, repo, **opts):
    """Run rebase with automated largefiles commits and quiet status.

    Installs an automated commit hook (aware of --continue) and a no-op
    status writer for the duration of the rebase, restoring both
    afterwards.
    """
    resuming = opts.get('continue')
    repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))

    # suppress per-file largefiles status output during the rebase
    def quietstatuswriter(*msg, **kwargs):
        pass
    repo._lfstatuswriters.append(quietstatuswriter)

    try:
        return orig(ui, repo, **opts)
    finally:
        # restore in reverse order of installation
        repo._lfstatuswriters.pop()
        repo._lfcommithooks.pop()
817
817
def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None, mtime=None, subrepos=None):
    """Archive a revision, replacing standins with the real largefiles.

    Re-implements archival.archive: largefile standins are resolved via
    the store/system cache and the actual file contents are written to
    the archive instead of the standin text.
    """
    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == 'files':
        if prefix:
            raise util.Abort(
                _('cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        # add one member to the archive, honoring matchfn and decode
        if matchfn and not matchfn(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        def metadata():
            # same .hg_archival.txt content as vanilla 'hg archive'
            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
                hex(repo.changelog.node(0)), hex(node), ctx.branch())

            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                           if repo.tagtype(t) == 'global')
            if not tags:
                repo.ui.pushbuffer()
                opts = {'template': '{latesttag}\n{latesttagdistance}',
                        'style': '', 'patch': None, 'git': None}
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split('\n')
                tags = ''.join('latesttag: %s\n' % t
                               for t in ltags.split(':'))
                tags += 'latesttagdistance: %s\n' % dist

            return base + tags

        write('.hg_archival.txt', 0644, False, metadata)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            # resolve the standin to the cached largefile on disk
            path = lfutil.findfile(repo, getdata().strip())
            if path is None:
                raise util.Abort(
                    _('largefile %s not found in repo store or system cache')
                    % lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                # lazily read the largefile from the cache path
                fd = None
                try:
                    fd = open(path, 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn
        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.sub(subpath)
            submatch = match_.narrowmatcher(subpath, matchfn)
            sub.archive(archiver, prefix, submatch)

    archiver.done()
896
896
def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
    """Archive an hg subrepo, replacing standins with real largefiles.

    Counterpart of overridearchive for hgsubrepo.archive: members are
    added under prefix + repo._path, and nested subrepos are archived
    recursively.
    """
    repo._get(repo._state + ('hg',))
    rev = repo._state[1]
    ctx = repo._repo[rev]

    lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())

    def write(name, mode, islink, getdata):
        # At this point, the standin has been replaced with the largefile
        # name, so the normal matcher works here without the lfutil
        # variants.
        # NOTE(review): this tests match(f), the enclosing loop variable,
        # rather than the 'name' argument; the sole call site passes
        # name=f so they coincide — confirm before reusing 'write'.
        if match and not match(f):
            return
        data = getdata()

        archiver.addfile(prefix + repo._path + '/' + name, mode, islink,
                         data)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            # resolve the standin to the cached largefile on disk
            path = lfutil.findfile(repo._repo, getdata().strip())
            if path is None:
                raise util.Abort(
                    _('largefile %s not found in repo store or system cache')
                    % lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                # lazily read the largefile contents
                fd = None
                try:
                    fd = open(os.path.join(prefix, path), 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn

        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        submatch = match_.narrowmatcher(subpath, match)
        sub.archive(archiver, os.path.join(prefix, repo._path) + '/',
                    submatch)
943 # If a largefile is modified, the change is not reflected in its
942 # If a largefile is modified, the change is not reflected in its
944 # standin until a commit. cmdutil.bailifchanged() raises an exception
943 # standin until a commit. cmdutil.bailifchanged() raises an exception
945 # if the repo has uncommitted changes. Wrap it to also check if
944 # if the repo has uncommitted changes. Wrap it to also check if
946 # largefiles were changed. This is used by bisect, backout and fetch.
945 # largefiles were changed. This is used by bisect, backout and fetch.
947 def overridebailifchanged(orig, repo):
946 def overridebailifchanged(orig, repo):
948 orig(repo)
947 orig(repo)
949 repo.lfstatus = True
948 repo.lfstatus = True
950 s = repo.status()
949 s = repo.status()
951 repo.lfstatus = False
950 repo.lfstatus = False
952 if s.modified or s.added or s.removed or s.deleted:
951 if s.modified or s.added or s.removed or s.deleted:
953 raise util.Abort(_('uncommitted changes'))
952 raise util.Abort(_('uncommitted changes'))
954
953
955 def overrideforget(orig, ui, repo, *pats, **opts):
954 def overrideforget(orig, ui, repo, *pats, **opts):
956 installnormalfilesmatchfn(repo[None].manifest())
955 installnormalfilesmatchfn(repo[None].manifest())
957 result = orig(ui, repo, *pats, **opts)
956 result = orig(ui, repo, *pats, **opts)
958 restorematchfn()
957 restorematchfn()
959 m = scmutil.match(repo[None], pats, opts)
958 m = scmutil.match(repo[None], pats, opts)
960
959
961 try:
960 try:
962 repo.lfstatus = True
961 repo.lfstatus = True
963 s = repo.status(match=m, clean=True)
962 s = repo.status(match=m, clean=True)
964 finally:
963 finally:
965 repo.lfstatus = False
964 repo.lfstatus = False
966 forget = sorted(s.modified + s.added + s.deleted + s.clean)
965 forget = sorted(s.modified + s.added + s.deleted + s.clean)
967 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
966 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
968
967
969 for f in forget:
968 for f in forget:
970 if lfutil.standin(f) not in repo.dirstate and not \
969 if lfutil.standin(f) not in repo.dirstate and not \
971 os.path.isdir(m.rel(lfutil.standin(f))):
970 os.path.isdir(m.rel(lfutil.standin(f))):
972 ui.warn(_('not removing %s: file is already untracked\n')
971 ui.warn(_('not removing %s: file is already untracked\n')
973 % m.rel(f))
972 % m.rel(f))
974 result = 1
973 result = 1
975
974
976 for f in forget:
975 for f in forget:
977 if ui.verbose or not m.exact(f):
976 if ui.verbose or not m.exact(f):
978 ui.status(_('removing %s\n') % m.rel(f))
977 ui.status(_('removing %s\n') % m.rel(f))
979
978
980 # Need to lock because standin files are deleted then removed from the
979 # Need to lock because standin files are deleted then removed from the
981 # repository and we could race in-between.
980 # repository and we could race in-between.
982 wlock = repo.wlock()
981 wlock = repo.wlock()
983 try:
982 try:
984 lfdirstate = lfutil.openlfdirstate(ui, repo)
983 lfdirstate = lfutil.openlfdirstate(ui, repo)
985 for f in forget:
984 for f in forget:
986 if lfdirstate[f] == 'a':
985 if lfdirstate[f] == 'a':
987 lfdirstate.drop(f)
986 lfdirstate.drop(f)
988 else:
987 else:
989 lfdirstate.remove(f)
988 lfdirstate.remove(f)
990 lfdirstate.write()
989 lfdirstate.write()
991 standins = [lfutil.standin(f) for f in forget]
990 standins = [lfutil.standin(f) for f in forget]
992 for f in standins:
991 for f in standins:
993 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
992 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
994 repo[None].forget(standins)
993 repo[None].forget(standins)
995 finally:
994 finally:
996 wlock.release()
995 wlock.release()
997
996
998 return result
997 return result
999
998
1000 def _getoutgoings(repo, other, missing, addfunc):
999 def _getoutgoings(repo, other, missing, addfunc):
1001 """get pairs of filename and largefile hash in outgoing revisions
1000 """get pairs of filename and largefile hash in outgoing revisions
1002 in 'missing'.
1001 in 'missing'.
1003
1002
1004 largefiles already existing on 'other' repository are ignored.
1003 largefiles already existing on 'other' repository are ignored.
1005
1004
1006 'addfunc' is invoked with each unique pairs of filename and
1005 'addfunc' is invoked with each unique pairs of filename and
1007 largefile hash value.
1006 largefile hash value.
1008 """
1007 """
1009 knowns = set()
1008 knowns = set()
1010 lfhashes = set()
1009 lfhashes = set()
1011 def dedup(fn, lfhash):
1010 def dedup(fn, lfhash):
1012 k = (fn, lfhash)
1011 k = (fn, lfhash)
1013 if k not in knowns:
1012 if k not in knowns:
1014 knowns.add(k)
1013 knowns.add(k)
1015 lfhashes.add(lfhash)
1014 lfhashes.add(lfhash)
1016 lfutil.getlfilestoupload(repo, missing, dedup)
1015 lfutil.getlfilestoupload(repo, missing, dedup)
1017 if lfhashes:
1016 if lfhashes:
1018 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1017 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1019 for fn, lfhash in knowns:
1018 for fn, lfhash in knowns:
1020 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1019 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1021 addfunc(fn, lfhash)
1020 addfunc(fn, lfhash)
1022
1021
1023 def outgoinghook(ui, repo, other, opts, missing):
1022 def outgoinghook(ui, repo, other, opts, missing):
1024 if opts.pop('large', None):
1023 if opts.pop('large', None):
1025 lfhashes = set()
1024 lfhashes = set()
1026 if ui.debugflag:
1025 if ui.debugflag:
1027 toupload = {}
1026 toupload = {}
1028 def addfunc(fn, lfhash):
1027 def addfunc(fn, lfhash):
1029 if fn not in toupload:
1028 if fn not in toupload:
1030 toupload[fn] = []
1029 toupload[fn] = []
1031 toupload[fn].append(lfhash)
1030 toupload[fn].append(lfhash)
1032 lfhashes.add(lfhash)
1031 lfhashes.add(lfhash)
1033 def showhashes(fn):
1032 def showhashes(fn):
1034 for lfhash in sorted(toupload[fn]):
1033 for lfhash in sorted(toupload[fn]):
1035 ui.debug(' %s\n' % (lfhash))
1034 ui.debug(' %s\n' % (lfhash))
1036 else:
1035 else:
1037 toupload = set()
1036 toupload = set()
1038 def addfunc(fn, lfhash):
1037 def addfunc(fn, lfhash):
1039 toupload.add(fn)
1038 toupload.add(fn)
1040 lfhashes.add(lfhash)
1039 lfhashes.add(lfhash)
1041 def showhashes(fn):
1040 def showhashes(fn):
1042 pass
1041 pass
1043 _getoutgoings(repo, other, missing, addfunc)
1042 _getoutgoings(repo, other, missing, addfunc)
1044
1043
1045 if not toupload:
1044 if not toupload:
1046 ui.status(_('largefiles: no files to upload\n'))
1045 ui.status(_('largefiles: no files to upload\n'))
1047 else:
1046 else:
1048 ui.status(_('largefiles to upload (%d entities):\n')
1047 ui.status(_('largefiles to upload (%d entities):\n')
1049 % (len(lfhashes)))
1048 % (len(lfhashes)))
1050 for file in sorted(toupload):
1049 for file in sorted(toupload):
1051 ui.status(lfutil.splitstandin(file) + '\n')
1050 ui.status(lfutil.splitstandin(file) + '\n')
1052 showhashes(file)
1051 showhashes(file)
1053 ui.status('\n')
1052 ui.status('\n')
1054
1053
1055 def summaryremotehook(ui, repo, opts, changes):
1054 def summaryremotehook(ui, repo, opts, changes):
1056 largeopt = opts.get('large', False)
1055 largeopt = opts.get('large', False)
1057 if changes is None:
1056 if changes is None:
1058 if largeopt:
1057 if largeopt:
1059 return (False, True) # only outgoing check is needed
1058 return (False, True) # only outgoing check is needed
1060 else:
1059 else:
1061 return (False, False)
1060 return (False, False)
1062 elif largeopt:
1061 elif largeopt:
1063 url, branch, peer, outgoing = changes[1]
1062 url, branch, peer, outgoing = changes[1]
1064 if peer is None:
1063 if peer is None:
1065 # i18n: column positioning for "hg summary"
1064 # i18n: column positioning for "hg summary"
1066 ui.status(_('largefiles: (no remote repo)\n'))
1065 ui.status(_('largefiles: (no remote repo)\n'))
1067 return
1066 return
1068
1067
1069 toupload = set()
1068 toupload = set()
1070 lfhashes = set()
1069 lfhashes = set()
1071 def addfunc(fn, lfhash):
1070 def addfunc(fn, lfhash):
1072 toupload.add(fn)
1071 toupload.add(fn)
1073 lfhashes.add(lfhash)
1072 lfhashes.add(lfhash)
1074 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1073 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1075
1074
1076 if not toupload:
1075 if not toupload:
1077 # i18n: column positioning for "hg summary"
1076 # i18n: column positioning for "hg summary"
1078 ui.status(_('largefiles: (no files to upload)\n'))
1077 ui.status(_('largefiles: (no files to upload)\n'))
1079 else:
1078 else:
1080 # i18n: column positioning for "hg summary"
1079 # i18n: column positioning for "hg summary"
1081 ui.status(_('largefiles: %d entities for %d files to upload\n')
1080 ui.status(_('largefiles: %d entities for %d files to upload\n')
1082 % (len(lfhashes), len(toupload)))
1081 % (len(lfhashes), len(toupload)))
1083
1082
1084 def overridesummary(orig, ui, repo, *pats, **opts):
1083 def overridesummary(orig, ui, repo, *pats, **opts):
1085 try:
1084 try:
1086 repo.lfstatus = True
1085 repo.lfstatus = True
1087 orig(ui, repo, *pats, **opts)
1086 orig(ui, repo, *pats, **opts)
1088 finally:
1087 finally:
1089 repo.lfstatus = False
1088 repo.lfstatus = False
1090
1089
1091 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1090 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1092 similarity=None):
1091 similarity=None):
1093 if not lfutil.islfilesrepo(repo):
1092 if not lfutil.islfilesrepo(repo):
1094 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1093 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1095 # Get the list of missing largefiles so we can remove them
1094 # Get the list of missing largefiles so we can remove them
1096 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1095 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1097 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1096 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1098 False, False, False)
1097 False, False, False)
1099
1098
1100 # Call into the normal remove code, but the removing of the standin, we want
1099 # Call into the normal remove code, but the removing of the standin, we want
1101 # to have handled by original addremove. Monkey patching here makes sure
1100 # to have handled by original addremove. Monkey patching here makes sure
1102 # we don't remove the standin in the largefiles code, preventing a very
1101 # we don't remove the standin in the largefiles code, preventing a very
1103 # confused state later.
1102 # confused state later.
1104 if s.deleted:
1103 if s.deleted:
1105 m = [repo.wjoin(f) for f in s.deleted]
1104 m = [repo.wjoin(f) for f in s.deleted]
1106 removelargefiles(repo.ui, repo, True, *m, **opts)
1105 removelargefiles(repo.ui, repo, True, *m, **opts)
1107 # Call into the normal add code, and any files that *should* be added as
1106 # Call into the normal add code, and any files that *should* be added as
1108 # largefiles will be
1107 # largefiles will be
1109 addlargefiles(repo.ui, repo, matcher, **opts)
1108 addlargefiles(repo.ui, repo, matcher, **opts)
1110 # Now that we've handled largefiles, hand off to the original addremove
1109 # Now that we've handled largefiles, hand off to the original addremove
1111 # function to take care of the rest. Make sure it doesn't do anything with
1110 # function to take care of the rest. Make sure it doesn't do anything with
1112 # largefiles by passing a matcher that will ignore them.
1111 # largefiles by passing a matcher that will ignore them.
1113 matcher = composenormalfilematcher(matcher, repo[None].manifest())
1112 matcher = composenormalfilematcher(matcher, repo[None].manifest())
1114 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1113 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1115
1114
1116 # Calling purge with --all will cause the largefiles to be deleted.
1115 # Calling purge with --all will cause the largefiles to be deleted.
1117 # Override repo.status to prevent this from happening.
1116 # Override repo.status to prevent this from happening.
1118 def overridepurge(orig, ui, repo, *dirs, **opts):
1117 def overridepurge(orig, ui, repo, *dirs, **opts):
1119 oldstatus = repo.status
1118 oldstatus = repo.status
1120 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1119 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1121 clean=False, unknown=False, listsubrepos=False):
1120 clean=False, unknown=False, listsubrepos=False):
1122 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1121 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1123 listsubrepos)
1122 listsubrepos)
1124 lfdirstate = lfutil.openlfdirstate(ui, repo)
1123 lfdirstate = lfutil.openlfdirstate(ui, repo)
1125 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1124 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1126 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1125 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1127 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1126 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1128 unknown, ignored, r.clean)
1127 unknown, ignored, r.clean)
1129 repo.status = overridestatus
1128 repo.status = overridestatus
1130 orig(ui, repo, *dirs, **opts)
1129 orig(ui, repo, *dirs, **opts)
1131 repo.status = oldstatus
1130 repo.status = oldstatus
1132 def overriderollback(orig, ui, repo, **opts):
1131 def overriderollback(orig, ui, repo, **opts):
1133 wlock = repo.wlock()
1132 wlock = repo.wlock()
1134 try:
1133 try:
1135 before = repo.dirstate.parents()
1134 before = repo.dirstate.parents()
1136 orphans = set(f for f in repo.dirstate
1135 orphans = set(f for f in repo.dirstate
1137 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1136 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1138 result = orig(ui, repo, **opts)
1137 result = orig(ui, repo, **opts)
1139 after = repo.dirstate.parents()
1138 after = repo.dirstate.parents()
1140 if before == after:
1139 if before == after:
1141 return result # no need to restore standins
1140 return result # no need to restore standins
1142
1141
1143 pctx = repo['.']
1142 pctx = repo['.']
1144 for f in repo.dirstate:
1143 for f in repo.dirstate:
1145 if lfutil.isstandin(f):
1144 if lfutil.isstandin(f):
1146 orphans.discard(f)
1145 orphans.discard(f)
1147 if repo.dirstate[f] == 'r':
1146 if repo.dirstate[f] == 'r':
1148 repo.wvfs.unlinkpath(f, ignoremissing=True)
1147 repo.wvfs.unlinkpath(f, ignoremissing=True)
1149 elif f in pctx:
1148 elif f in pctx:
1150 fctx = pctx[f]
1149 fctx = pctx[f]
1151 repo.wwrite(f, fctx.data(), fctx.flags())
1150 repo.wwrite(f, fctx.data(), fctx.flags())
1152 else:
1151 else:
1153 # content of standin is not so important in 'a',
1152 # content of standin is not so important in 'a',
1154 # 'm' or 'n' (coming from the 2nd parent) cases
1153 # 'm' or 'n' (coming from the 2nd parent) cases
1155 lfutil.writestandin(repo, f, '', False)
1154 lfutil.writestandin(repo, f, '', False)
1156 for standin in orphans:
1155 for standin in orphans:
1157 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1156 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1158
1157
1159 lfdirstate = lfutil.openlfdirstate(ui, repo)
1158 lfdirstate = lfutil.openlfdirstate(ui, repo)
1160 orphans = set(lfdirstate)
1159 orphans = set(lfdirstate)
1161 lfiles = lfutil.listlfiles(repo)
1160 lfiles = lfutil.listlfiles(repo)
1162 for file in lfiles:
1161 for file in lfiles:
1163 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1162 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1164 orphans.discard(file)
1163 orphans.discard(file)
1165 for lfile in orphans:
1164 for lfile in orphans:
1166 lfdirstate.drop(lfile)
1165 lfdirstate.drop(lfile)
1167 lfdirstate.write()
1166 lfdirstate.write()
1168 finally:
1167 finally:
1169 wlock.release()
1168 wlock.release()
1170 return result
1169 return result
1171
1170
1172 def overridetransplant(orig, ui, repo, *revs, **opts):
1171 def overridetransplant(orig, ui, repo, *revs, **opts):
1173 resuming = opts.get('continue')
1172 resuming = opts.get('continue')
1174 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1173 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1175 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1174 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1176 try:
1175 try:
1177 result = orig(ui, repo, *revs, **opts)
1176 result = orig(ui, repo, *revs, **opts)
1178 finally:
1177 finally:
1179 repo._lfstatuswriters.pop()
1178 repo._lfstatuswriters.pop()
1180 repo._lfcommithooks.pop()
1179 repo._lfcommithooks.pop()
1181 return result
1180 return result
1182
1181
1183 def overridecat(orig, ui, repo, file1, *pats, **opts):
1182 def overridecat(orig, ui, repo, file1, *pats, **opts):
1184 ctx = scmutil.revsingle(repo, opts.get('rev'))
1183 ctx = scmutil.revsingle(repo, opts.get('rev'))
1185 err = 1
1184 err = 1
1186 notbad = set()
1185 notbad = set()
1187 m = scmutil.match(ctx, (file1,) + pats, opts)
1186 m = scmutil.match(ctx, (file1,) + pats, opts)
1188 origmatchfn = m.matchfn
1187 origmatchfn = m.matchfn
1189 def lfmatchfn(f):
1188 def lfmatchfn(f):
1190 if origmatchfn(f):
1189 if origmatchfn(f):
1191 return True
1190 return True
1192 lf = lfutil.splitstandin(f)
1191 lf = lfutil.splitstandin(f)
1193 if lf is None:
1192 if lf is None:
1194 return False
1193 return False
1195 notbad.add(lf)
1194 notbad.add(lf)
1196 return origmatchfn(lf)
1195 return origmatchfn(lf)
1197 m.matchfn = lfmatchfn
1196 m.matchfn = lfmatchfn
1198 origbadfn = m.bad
1197 origbadfn = m.bad
1199 def lfbadfn(f, msg):
1198 def lfbadfn(f, msg):
1200 if not f in notbad:
1199 if not f in notbad:
1201 origbadfn(f, msg)
1200 origbadfn(f, msg)
1202 m.bad = lfbadfn
1201 m.bad = lfbadfn
1203 for f in ctx.walk(m):
1202 for f in ctx.walk(m):
1204 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1203 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1205 pathname=f)
1204 pathname=f)
1206 lf = lfutil.splitstandin(f)
1205 lf = lfutil.splitstandin(f)
1207 if lf is None or origmatchfn(f):
1206 if lf is None or origmatchfn(f):
1208 # duplicating unreachable code from commands.cat
1207 # duplicating unreachable code from commands.cat
1209 data = ctx[f].data()
1208 data = ctx[f].data()
1210 if opts.get('decode'):
1209 if opts.get('decode'):
1211 data = repo.wwritedata(f, data)
1210 data = repo.wwritedata(f, data)
1212 fp.write(data)
1211 fp.write(data)
1213 else:
1212 else:
1214 hash = lfutil.readstandin(repo, lf, ctx.rev())
1213 hash = lfutil.readstandin(repo, lf, ctx.rev())
1215 if not lfutil.inusercache(repo.ui, hash):
1214 if not lfutil.inusercache(repo.ui, hash):
1216 store = basestore._openstore(repo)
1215 store = basestore._openstore(repo)
1217 success, missing = store.get([(lf, hash)])
1216 success, missing = store.get([(lf, hash)])
1218 if len(success) != 1:
1217 if len(success) != 1:
1219 raise util.Abort(
1218 raise util.Abort(
1220 _('largefile %s is not in cache and could not be '
1219 _('largefile %s is not in cache and could not be '
1221 'downloaded') % lf)
1220 'downloaded') % lf)
1222 path = lfutil.usercachepath(repo.ui, hash)
1221 path = lfutil.usercachepath(repo.ui, hash)
1223 fpin = open(path, "rb")
1222 fpin = open(path, "rb")
1224 for chunk in util.filechunkiter(fpin, 128 * 1024):
1223 for chunk in util.filechunkiter(fpin, 128 * 1024):
1225 fp.write(chunk)
1224 fp.write(chunk)
1226 fpin.close()
1225 fpin.close()
1227 fp.close()
1226 fp.close()
1228 err = 0
1227 err = 0
1229 return err
1228 return err
1230
1229
1231 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1230 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1232 *args, **kwargs):
1231 *args, **kwargs):
1233 wlock = repo.wlock()
1232 wlock = repo.wlock()
1234 try:
1233 try:
1235 # branch | | |
1234 # branch | | |
1236 # merge | force | partial | action
1235 # merge | force | partial | action
1237 # -------+-------+---------+--------------
1236 # -------+-------+---------+--------------
1238 # x | x | x | linear-merge
1237 # x | x | x | linear-merge
1239 # o | x | x | branch-merge
1238 # o | x | x | branch-merge
1240 # x | o | x | overwrite (as clean update)
1239 # x | o | x | overwrite (as clean update)
1241 # o | o | x | force-branch-merge (*1)
1240 # o | o | x | force-branch-merge (*1)
1242 # x | x | o | (*)
1241 # x | x | o | (*)
1243 # o | x | o | (*)
1242 # o | x | o | (*)
1244 # x | o | o | overwrite (as revert)
1243 # x | o | o | overwrite (as revert)
1245 # o | o | o | (*)
1244 # o | o | o | (*)
1246 #
1245 #
1247 # (*) don't care
1246 # (*) don't care
1248 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1247 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1249
1248
1250 linearmerge = not branchmerge and not force and not partial
1249 linearmerge = not branchmerge and not force and not partial
1251
1250
1252 if linearmerge or (branchmerge and force and not partial):
1251 if linearmerge or (branchmerge and force and not partial):
1253 # update standins for linear-merge or force-branch-merge,
1252 # update standins for linear-merge or force-branch-merge,
1254 # because largefiles in the working directory may be modified
1253 # because largefiles in the working directory may be modified
1255 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1254 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1256 unsure, s = lfdirstate.status(match_.always(repo.root,
1255 unsure, s = lfdirstate.status(match_.always(repo.root,
1257 repo.getcwd()),
1256 repo.getcwd()),
1258 [], False, False, False)
1257 [], False, False, False)
1259 for lfile in unsure + s.modified + s.added:
1258 for lfile in unsure + s.modified + s.added:
1260 lfutil.updatestandin(repo, lfutil.standin(lfile))
1259 lfutil.updatestandin(repo, lfutil.standin(lfile))
1261
1260
1262 if linearmerge:
1261 if linearmerge:
1263 # Only call updatelfiles on the standins that have changed
1262 # Only call updatelfiles on the standins that have changed
1264 # to save time
1263 # to save time
1265 oldstandins = lfutil.getstandinsstate(repo)
1264 oldstandins = lfutil.getstandinsstate(repo)
1266
1265
1267 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1266 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1268
1267
1269 filelist = None
1268 filelist = None
1270 if linearmerge:
1269 if linearmerge:
1271 newstandins = lfutil.getstandinsstate(repo)
1270 newstandins = lfutil.getstandinsstate(repo)
1272 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1271 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1273
1272
1274 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1273 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1275 normallookup=partial)
1274 normallookup=partial)
1276
1275
1277 return result
1276 return result
1278 finally:
1277 finally:
1279 wlock.release()
1278 wlock.release()
1280
1279
1281 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1280 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1282 result = orig(repo, files, *args, **kwargs)
1281 result = orig(repo, files, *args, **kwargs)
1283
1282
1284 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1283 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1285 if filelist:
1284 if filelist:
1286 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1285 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1287 printmessage=False, normallookup=True)
1286 printmessage=False, normallookup=True)
1288
1287
1289 return result
1288 return result
@@ -1,313 +1,313 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex
9 from node import hex
10 import match as matchmod
10 import match as matchmod
11 import cmdutil
11 import cmdutil
12 import scmutil, util, encoding
12 import scmutil, util, encoding
13 import cStringIO, os, tarfile, time, zipfile
13 import cStringIO, os, tarfile, time, zipfile
14 import zlib, gzip
14 import zlib, gzip
15 import struct
15 import struct
16 import error
16 import error
17
17
18 # from unzip source code:
18 # from unzip source code:
19 _UNX_IFREG = 0x8000
19 _UNX_IFREG = 0x8000
20 _UNX_IFLNK = 0xa000
20 _UNX_IFLNK = 0xa000
21
21
22 def tidyprefix(dest, kind, prefix):
22 def tidyprefix(dest, kind, prefix):
23 '''choose prefix to use for names in archive. make sure prefix is
23 '''choose prefix to use for names in archive. make sure prefix is
24 safe for consumers.'''
24 safe for consumers.'''
25
25
26 if prefix:
26 if prefix:
27 prefix = util.normpath(prefix)
27 prefix = util.normpath(prefix)
28 else:
28 else:
29 if not isinstance(dest, str):
29 if not isinstance(dest, str):
30 raise ValueError('dest must be string if no prefix')
30 raise ValueError('dest must be string if no prefix')
31 prefix = os.path.basename(dest)
31 prefix = os.path.basename(dest)
32 lower = prefix.lower()
32 lower = prefix.lower()
33 for sfx in exts.get(kind, []):
33 for sfx in exts.get(kind, []):
34 if lower.endswith(sfx):
34 if lower.endswith(sfx):
35 prefix = prefix[:-len(sfx)]
35 prefix = prefix[:-len(sfx)]
36 break
36 break
37 lpfx = os.path.normpath(util.localpath(prefix))
37 lpfx = os.path.normpath(util.localpath(prefix))
38 prefix = util.pconvert(lpfx)
38 prefix = util.pconvert(lpfx)
39 if not prefix.endswith('/'):
39 if not prefix.endswith('/'):
40 prefix += '/'
40 prefix += '/'
41 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
41 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
42 raise util.Abort(_('archive prefix contains illegal components'))
42 raise util.Abort(_('archive prefix contains illegal components'))
43 return prefix
43 return prefix
44
44
45 exts = {
45 exts = {
46 'tar': ['.tar'],
46 'tar': ['.tar'],
47 'tbz2': ['.tbz2', '.tar.bz2'],
47 'tbz2': ['.tbz2', '.tar.bz2'],
48 'tgz': ['.tgz', '.tar.gz'],
48 'tgz': ['.tgz', '.tar.gz'],
49 'zip': ['.zip'],
49 'zip': ['.zip'],
50 }
50 }
51
51
52 def guesskind(dest):
52 def guesskind(dest):
53 for kind, extensions in exts.iteritems():
53 for kind, extensions in exts.iteritems():
54 if util.any(dest.endswith(ext) for ext in extensions):
54 if util.any(dest.endswith(ext) for ext in extensions):
55 return kind
55 return kind
56 return None
56 return None
57
57
58
58
59 class tarit(object):
59 class tarit(object):
60 '''write archive to tar file or stream. can write uncompressed,
60 '''write archive to tar file or stream. can write uncompressed,
61 or compress with gzip or bzip2.'''
61 or compress with gzip or bzip2.'''
62
62
63 class GzipFileWithTime(gzip.GzipFile):
63 class GzipFileWithTime(gzip.GzipFile):
64
64
65 def __init__(self, *args, **kw):
65 def __init__(self, *args, **kw):
66 timestamp = None
66 timestamp = None
67 if 'timestamp' in kw:
67 if 'timestamp' in kw:
68 timestamp = kw.pop('timestamp')
68 timestamp = kw.pop('timestamp')
69 if timestamp is None:
69 if timestamp is None:
70 self.timestamp = time.time()
70 self.timestamp = time.time()
71 else:
71 else:
72 self.timestamp = timestamp
72 self.timestamp = timestamp
73 gzip.GzipFile.__init__(self, *args, **kw)
73 gzip.GzipFile.__init__(self, *args, **kw)
74
74
75 def _write_gzip_header(self):
75 def _write_gzip_header(self):
76 self.fileobj.write('\037\213') # magic header
76 self.fileobj.write('\037\213') # magic header
77 self.fileobj.write('\010') # compression method
77 self.fileobj.write('\010') # compression method
78 # Python 2.6 introduced self.name and deprecated self.filename
78 # Python 2.6 introduced self.name and deprecated self.filename
79 try:
79 try:
80 fname = self.name
80 fname = self.name
81 except AttributeError:
81 except AttributeError:
82 fname = self.filename
82 fname = self.filename
83 if fname and fname.endswith('.gz'):
83 if fname and fname.endswith('.gz'):
84 fname = fname[:-3]
84 fname = fname[:-3]
85 flags = 0
85 flags = 0
86 if fname:
86 if fname:
87 flags = gzip.FNAME
87 flags = gzip.FNAME
88 self.fileobj.write(chr(flags))
88 self.fileobj.write(chr(flags))
89 gzip.write32u(self.fileobj, long(self.timestamp))
89 gzip.write32u(self.fileobj, long(self.timestamp))
90 self.fileobj.write('\002')
90 self.fileobj.write('\002')
91 self.fileobj.write('\377')
91 self.fileobj.write('\377')
92 if fname:
92 if fname:
93 self.fileobj.write(fname + '\000')
93 self.fileobj.write(fname + '\000')
94
94
95 def __init__(self, dest, mtime, kind=''):
95 def __init__(self, dest, mtime, kind=''):
96 self.mtime = mtime
96 self.mtime = mtime
97 self.fileobj = None
97 self.fileobj = None
98
98
99 def taropen(name, mode, fileobj=None):
99 def taropen(name, mode, fileobj=None):
100 if kind == 'gz':
100 if kind == 'gz':
101 mode = mode[0]
101 mode = mode[0]
102 if not fileobj:
102 if not fileobj:
103 fileobj = open(name, mode + 'b')
103 fileobj = open(name, mode + 'b')
104 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
104 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
105 zlib.Z_BEST_COMPRESSION,
105 zlib.Z_BEST_COMPRESSION,
106 fileobj, timestamp=mtime)
106 fileobj, timestamp=mtime)
107 self.fileobj = gzfileobj
107 self.fileobj = gzfileobj
108 return tarfile.TarFile.taropen(name, mode, gzfileobj)
108 return tarfile.TarFile.taropen(name, mode, gzfileobj)
109 else:
109 else:
110 return tarfile.open(name, mode + kind, fileobj)
110 return tarfile.open(name, mode + kind, fileobj)
111
111
112 if isinstance(dest, str):
112 if isinstance(dest, str):
113 self.z = taropen(dest, mode='w:')
113 self.z = taropen(dest, mode='w:')
114 else:
114 else:
115 # Python 2.5-2.5.1 have a regression that requires a name arg
115 # Python 2.5-2.5.1 have a regression that requires a name arg
116 self.z = taropen(name='', mode='w|', fileobj=dest)
116 self.z = taropen(name='', mode='w|', fileobj=dest)
117
117
118 def addfile(self, name, mode, islink, data):
118 def addfile(self, name, mode, islink, data):
119 i = tarfile.TarInfo(name)
119 i = tarfile.TarInfo(name)
120 i.mtime = self.mtime
120 i.mtime = self.mtime
121 i.size = len(data)
121 i.size = len(data)
122 if islink:
122 if islink:
123 i.type = tarfile.SYMTYPE
123 i.type = tarfile.SYMTYPE
124 i.mode = 0777
124 i.mode = 0777
125 i.linkname = data
125 i.linkname = data
126 data = None
126 data = None
127 i.size = 0
127 i.size = 0
128 else:
128 else:
129 i.mode = mode
129 i.mode = mode
130 data = cStringIO.StringIO(data)
130 data = cStringIO.StringIO(data)
131 self.z.addfile(i, data)
131 self.z.addfile(i, data)
132
132
    def done(self):
        # Close the tar stream first, then the gzip wrapper (when the
        # archive kind is 'gz') so the gzip trailer gets written out.
        self.z.close()
        if self.fileobj:
            self.fileobj.close()
137
137
class tellable(object):
    '''provide tell method for zipfile.ZipFile when writing to http
    response file object.

    Wraps a write-only stream and tracks the number of bytes written so
    that tell() can be answered without the underlying object supporting
    seeking.'''

    def __init__(self, fp):
        self.fp = fp
        self.offset = 0

    def __getattr__(self, key):
        # anything we do not override is forwarded to the wrapped stream
        return getattr(self.fp, key)

    def write(self, s):
        # write first; only count bytes that actually went out
        self.fp.write(s)
        self.offset += len(s)

    def tell(self):
        return self.offset
155
155
class zipit(object):
    '''write archive to zip file or stream. can write uncompressed,
    or compressed with deflate.'''

    def __init__(self, dest, mtime, compress=True):
        if not isinstance(dest, str):
            try:
                dest.tell()
            except (AttributeError, IOError):
                # stream without a usable tell() (e.g. an http response):
                # wrap it so ZipFile can track the write position
                dest = tellable(dest)
        self.z = zipfile.ZipFile(dest, 'w',
                                 compress and zipfile.ZIP_DEFLATED or
                                 zipfile.ZIP_STORED)

        # Python's zipfile module emits deprecation warnings if we try
        # to store files with a date before 1980.
        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
        if mtime < epoch:
            mtime = epoch

        self.mtime = mtime
        self.date_time = time.gmtime(mtime)[:6]

    def addfile(self, name, mode, islink, data):
        i = zipfile.ZipInfo(name, self.date_time)
        i.compress_type = self.z.compression
        # unzip will not honor unix file modes unless file creator is
        # set to unix (id 3).
        i.create_system = 3
        ftype = _UNX_IFREG
        if islink:
            # for symlinks 'data' holds the target; mark the entry as a
            # link and force a permissive mode
            mode = 0777
            ftype = _UNX_IFLNK
        # unix mode+type lives in the high 16 bits of external_attr
        i.external_attr = (mode | ftype) << 16L
        # add "extended-timestamp" extra block, because zip archives
        # without this will be extracted with unexpected timestamp,
        # if TZ is not configured as GMT
        i.extra += struct.pack('<hhBl',
                               0x5455,     # block type: "extended-timestamp"
                               1 + 4,      # size of this block
                               1,          # "modification time is present"
                               int(self.mtime)) # last modification (UTC)
        self.z.writestr(i, data)

    def done(self):
        # flush central directory and close the zip
        self.z.close()
202
202
class fileit(object):
    '''write archive as files in directory.'''

    def __init__(self, name, mtime):
        # mtime is accepted for interface parity with the other archivers;
        # plain files keep whatever timestamps the filesystem assigns
        self.basedir = name
        self.opener = scmutil.opener(self.basedir)

    def addfile(self, name, mode, islink, data):
        if islink:
            # 'data' carries the symlink target
            self.opener.symlink(data, name)
            return
        # atomictemp: the file only appears at its final path on close()
        out = self.opener(name, "w", atomictemp=True)
        out.write(data)
        out.close()
        os.chmod(os.path.join(self.basedir, name), mode)

    def done(self):
        # nothing to finalize for a bare directory tree
        pass
222
222
# map archive kind (as given to archive()) -> factory taking (dest, mtime)
archivers = {
    'files': fileit,
    'tar': tarit,
    'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
    'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
    'uzip': lambda name, mtime: zipit(name, mtime, False),  # uncompressed zip
    'zip': zipit,
    }
231
231
def archive(repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None, mtime=None, subrepos=False):
    '''create archive of repo as it was at node.

    dest can be name of directory, name of archive file, or file
    object to write archive to.

    kind is type of archive to create.

    decode tells whether to put files through decode filters from
    hgrc.

    matchfn is function to filter names of files to write to archive.

    prefix is name of path to put before every archive member.

    mtime, if given, overrides the changeset date used for member
    timestamps.  subrepos=True recurses into subrepositories.

    Returns the number of files archived; raises if nothing matched.'''

    if kind == 'files':
        if prefix:
            raise util.Abort(_('cannot give prefix when archiving to files'))
    else:
        prefix = tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        # getdata is a callable so file contents are read lazily,
        # only for members that are actually archived
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    if kind not in archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]
    # default member timestamps to the changeset date
    archiver = archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        def metadata():
            # identification block written into .hg_archival.txt
            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
                repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))

            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                           if repo.tagtype(t) == 'global')
            if not tags:
                # no global tag on this changeset: record the nearest
                # ancestor tag and distance instead, via the templater
                repo.ui.pushbuffer()
                opts = {'template': '{latesttag}\n{latesttagdistance}',
                        'style': '', 'patch': None, 'git': None}
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split('\n')
                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
                tags += 'latesttagdistance: %s\n' % dist

            return base + tags

        name = '.hg_archival.txt'
        if not matchfn or matchfn(name):
            write(name, 0644, False, metadata)

    if matchfn:
        files = [f for f in ctx.manifest().keys() if matchfn(f)]
    else:
        files = ctx.manifest().keys()
    total = len(files)
    if total:
        files.sort()
        repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
        for i, f in enumerate(files):
            ff = ctx.flags(f)
            write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
            repo.ui.progress(_('archiving'), i + 1, item=f,
                             unit=_('files'), total=total)
        repo.ui.progress(_('archiving'), None)

    if subrepos:
        for subpath in sorted(ctx.substate):
            sub = ctx.sub(subpath)
            # narrow the match to names inside this subrepo
            submatch = matchmod.narrowmatcher(subpath, matchfn)
            total += sub.archive(archiver, prefix, submatch)

    if total == 0:
        # NOTE(review): error.Abort here vs util.Abort above — presumably
        # equivalent aliases in this codebase, but confirm for consistency
        raise error.Abort(_('no files match the archive pattern'))

    archiver.done()
    return total
@@ -1,1676 +1,1676 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import copy
8 import copy
9 import errno, os, re, shutil, posixpath, sys
9 import errno, os, re, shutil, posixpath, sys
10 import xml.dom.minidom
10 import xml.dom.minidom
11 import stat, subprocess, tarfile
11 import stat, subprocess, tarfile
12 from i18n import _
12 from i18n import _
13 import config, util, node, error, cmdutil, scmutil, match as matchmod
13 import config, util, node, error, cmdutil, scmutil, match as matchmod
14 import phases
14 import phases
15 import pathutil
15 import pathutil
16 import exchange
16 import exchange
17 hg = None
17 hg = None
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 nullstate = ('', '', 'empty')
20 nullstate = ('', '', 'empty')
21
21
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    expanded = util.urllocalpath(util.expandpath(path))
    u = util.url(expanded)
    if u.scheme:
        # a real URL (ssh://, http://, ...): return the input untouched
        return path
    return util.normpath(os.path.abspath(u.path))
31
31
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    # hash the canonicalized path and keep a short, filename-safe prefix
    digest = util.sha1(_expandedabspath(remotepath)).hexdigest()
    return digest[:12]
35
35
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""

    def __init__(self, *args, **kw):
        super(SubrepoAbort, self).__init__(*args, **kw)
        # keep which subrepo failed and the original exc_info triple so
        # outer layers can report without re-annotating
        self.subrepo = kw.get('subrepo')
        self.cause = kw.get('cause')
42
42
def annotatesubrepoerror(func):
    # Decorator for subrepo methods: tag any Abort raised inside with the
    # subrepo's path, re-raising it as SubrepoAbort so that nested subrepo
    # calls do not annotate the same error more than once.
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort, ex:
            # This exception has already been handled
            raise ex
        except error.Abort, ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
58
58
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    def read(f, sections=None, remap=None):
        # parse a spec file from the changectx; 'read' is passed back to
        # p.parse so %include directives recurse through this same path
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError, err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file %s not found\n") % f)
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise util.Abort(_("subrepo spec file %s not found") % f)

    if '.hgsub' in ctx:
        read('.hgsub')

    # user-configured [subpaths] rewrites participate alongside any
    # [subpaths] section from .hgsub itself
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # map subrepo path -> pinned revision, from .hgsubstate
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise util.Abort(_("invalid subrepository revision "
                                       "specifier in .hgsubstate line %d")
                                     % (i + 1))
                rev[path] = revision
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise

    def remap(src):
        # apply every [subpaths] pattern, first match of each, in order
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error, e:
                raise util.Abort(_("bad subrepository pattern in %s: %s")
                                 % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        if src.startswith('['):
            # optional "[kind]source" prefix selects the subrepo type
            if ']' not in src:
                raise util.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'

        if not util.url(src).isabs():
            parent = _abssource(ctx._repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped

        # NOTE(review): a relative src reaching here has already gone
        # through remap() once above, so substitutions are applied twice;
        # harmless only if the [subpaths] rules are idempotent — confirm.
        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)

    return state
148
148
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    # one "<revision> <path>" line per subrepo, sorted by path
    content = ''.join('%s %s\n' % (state[path][1], path)
                      for path in sorted(state))
    repo.wwrite('.hgsubstate', content, '')
153
153
def submerge(repo, wctx, mctx, actx, overwrite):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context

    Compares local (wctx), remote (mctx) and ancestor (actx) subrepo
    states entry by entry, updating subrepos and prompting the user
    where the three-way comparison is ambiguous.  Returns (and writes
    back) the merged state mapping."""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    sm = {}

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug("  subrepo %s: %s %s\n" % (s, msg, r))

    # pass 1: everything present locally
    for s, l in sorted(s1.iteritems()):
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            # append '+' so a dirty checkout never compares equal
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                if repo.ui.promptchoice(
                    _(' subrepository sources for %s differ\n'
                      'use (l)ocal source (%s) or (r)emote source (%s)?'
                      '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                # both revisions moved: let the user pick merge/local/remote
                debug(s, "both sides changed")
                srepo = wctx.sub(s)
                option = repo.ui.promptchoice(
                    _(' subrepository %s diverged (local revision: %s, '
                      'remote revision: %s)\n'
                      '(M)erge, keep (l)ocal or keep (r)emote?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # local changed it, remote removed it: ask
            if repo.ui.promptchoice(
                _(' local changed subrepository %s which remote removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # pass 2: entries that exist only on the remote side
    for s, r in sorted(s2.items()):
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed it, local removed it: ask
            if repo.ui.promptchoice(
                _(' remote changed subrepository %s which local removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0) == 0:
                debug(s, "prompt recreate", r)
                wctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
254
254
def _updateprompt(ui, sub, dirty, local, remote):
    # Ask the user whether to keep the local or take the remote subrepo
    # source; the wording differs depending on whether the checkout is
    # dirty.  Returns the promptchoice index (0 = local).
    if dirty:
        template = _(' subrepository sources for %s differ\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    else:
        template = _(' subrepository sources for %s differ (in checked out '
                     'version)\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    msg = template % (subrelpath(sub), local, remote)
    return ui.promptchoice(msg, 0)
268
268
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # climb the _subparent chain up to the outermost repository
    outer = repo
    while util.safehasattr(outer, '_subparent'):
        outer = outer._subparent
    # strip the outer root (with trailing separator) off our own root
    return repo.root[len(pathutil.normasprefix(outer.root)):]
275
275
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    # cached answer, if the subrepo object carries one
    if util.safehasattr(sub, '_relpath'):
        return sub._relpath
    # hg subrepos have a _repo we can resolve through reporelpath()
    if util.safehasattr(sub, '_repo'):
        return reporelpath(sub._repo)
    # non-hg subrepos (git/svn) only know their configured path
    return sub._path
283
283
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # we are a subrepo: resolve our source relative to the parent's
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        # recurse upward; abort=False so the final abort decision is ours
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        if util.safehasattr(repo, '_subtoppath'):
            # push/pull in progress: use the path it was invoked with
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.sharedpath != repo.path:
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    # fell through every source: abort or (abort=False) return None
    if abort:
        raise util.Abort(_("default path for subrepository not found"))
310
310
def _sanitize(ui, path, ignore):
    # Walk the tree, pruning the 'ignore' directory (case-insensitive)
    # and deleting any 'hgrc' found inside a '.hg' directory, since a
    # hostile subrepo could use it to run arbitrary commands.
    for dirname, subdirs, names in os.walk(path):
        lowered = [d.lower() for d in subdirs]
        if ignore in lowered:
            # mutate subdirs in place so os.walk never descends into it
            del subdirs[lowered.index(ignore)]
        if os.path.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % dirname)
                os.unlink(os.path.join(dirname, f))
324
324
def subrepo(ctx, path):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    import hg as h
    hg = h

    # refuse subrepo paths that would escape the repository root
    pathutil.pathauditor(ctx._repo.root)(path)
    state = ctx.substate[path]
    # state is (source, revision, kind); kind selects the subrepo class
    if state[2] not in types:
        raise util.Abort(_('unknown subrepo type %s') % state[2])
    return types[state[2]](ctx, path, state[:2])
340
340
341 def newcommitphase(ui, ctx):
341 def newcommitphase(ui, ctx):
342 commitphase = phases.newcommitphase(ui)
342 commitphase = phases.newcommitphase(ui)
343 substate = getattr(ctx, "substate", None)
343 substate = getattr(ctx, "substate", None)
344 if not substate:
344 if not substate:
345 return commitphase
345 return commitphase
346 check = ui.config('phases', 'checksubrepos', 'follow')
346 check = ui.config('phases', 'checksubrepos', 'follow')
347 if check not in ('ignore', 'follow', 'abort'):
347 if check not in ('ignore', 'follow', 'abort'):
348 raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
348 raise util.Abort(_('invalid phases.checksubrepos configuration: %s')
349 % (check))
349 % (check))
350 if check == 'ignore':
350 if check == 'ignore':
351 return commitphase
351 return commitphase
352 maxphase = phases.public
352 maxphase = phases.public
353 maxsub = None
353 maxsub = None
354 for s in sorted(substate):
354 for s in sorted(substate):
355 sub = ctx.sub(s)
355 sub = ctx.sub(s)
356 subphase = sub.phase(substate[s][1])
356 subphase = sub.phase(substate[s][1])
357 if maxphase < subphase:
357 if maxphase < subphase:
358 maxphase = subphase
358 maxphase = subphase
359 maxsub = s
359 maxsub = s
360 if commitphase < maxphase:
360 if commitphase < maxphase:
361 if check == 'abort':
361 if check == 'abort':
362 raise util.Abort(_("can't commit in %s phase"
362 raise util.Abort(_("can't commit in %s phase"
363 " conflicting %s from subrepository %s") %
363 " conflicting %s from subrepository %s") %
364 (phases.phasenames[commitphase],
364 (phases.phasenames[commitphase],
365 phases.phasenames[maxphase], maxsub))
365 phases.phasenames[maxphase], maxsub))
366 ui.warn(_("warning: changes are committed in"
366 ui.warn(_("warning: changes are committed in"
367 " %s phase from subrepository %s\n") %
367 " %s phase from subrepository %s\n") %
368 (phases.phasenames[maxphase], maxsub))
368 (phases.phasenames[maxphase], maxsub))
369 return maxphase
369 return maxphase
370 return commitphase
370 return commitphase
371
371
372 # subrepo classes need to implement the following abstract class:
372 # subrepo classes need to implement the following abstract class:
373
373
374 class abstractsubrepo(object):
374 class abstractsubrepo(object):
375
375
376 def __init__(self, ui):
376 def __init__(self, ui):
377 self.ui = ui
377 self.ui = ui
378
378
379 def storeclean(self, path):
379 def storeclean(self, path):
380 """
380 """
381 returns true if the repository has not changed since it was last
381 returns true if the repository has not changed since it was last
382 cloned from or pushed to a given repository.
382 cloned from or pushed to a given repository.
383 """
383 """
384 return False
384 return False
385
385
386 def dirty(self, ignoreupdate=False):
386 def dirty(self, ignoreupdate=False):
387 """returns true if the dirstate of the subrepo is dirty or does not
387 """returns true if the dirstate of the subrepo is dirty or does not
388 match current stored state. If ignoreupdate is true, only check
388 match current stored state. If ignoreupdate is true, only check
389 whether the subrepo has uncommitted changes in its dirstate.
389 whether the subrepo has uncommitted changes in its dirstate.
390 """
390 """
391 raise NotImplementedError
391 raise NotImplementedError
392
392
393 def basestate(self):
393 def basestate(self):
394 """current working directory base state, disregarding .hgsubstate
394 """current working directory base state, disregarding .hgsubstate
395 state and working directory modifications"""
395 state and working directory modifications"""
396 raise NotImplementedError
396 raise NotImplementedError
397
397
398 def checknested(self, path):
398 def checknested(self, path):
399 """check if path is a subrepository within this repository"""
399 """check if path is a subrepository within this repository"""
400 return False
400 return False
401
401
402 def commit(self, text, user, date):
402 def commit(self, text, user, date):
403 """commit the current changes to the subrepo with the given
403 """commit the current changes to the subrepo with the given
404 log message. Use given user and date if possible. Return the
404 log message. Use given user and date if possible. Return the
405 new state of the subrepo.
405 new state of the subrepo.
406 """
406 """
407 raise NotImplementedError
407 raise NotImplementedError
408
408
409 def phase(self, state):
409 def phase(self, state):
410 """returns phase of specified state in the subrepository.
410 """returns phase of specified state in the subrepository.
411 """
411 """
412 return phases.public
412 return phases.public
413
413
414 def remove(self):
414 def remove(self):
415 """remove the subrepo
415 """remove the subrepo
416
416
417 (should verify the dirstate is not dirty first)
417 (should verify the dirstate is not dirty first)
418 """
418 """
419 raise NotImplementedError
419 raise NotImplementedError
420
420
421 def get(self, state, overwrite=False):
421 def get(self, state, overwrite=False):
422 """run whatever commands are needed to put the subrepo into
422 """run whatever commands are needed to put the subrepo into
423 this state
423 this state
424 """
424 """
425 raise NotImplementedError
425 raise NotImplementedError
426
426
427 def merge(self, state):
427 def merge(self, state):
428 """merge currently-saved state with the new state."""
428 """merge currently-saved state with the new state."""
429 raise NotImplementedError
429 raise NotImplementedError
430
430
431 def push(self, opts):
431 def push(self, opts):
432 """perform whatever action is analogous to 'hg push'
432 """perform whatever action is analogous to 'hg push'
433
433
434 This may be a no-op on some systems.
434 This may be a no-op on some systems.
435 """
435 """
436 raise NotImplementedError
436 raise NotImplementedError
437
437
438 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
438 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
439 return []
439 return []
440
440
441 def addremove(self, matcher, prefix, opts, dry_run, similarity):
441 def addremove(self, matcher, prefix, opts, dry_run, similarity):
442 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
442 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
443 return 1
443 return 1
444
444
445 def cat(self, ui, match, prefix, **opts):
445 def cat(self, ui, match, prefix, **opts):
446 return 1
446 return 1
447
447
448 def status(self, rev2, **opts):
448 def status(self, rev2, **opts):
449 return scmutil.status([], [], [], [], [], [], [])
449 return scmutil.status([], [], [], [], [], [], [])
450
450
451 def diff(self, ui, diffopts, node2, match, prefix, **opts):
451 def diff(self, ui, diffopts, node2, match, prefix, **opts):
452 pass
452 pass
453
453
454 def outgoing(self, ui, dest, opts):
454 def outgoing(self, ui, dest, opts):
455 return 1
455 return 1
456
456
457 def incoming(self, ui, source, opts):
457 def incoming(self, ui, source, opts):
458 return 1
458 return 1
459
459
460 def files(self):
460 def files(self):
461 """return filename iterator"""
461 """return filename iterator"""
462 raise NotImplementedError
462 raise NotImplementedError
463
463
464 def filedata(self, name):
464 def filedata(self, name):
465 """return file data"""
465 """return file data"""
466 raise NotImplementedError
466 raise NotImplementedError
467
467
468 def fileflags(self, name):
468 def fileflags(self, name):
469 """return file flags"""
469 """return file flags"""
470 return ''
470 return ''
471
471
472 def archive(self, ui, archiver, prefix, match=None):
472 def archive(self, archiver, prefix, match=None):
473 if match is not None:
473 if match is not None:
474 files = [f for f in self.files() if match(f)]
474 files = [f for f in self.files() if match(f)]
475 else:
475 else:
476 files = self.files()
476 files = self.files()
477 total = len(files)
477 total = len(files)
478 relpath = subrelpath(self)
478 relpath = subrelpath(self)
479 ui.progress(_('archiving (%s)') % relpath, 0,
479 self.ui.progress(_('archiving (%s)') % relpath, 0,
480 unit=_('files'), total=total)
480 unit=_('files'), total=total)
481 for i, name in enumerate(files):
481 for i, name in enumerate(files):
482 flags = self.fileflags(name)
482 flags = self.fileflags(name)
483 mode = 'x' in flags and 0755 or 0644
483 mode = 'x' in flags and 0755 or 0644
484 symlink = 'l' in flags
484 symlink = 'l' in flags
485 archiver.addfile(os.path.join(prefix, self._path, name),
485 archiver.addfile(os.path.join(prefix, self._path, name),
486 mode, symlink, self.filedata(name))
486 mode, symlink, self.filedata(name))
487 ui.progress(_('archiving (%s)') % relpath, i + 1,
487 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
488 unit=_('files'), total=total)
488 unit=_('files'), total=total)
489 ui.progress(_('archiving (%s)') % relpath, None)
489 self.ui.progress(_('archiving (%s)') % relpath, None)
490 return total
490 return total
491
491
492 def walk(self, match):
492 def walk(self, match):
493 '''
493 '''
494 walk recursively through the directory tree, finding all files
494 walk recursively through the directory tree, finding all files
495 matched by the match function
495 matched by the match function
496 '''
496 '''
497 pass
497 pass
498
498
499 def forget(self, ui, match, prefix):
499 def forget(self, ui, match, prefix):
500 return ([], [])
500 return ([], [])
501
501
502 def removefiles(self, ui, matcher, prefix, after, force, subrepos):
502 def removefiles(self, ui, matcher, prefix, after, force, subrepos):
503 """remove the matched files from the subrepository and the filesystem,
503 """remove the matched files from the subrepository and the filesystem,
504 possibly by force and/or after the file has been removed from the
504 possibly by force and/or after the file has been removed from the
505 filesystem. Return 0 on success, 1 on any warning.
505 filesystem. Return 0 on success, 1 on any warning.
506 """
506 """
507 return 1
507 return 1
508
508
509 def revert(self, ui, substate, *pats, **opts):
509 def revert(self, ui, substate, *pats, **opts):
510 ui.warn('%s: reverting %s subrepos is unsupported\n' \
510 ui.warn('%s: reverting %s subrepos is unsupported\n' \
511 % (substate[0], substate[2]))
511 % (substate[0], substate[2]))
512 return []
512 return []
513
513
514 def shortid(self, revid):
514 def shortid(self, revid):
515 return revid
515 return revid
516
516
517 class hgsubrepo(abstractsubrepo):
517 class hgsubrepo(abstractsubrepo):
518 def __init__(self, ctx, path, state):
518 def __init__(self, ctx, path, state):
519 super(hgsubrepo, self).__init__(ctx._repo.ui)
519 super(hgsubrepo, self).__init__(ctx._repo.ui)
520 self._path = path
520 self._path = path
521 self._state = state
521 self._state = state
522 r = ctx._repo
522 r = ctx._repo
523 root = r.wjoin(path)
523 root = r.wjoin(path)
524 create = not r.wvfs.exists('%s/.hg' % path)
524 create = not r.wvfs.exists('%s/.hg' % path)
525 self._repo = hg.repository(r.baseui, root, create=create)
525 self._repo = hg.repository(r.baseui, root, create=create)
526 self.ui = self._repo.ui
526 self.ui = self._repo.ui
527 for s, k in [('ui', 'commitsubrepos')]:
527 for s, k in [('ui', 'commitsubrepos')]:
528 v = r.ui.config(s, k)
528 v = r.ui.config(s, k)
529 if v:
529 if v:
530 self.ui.setconfig(s, k, v, 'subrepo')
530 self.ui.setconfig(s, k, v, 'subrepo')
531 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
531 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
532 self._initrepo(r, state[0], create)
532 self._initrepo(r, state[0], create)
533
533
534 def storeclean(self, path):
534 def storeclean(self, path):
535 lock = self._repo.lock()
535 lock = self._repo.lock()
536 try:
536 try:
537 return self._storeclean(path)
537 return self._storeclean(path)
538 finally:
538 finally:
539 lock.release()
539 lock.release()
540
540
541 def _storeclean(self, path):
541 def _storeclean(self, path):
542 clean = True
542 clean = True
543 itercache = self._calcstorehash(path)
543 itercache = self._calcstorehash(path)
544 try:
544 try:
545 for filehash in self._readstorehashcache(path):
545 for filehash in self._readstorehashcache(path):
546 if filehash != itercache.next():
546 if filehash != itercache.next():
547 clean = False
547 clean = False
548 break
548 break
549 except StopIteration:
549 except StopIteration:
550 # the cached and current pull states have a different size
550 # the cached and current pull states have a different size
551 clean = False
551 clean = False
552 if clean:
552 if clean:
553 try:
553 try:
554 itercache.next()
554 itercache.next()
555 # the cached and current pull states have a different size
555 # the cached and current pull states have a different size
556 clean = False
556 clean = False
557 except StopIteration:
557 except StopIteration:
558 pass
558 pass
559 return clean
559 return clean
560
560
561 def _calcstorehash(self, remotepath):
561 def _calcstorehash(self, remotepath):
562 '''calculate a unique "store hash"
562 '''calculate a unique "store hash"
563
563
564 This method is used to to detect when there are changes that may
564 This method is used to to detect when there are changes that may
565 require a push to a given remote path.'''
565 require a push to a given remote path.'''
566 # sort the files that will be hashed in increasing (likely) file size
566 # sort the files that will be hashed in increasing (likely) file size
567 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
567 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
568 yield '# %s\n' % _expandedabspath(remotepath)
568 yield '# %s\n' % _expandedabspath(remotepath)
569 vfs = self._repo.vfs
569 vfs = self._repo.vfs
570 for relname in filelist:
570 for relname in filelist:
571 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
571 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
572 yield '%s = %s\n' % (relname, filehash)
572 yield '%s = %s\n' % (relname, filehash)
573
573
574 @propertycache
574 @propertycache
575 def _cachestorehashvfs(self):
575 def _cachestorehashvfs(self):
576 return scmutil.vfs(self._repo.join('cache/storehash'))
576 return scmutil.vfs(self._repo.join('cache/storehash'))
577
577
578 def _readstorehashcache(self, remotepath):
578 def _readstorehashcache(self, remotepath):
579 '''read the store hash cache for a given remote repository'''
579 '''read the store hash cache for a given remote repository'''
580 cachefile = _getstorehashcachename(remotepath)
580 cachefile = _getstorehashcachename(remotepath)
581 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
581 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
582
582
583 def _cachestorehash(self, remotepath):
583 def _cachestorehash(self, remotepath):
584 '''cache the current store hash
584 '''cache the current store hash
585
585
586 Each remote repo requires its own store hash cache, because a subrepo
586 Each remote repo requires its own store hash cache, because a subrepo
587 store may be "clean" versus a given remote repo, but not versus another
587 store may be "clean" versus a given remote repo, but not versus another
588 '''
588 '''
589 cachefile = _getstorehashcachename(remotepath)
589 cachefile = _getstorehashcachename(remotepath)
590 lock = self._repo.lock()
590 lock = self._repo.lock()
591 try:
591 try:
592 storehash = list(self._calcstorehash(remotepath))
592 storehash = list(self._calcstorehash(remotepath))
593 vfs = self._cachestorehashvfs
593 vfs = self._cachestorehashvfs
594 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
594 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
595 finally:
595 finally:
596 lock.release()
596 lock.release()
597
597
598 @annotatesubrepoerror
598 @annotatesubrepoerror
599 def _initrepo(self, parentrepo, source, create):
599 def _initrepo(self, parentrepo, source, create):
600 self._repo._subparent = parentrepo
600 self._repo._subparent = parentrepo
601 self._repo._subsource = source
601 self._repo._subsource = source
602
602
603 if create:
603 if create:
604 lines = ['[paths]\n']
604 lines = ['[paths]\n']
605
605
606 def addpathconfig(key, value):
606 def addpathconfig(key, value):
607 if value:
607 if value:
608 lines.append('%s = %s\n' % (key, value))
608 lines.append('%s = %s\n' % (key, value))
609 self.ui.setconfig('paths', key, value, 'subrepo')
609 self.ui.setconfig('paths', key, value, 'subrepo')
610
610
611 defpath = _abssource(self._repo, abort=False)
611 defpath = _abssource(self._repo, abort=False)
612 defpushpath = _abssource(self._repo, True, abort=False)
612 defpushpath = _abssource(self._repo, True, abort=False)
613 addpathconfig('default', defpath)
613 addpathconfig('default', defpath)
614 if defpath != defpushpath:
614 if defpath != defpushpath:
615 addpathconfig('default-push', defpushpath)
615 addpathconfig('default-push', defpushpath)
616
616
617 fp = self._repo.opener("hgrc", "w", text=True)
617 fp = self._repo.opener("hgrc", "w", text=True)
618 try:
618 try:
619 fp.write(''.join(lines))
619 fp.write(''.join(lines))
620 finally:
620 finally:
621 fp.close()
621 fp.close()
622
622
623 @annotatesubrepoerror
623 @annotatesubrepoerror
624 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
624 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
625 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
625 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
626 os.path.join(prefix, self._path), explicitonly)
626 os.path.join(prefix, self._path), explicitonly)
627
627
628 def addremove(self, m, prefix, opts, dry_run, similarity):
628 def addremove(self, m, prefix, opts, dry_run, similarity):
629 # In the same way as sub directories are processed, once in a subrepo,
629 # In the same way as sub directories are processed, once in a subrepo,
630 # always entry any of its subrepos. Don't corrupt the options that will
630 # always entry any of its subrepos. Don't corrupt the options that will
631 # be used to process sibling subrepos however.
631 # be used to process sibling subrepos however.
632 opts = copy.copy(opts)
632 opts = copy.copy(opts)
633 opts['subrepos'] = True
633 opts['subrepos'] = True
634 return scmutil.addremove(self._repo, m,
634 return scmutil.addremove(self._repo, m,
635 os.path.join(prefix, self._path), opts,
635 os.path.join(prefix, self._path), opts,
636 dry_run, similarity)
636 dry_run, similarity)
637
637
638 @annotatesubrepoerror
638 @annotatesubrepoerror
639 def cat(self, ui, match, prefix, **opts):
639 def cat(self, ui, match, prefix, **opts):
640 rev = self._state[1]
640 rev = self._state[1]
641 ctx = self._repo[rev]
641 ctx = self._repo[rev]
642 return cmdutil.cat(ui, self._repo, ctx, match, prefix, **opts)
642 return cmdutil.cat(ui, self._repo, ctx, match, prefix, **opts)
643
643
644 @annotatesubrepoerror
644 @annotatesubrepoerror
645 def status(self, rev2, **opts):
645 def status(self, rev2, **opts):
646 try:
646 try:
647 rev1 = self._state[1]
647 rev1 = self._state[1]
648 ctx1 = self._repo[rev1]
648 ctx1 = self._repo[rev1]
649 ctx2 = self._repo[rev2]
649 ctx2 = self._repo[rev2]
650 return self._repo.status(ctx1, ctx2, **opts)
650 return self._repo.status(ctx1, ctx2, **opts)
651 except error.RepoLookupError, inst:
651 except error.RepoLookupError, inst:
652 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
652 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
653 % (inst, subrelpath(self)))
653 % (inst, subrelpath(self)))
654 return scmutil.status([], [], [], [], [], [], [])
654 return scmutil.status([], [], [], [], [], [], [])
655
655
656 @annotatesubrepoerror
656 @annotatesubrepoerror
657 def diff(self, ui, diffopts, node2, match, prefix, **opts):
657 def diff(self, ui, diffopts, node2, match, prefix, **opts):
658 try:
658 try:
659 node1 = node.bin(self._state[1])
659 node1 = node.bin(self._state[1])
660 # We currently expect node2 to come from substate and be
660 # We currently expect node2 to come from substate and be
661 # in hex format
661 # in hex format
662 if node2 is not None:
662 if node2 is not None:
663 node2 = node.bin(node2)
663 node2 = node.bin(node2)
664 cmdutil.diffordiffstat(ui, self._repo, diffopts,
664 cmdutil.diffordiffstat(ui, self._repo, diffopts,
665 node1, node2, match,
665 node1, node2, match,
666 prefix=posixpath.join(prefix, self._path),
666 prefix=posixpath.join(prefix, self._path),
667 listsubrepos=True, **opts)
667 listsubrepos=True, **opts)
668 except error.RepoLookupError, inst:
668 except error.RepoLookupError, inst:
669 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
669 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
670 % (inst, subrelpath(self)))
670 % (inst, subrelpath(self)))
671
671
672 @annotatesubrepoerror
672 @annotatesubrepoerror
673 def archive(self, ui, archiver, prefix, match=None):
673 def archive(self, archiver, prefix, match=None):
674 self._get(self._state + ('hg',))
674 self._get(self._state + ('hg',))
675 total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
675 total = abstractsubrepo.archive(self, archiver, prefix, match)
676 rev = self._state[1]
676 rev = self._state[1]
677 ctx = self._repo[rev]
677 ctx = self._repo[rev]
678 for subpath in ctx.substate:
678 for subpath in ctx.substate:
679 s = subrepo(ctx, subpath)
679 s = subrepo(ctx, subpath)
680 submatch = matchmod.narrowmatcher(subpath, match)
680 submatch = matchmod.narrowmatcher(subpath, match)
681 total += s.archive(
681 total += s.archive(
682 ui, archiver, os.path.join(prefix, self._path), submatch)
682 archiver, os.path.join(prefix, self._path), submatch)
683 return total
683 return total
684
684
685 @annotatesubrepoerror
685 @annotatesubrepoerror
686 def dirty(self, ignoreupdate=False):
686 def dirty(self, ignoreupdate=False):
687 r = self._state[1]
687 r = self._state[1]
688 if r == '' and not ignoreupdate: # no state recorded
688 if r == '' and not ignoreupdate: # no state recorded
689 return True
689 return True
690 w = self._repo[None]
690 w = self._repo[None]
691 if r != w.p1().hex() and not ignoreupdate:
691 if r != w.p1().hex() and not ignoreupdate:
692 # different version checked out
692 # different version checked out
693 return True
693 return True
694 return w.dirty() # working directory changed
694 return w.dirty() # working directory changed
695
695
696 def basestate(self):
696 def basestate(self):
697 return self._repo['.'].hex()
697 return self._repo['.'].hex()
698
698
699 def checknested(self, path):
699 def checknested(self, path):
700 return self._repo._checknested(self._repo.wjoin(path))
700 return self._repo._checknested(self._repo.wjoin(path))
701
701
702 @annotatesubrepoerror
702 @annotatesubrepoerror
703 def commit(self, text, user, date):
703 def commit(self, text, user, date):
704 # don't bother committing in the subrepo if it's only been
704 # don't bother committing in the subrepo if it's only been
705 # updated
705 # updated
706 if not self.dirty(True):
706 if not self.dirty(True):
707 return self._repo['.'].hex()
707 return self._repo['.'].hex()
708 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
708 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
709 n = self._repo.commit(text, user, date)
709 n = self._repo.commit(text, user, date)
710 if not n:
710 if not n:
711 return self._repo['.'].hex() # different version checked out
711 return self._repo['.'].hex() # different version checked out
712 return node.hex(n)
712 return node.hex(n)
713
713
714 @annotatesubrepoerror
714 @annotatesubrepoerror
715 def phase(self, state):
715 def phase(self, state):
716 return self._repo[state].phase()
716 return self._repo[state].phase()
717
717
718 @annotatesubrepoerror
718 @annotatesubrepoerror
719 def remove(self):
719 def remove(self):
720 # we can't fully delete the repository as it may contain
720 # we can't fully delete the repository as it may contain
721 # local-only history
721 # local-only history
722 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
722 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
723 hg.clean(self._repo, node.nullid, False)
723 hg.clean(self._repo, node.nullid, False)
724
724
725 def _get(self, state):
725 def _get(self, state):
726 source, revision, kind = state
726 source, revision, kind = state
727 if revision in self._repo.unfiltered():
727 if revision in self._repo.unfiltered():
728 return True
728 return True
729 self._repo._subsource = source
729 self._repo._subsource = source
730 srcurl = _abssource(self._repo)
730 srcurl = _abssource(self._repo)
731 other = hg.peer(self._repo, {}, srcurl)
731 other = hg.peer(self._repo, {}, srcurl)
732 if len(self._repo) == 0:
732 if len(self._repo) == 0:
733 self.ui.status(_('cloning subrepo %s from %s\n')
733 self.ui.status(_('cloning subrepo %s from %s\n')
734 % (subrelpath(self), srcurl))
734 % (subrelpath(self), srcurl))
735 parentrepo = self._repo._subparent
735 parentrepo = self._repo._subparent
736 shutil.rmtree(self._repo.path)
736 shutil.rmtree(self._repo.path)
737 other, cloned = hg.clone(self._repo._subparent.baseui, {},
737 other, cloned = hg.clone(self._repo._subparent.baseui, {},
738 other, self._repo.root,
738 other, self._repo.root,
739 update=False)
739 update=False)
740 self._repo = cloned.local()
740 self._repo = cloned.local()
741 self._initrepo(parentrepo, source, create=True)
741 self._initrepo(parentrepo, source, create=True)
742 self._cachestorehash(srcurl)
742 self._cachestorehash(srcurl)
743 else:
743 else:
744 self.ui.status(_('pulling subrepo %s from %s\n')
744 self.ui.status(_('pulling subrepo %s from %s\n')
745 % (subrelpath(self), srcurl))
745 % (subrelpath(self), srcurl))
746 cleansub = self.storeclean(srcurl)
746 cleansub = self.storeclean(srcurl)
747 exchange.pull(self._repo, other)
747 exchange.pull(self._repo, other)
748 if cleansub:
748 if cleansub:
749 # keep the repo clean after pull
749 # keep the repo clean after pull
750 self._cachestorehash(srcurl)
750 self._cachestorehash(srcurl)
751 return False
751 return False
752
752
753 @annotatesubrepoerror
753 @annotatesubrepoerror
754 def get(self, state, overwrite=False):
754 def get(self, state, overwrite=False):
755 inrepo = self._get(state)
755 inrepo = self._get(state)
756 source, revision, kind = state
756 source, revision, kind = state
757 repo = self._repo
757 repo = self._repo
758 repo.ui.debug("getting subrepo %s\n" % self._path)
758 repo.ui.debug("getting subrepo %s\n" % self._path)
759 if inrepo:
759 if inrepo:
760 urepo = repo.unfiltered()
760 urepo = repo.unfiltered()
761 ctx = urepo[revision]
761 ctx = urepo[revision]
762 if ctx.hidden():
762 if ctx.hidden():
763 urepo.ui.warn(
763 urepo.ui.warn(
764 _('revision %s in subrepo %s is hidden\n') \
764 _('revision %s in subrepo %s is hidden\n') \
765 % (revision[0:12], self._path))
765 % (revision[0:12], self._path))
766 repo = urepo
766 repo = urepo
767 hg.updaterepo(repo, revision, overwrite)
767 hg.updaterepo(repo, revision, overwrite)
768
768
769 @annotatesubrepoerror
769 @annotatesubrepoerror
770 def merge(self, state):
770 def merge(self, state):
771 self._get(state)
771 self._get(state)
772 cur = self._repo['.']
772 cur = self._repo['.']
773 dst = self._repo[state[1]]
773 dst = self._repo[state[1]]
774 anc = dst.ancestor(cur)
774 anc = dst.ancestor(cur)
775
775
776 def mergefunc():
776 def mergefunc():
777 if anc == cur and dst.branch() == cur.branch():
777 if anc == cur and dst.branch() == cur.branch():
778 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
778 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
779 hg.update(self._repo, state[1])
779 hg.update(self._repo, state[1])
780 elif anc == dst:
780 elif anc == dst:
781 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
781 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
782 else:
782 else:
783 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
783 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
784 hg.merge(self._repo, state[1], remind=False)
784 hg.merge(self._repo, state[1], remind=False)
785
785
786 wctx = self._repo[None]
786 wctx = self._repo[None]
787 if self.dirty():
787 if self.dirty():
788 if anc != dst:
788 if anc != dst:
789 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
789 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
790 mergefunc()
790 mergefunc()
791 else:
791 else:
792 mergefunc()
792 mergefunc()
793 else:
793 else:
794 mergefunc()
794 mergefunc()
795
795
796 @annotatesubrepoerror
796 @annotatesubrepoerror
797 def push(self, opts):
797 def push(self, opts):
798 force = opts.get('force')
798 force = opts.get('force')
799 newbranch = opts.get('new_branch')
799 newbranch = opts.get('new_branch')
800 ssh = opts.get('ssh')
800 ssh = opts.get('ssh')
801
801
802 # push subrepos depth-first for coherent ordering
802 # push subrepos depth-first for coherent ordering
803 c = self._repo['']
803 c = self._repo['']
804 subs = c.substate # only repos that are committed
804 subs = c.substate # only repos that are committed
805 for s in sorted(subs):
805 for s in sorted(subs):
806 if c.sub(s).push(opts) == 0:
806 if c.sub(s).push(opts) == 0:
807 return False
807 return False
808
808
809 dsturl = _abssource(self._repo, True)
809 dsturl = _abssource(self._repo, True)
810 if not force:
810 if not force:
811 if self.storeclean(dsturl):
811 if self.storeclean(dsturl):
812 self.ui.status(
812 self.ui.status(
813 _('no changes made to subrepo %s since last push to %s\n')
813 _('no changes made to subrepo %s since last push to %s\n')
814 % (subrelpath(self), dsturl))
814 % (subrelpath(self), dsturl))
815 return None
815 return None
816 self.ui.status(_('pushing subrepo %s to %s\n') %
816 self.ui.status(_('pushing subrepo %s to %s\n') %
817 (subrelpath(self), dsturl))
817 (subrelpath(self), dsturl))
818 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
818 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
819 res = exchange.push(self._repo, other, force, newbranch=newbranch)
819 res = exchange.push(self._repo, other, force, newbranch=newbranch)
820
820
821 # the repo is now clean
821 # the repo is now clean
822 self._cachestorehash(dsturl)
822 self._cachestorehash(dsturl)
823 return res.cgresult
823 return res.cgresult
824
824
825 @annotatesubrepoerror
825 @annotatesubrepoerror
826 def outgoing(self, ui, dest, opts):
826 def outgoing(self, ui, dest, opts):
827 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
827 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
828
828
829 @annotatesubrepoerror
829 @annotatesubrepoerror
830 def incoming(self, ui, source, opts):
830 def incoming(self, ui, source, opts):
831 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
831 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
832
832
@annotatesubrepoerror
def files(self):
    """Return the manifest of the revision recorded in the substate."""
    ctx = self._repo[self._state[1]]
    return ctx.manifest()

def filedata(self, name):
    """Return the content of file *name* at the recorded revision."""
    rev = self._state[1]
    return self._repo[rev][name].data()

def fileflags(self, name):
    """Return the flags of file *name* at the recorded revision."""
    ctx = self._repo[self._state[1]]
    return ctx.flags(name)

def walk(self, match):
    """Walk the working directory context, filtered by *match*."""
    wctx = self._repo[None]
    return wctx.walk(match)
851
851
@annotatesubrepoerror
def forget(self, ui, match, prefix):
    """Forget matched files, reporting paths prefixed by *prefix*."""
    subprefix = os.path.join(prefix, self._path)
    return cmdutil.forget(ui, self._repo, match, subprefix, True)

@annotatesubrepoerror
def removefiles(self, ui, matcher, prefix, after, force, subrepos):
    """Remove matched files, reporting paths prefixed by *prefix*."""
    subprefix = os.path.join(prefix, self._path)
    return cmdutil.remove(ui, self._repo, matcher, subprefix,
                          after, force, subrepos)
862
862
@annotatesubrepoerror
def revert(self, ui, substate, *pats, **opts):
    """Revert the subrepo described by *substate*.

    This is a two step process: first, unless no_backup is set,
    revert all modified files inside the subrepo (creating backups);
    second, update the subrepo to the revision recorded in the
    substate.
    """
    ui.status(_('reverting subrepo %s\n') % substate[0])
    if not opts.get('no_backup'):
        # Revert the files themselves.  Note that this does not
        # recurse into nested subrepos; we could do that if there
        # was a set:subrepos() predicate.
        opts = opts.copy()
        opts['date'] = None
        opts['rev'] = substate[1]

        if opts.get('all'):
            pats = []
        else:
            pats = ['set:modified()']
        self.filerevert(ui, *pats, **opts)

    # Step 2: move the subrepo to the revision from the substate.
    self.get(substate, overwrite=True)

def filerevert(self, ui, *pats, **opts):
    """Revert individual files to the revision given by opts['rev']."""
    ctx = self._repo[opts['rev']]
    parents = self._repo.dirstate.parents()
    if opts.get('all'):
        pats = ['set:modified()']
    else:
        pats = []
    cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
895
895
def shortid(self, revid):
    """Abbreviate a full hex node id to the conventional 12 digits."""
    return revid[:12]
898
898
class svnsubrepo(abstractsubrepo):
    """Subrepository backed by a Subversion working copy."""

    def __init__(self, ctx, path, state):
        super(svnsubrepo, self).__init__(ctx._repo.ui)
        self._path = path
        self._state = state
        self._ctx = ctx
        self._exe = util.findexe('svn')
        if not self._exe:
            raise util.Abort(_("'svn' executable not found for subrepo '%s'")
                             % self._path)

    def _svncommand(self, commands, filename='', failok=False):
        """Run svn with *commands* and return (stdout, stderr).

        Aborts on a nonzero exit status unless *failok* is set.  Pass
        filename=None to suppress the trailing path argument.
        """
        cmd = [self._exe]
        extrakw = {}
        if not self.ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw['stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = os.path.join(self._ctx._repo.origroot, self._path, filename)
            cmd.append(path)
        env = dict(os.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=env, **extrakw)
        stdout, stderr = p.communicate()
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise util.Abort(stderr or 'exited with code %d'
                                 % p.returncode)
            if stderr:
                self.ui.warn(stderr + '\n')
        return stdout, stderr

    @propertycache
    def _svnversion(self):
        """The installed svn tool version as an (int, int) tuple."""
        output, err = self._svncommand(['--version', '--quiet'], filename=None)
        m = re.search(r'^(\d+)\.(\d+)', output)
        if not m:
            raise util.Abort(_('cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _wcrevs(self):
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = str(entries[0].getAttribute('revision')) or '0'
            commits = entries[0].getElementsByTagName('commit')
            if commits:
                lastrev = str(commits[0].getAttribute('revision')) or '0'
        return (lastrev, rev)

    def _wcrev(self):
        """Return the last-committed revision of the working copy."""
        return self._wcrevs()[0]

    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path')
            if item == 'external':
                externals.append(path)
            elif item == 'missing':
                missing.append(path)
            if (item not in ('', 'normal', 'unversioned', 'external')
                or props not in ('', 'none', 'normal')):
                changes.append(path)
        # A change under an external entry makes the whole working copy
        # count as externally changed.
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + os.sep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)

    def dirty(self, ignoreupdate=False):
        """Report whether the working copy differs from the recorded
        state (unless *ignoreupdate*, in which case only file changes
        count)."""
        if self._wcchanged()[0]:
            return True
        if not ignoreupdate and self._state[1] not in self._wcrevs():
            return True
        return False

    def basestate(self):
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # Last committed rev is not the same than rev. We would
            # like to take lastrev but we do not know if the subrepo
            # URL exists at lastrev. Test it and fallback to rev it
            # is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise util.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise util.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self.ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn one. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise util.Abort(_('failed to commit svn changes'))
            raise util.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev

    @annotatesubrepoerror
    def remove(self):
        """Delete the working copy from disk, refusing if it is dirty."""
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._path)
            return
        self.ui.note(_('removing subrepo %s\n') % self._path)

        def onerror(function, path, excinfo):
            if function is not os.remove:
                raise
            # read-only files cannot be unlinked under Windows
            s = os.stat(path)
            if (s.st_mode & stat.S_IWRITE) != 0:
                raise
            os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
            os.remove(path)

        path = self._ctx._repo.wjoin(self._path)
        shutil.rmtree(path, onerror=onerror)
        try:
            os.removedirs(os.path.dirname(path))
        except OSError:
            pass

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Check out the URL@rev recorded in *state*."""
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append('%s@%s' % (state[0], state[1]))
        status, err = self._svncommand(args, failok=True)
        _sanitize(self.ui, self._ctx._repo.wjoin(self._path), '.svn')
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged()[:2] == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise util.Abort((status or err).splitlines()[-1])
        self.ui.status(status)

    @annotatesubrepoerror
    def merge(self, state):
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new == wcrev:
            return
        dirty = old == wcrev or self._wcchanged()[0]
        if _updateprompt(self.ui, self, dirty, wcrev, new):
            self.get(state, False)

    def push(self, opts):
        # push is a no-op for SVN
        return True

    @annotatesubrepoerror
    def files(self):
        """Return the list of versioned file paths (UTF-8 encoded)."""
        output = self._svncommand(['list', '--recursive', '--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName('entry'):
            kind = str(e.getAttribute('kind'))
            if kind != 'file':
                continue
            name = ''.join(c.data for c
                           in e.getElementsByTagName('name')[0].childNodes
                           if c.nodeType == c.TEXT_NODE)
            paths.append(name.encode('utf-8'))
        return paths

    def filedata(self, name):
        """Return the content of file *name* via 'svn cat'."""
        return self._svncommand(['cat'], name)[0]
1128
1128
1129 class gitsubrepo(abstractsubrepo):
1129 class gitsubrepo(abstractsubrepo):
1130 def __init__(self, ctx, path, state):
1130 def __init__(self, ctx, path, state):
1131 super(gitsubrepo, self).__init__(ctx._repo.ui)
1131 super(gitsubrepo, self).__init__(ctx._repo.ui)
1132 self._state = state
1132 self._state = state
1133 self._ctx = ctx
1133 self._ctx = ctx
1134 self._path = path
1134 self._path = path
1135 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1135 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1136 self._abspath = ctx._repo.wjoin(path)
1136 self._abspath = ctx._repo.wjoin(path)
1137 self._subparent = ctx._repo
1137 self._subparent = ctx._repo
1138 self._ensuregit()
1138 self._ensuregit()
1139
1139
1140 def _ensuregit(self):
1140 def _ensuregit(self):
1141 try:
1141 try:
1142 self._gitexecutable = 'git'
1142 self._gitexecutable = 'git'
1143 out, err = self._gitnodir(['--version'])
1143 out, err = self._gitnodir(['--version'])
1144 except OSError, e:
1144 except OSError, e:
1145 if e.errno != 2 or os.name != 'nt':
1145 if e.errno != 2 or os.name != 'nt':
1146 raise
1146 raise
1147 self._gitexecutable = 'git.cmd'
1147 self._gitexecutable = 'git.cmd'
1148 out, err = self._gitnodir(['--version'])
1148 out, err = self._gitnodir(['--version'])
1149 versionstatus = self._checkversion(out)
1149 versionstatus = self._checkversion(out)
1150 if versionstatus == 'unknown':
1150 if versionstatus == 'unknown':
1151 self.ui.warn(_('cannot retrieve git version\n'))
1151 self.ui.warn(_('cannot retrieve git version\n'))
1152 elif versionstatus == 'abort':
1152 elif versionstatus == 'abort':
1153 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1153 raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
1154 elif versionstatus == 'warning':
1154 elif versionstatus == 'warning':
1155 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1155 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1156
1156
1157 @staticmethod
1157 @staticmethod
1158 def _gitversion(out):
1158 def _gitversion(out):
1159 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1159 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1160 if m:
1160 if m:
1161 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1161 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1162
1162
1163 m = re.search(r'^git version (\d+)\.(\d+)', out)
1163 m = re.search(r'^git version (\d+)\.(\d+)', out)
1164 if m:
1164 if m:
1165 return (int(m.group(1)), int(m.group(2)), 0)
1165 return (int(m.group(1)), int(m.group(2)), 0)
1166
1166
1167 return -1
1167 return -1
1168
1168
1169 @staticmethod
1169 @staticmethod
1170 def _checkversion(out):
1170 def _checkversion(out):
1171 '''ensure git version is new enough
1171 '''ensure git version is new enough
1172
1172
1173 >>> _checkversion = gitsubrepo._checkversion
1173 >>> _checkversion = gitsubrepo._checkversion
1174 >>> _checkversion('git version 1.6.0')
1174 >>> _checkversion('git version 1.6.0')
1175 'ok'
1175 'ok'
1176 >>> _checkversion('git version 1.8.5')
1176 >>> _checkversion('git version 1.8.5')
1177 'ok'
1177 'ok'
1178 >>> _checkversion('git version 1.4.0')
1178 >>> _checkversion('git version 1.4.0')
1179 'abort'
1179 'abort'
1180 >>> _checkversion('git version 1.5.0')
1180 >>> _checkversion('git version 1.5.0')
1181 'warning'
1181 'warning'
1182 >>> _checkversion('git version 1.9-rc0')
1182 >>> _checkversion('git version 1.9-rc0')
1183 'ok'
1183 'ok'
1184 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1184 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1185 'ok'
1185 'ok'
1186 >>> _checkversion('git version 1.9.0.GIT')
1186 >>> _checkversion('git version 1.9.0.GIT')
1187 'ok'
1187 'ok'
1188 >>> _checkversion('git version 12345')
1188 >>> _checkversion('git version 12345')
1189 'unknown'
1189 'unknown'
1190 >>> _checkversion('no')
1190 >>> _checkversion('no')
1191 'unknown'
1191 'unknown'
1192 '''
1192 '''
1193 version = gitsubrepo._gitversion(out)
1193 version = gitsubrepo._gitversion(out)
1194 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1194 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1195 # despite the docstring comment. For now, error on 1.4.0, warn on
1195 # despite the docstring comment. For now, error on 1.4.0, warn on
1196 # 1.5.0 but attempt to continue.
1196 # 1.5.0 but attempt to continue.
1197 if version == -1:
1197 if version == -1:
1198 return 'unknown'
1198 return 'unknown'
1199 if version < (1, 5, 0):
1199 if version < (1, 5, 0):
1200 return 'abort'
1200 return 'abort'
1201 elif version < (1, 6, 0):
1201 elif version < (1, 6, 0):
1202 return 'warning'
1202 return 'warning'
1203 return 'ok'
1203 return 'ok'
1204
1204
def _gitcommand(self, commands, env=None, stream=False):
    """Run git in the subrepo directory and return its stdout."""
    return self._gitdir(commands, env=env, stream=stream)[0]

def _gitdir(self, commands, env=None, stream=False):
    """Run git with the subrepo directory as the working directory."""
    return self._gitnodir(commands, env=env, stream=stream,
                          cwd=self._abspath)

def _gitnodir(self, commands, env=None, stream=False, cwd=None):
    """Calls the git command

    The methods tries to call the git command. versions prior to 1.6.0
    are not supported and very probably fail.
    """
    self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
    # unless ui.quiet is set, print git's stderr,
    # which is mostly progress and useful info
    errpipe = None
    if self.ui.quiet:
        errpipe = open(os.devnull, 'w')
    p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
                         cwd=cwd, env=env, close_fds=util.closefds,
                         stdout=subprocess.PIPE, stderr=errpipe)
    if stream:
        return p.stdout, None

    retdata = p.stdout.read().strip()
    # wait for the child to exit to avoid race condition.
    p.wait()

    if p.returncode != 0 and p.returncode != 1:
        # there are certain error codes that are ok
        command = commands[0]
        if command in ('cat-file', 'symbolic-ref'):
            return retdata, p.returncode
        # for all others, abort
        raise util.Abort('git %s error %d in %s' %
                         (command, p.returncode, self._relpath))

    return retdata, p.returncode

def _gitmissing(self):
    """True when no .git directory exists in the subrepo path."""
    return not os.path.exists(os.path.join(self._abspath, '.git'))

def _gitstate(self):
    """Return the revision hash of HEAD."""
    return self._gitcommand(['rev-parse', 'HEAD'])

def _gitcurrentbranch(self):
    """Return the symbolic ref of HEAD, or None when detached."""
    current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
    if err:
        current = None
    return current

def _gitremote(self, remote):
    """Return the URL configured for the named remote."""
    out = self._gitcommand(['remote', 'show', '-n', remote])
    # second line of 'git remote show -n' output carries the URL
    line = out.split('\n')[1]
    i = line.index('URL: ') + len('URL: ')
    return line[i:]

def _githavelocally(self, revision):
    """True when *revision* is present in the local object store."""
    out, code = self._gitdir(['cat-file', '-e', revision])
    return code == 0

def _gitisancestor(self, r1, r2):
    """True when r1 is an ancestor of r2 (merge-base test)."""
    base = self._gitcommand(['merge-base', r1, r2])
    return base == r1

def _gitisbare(self):
    """True when the repository is configured as bare."""
    return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'

def _gitupdatestat(self):
    """This must be run before git diff-index.
    diff-index only looks at changes to file stat;
    this command looks at file contents and updates the stat."""
    self._gitcommand(['update-index', '-q', '--refresh'])
1279
1279
1280 def _gitbranchmap(self):
1280 def _gitbranchmap(self):
1281 '''returns 2 things:
1281 '''returns 2 things:
1282 a map from git branch to revision
1282 a map from git branch to revision
1283 a map from revision to branches'''
1283 a map from revision to branches'''
1284 branch2rev = {}
1284 branch2rev = {}
1285 rev2branch = {}
1285 rev2branch = {}
1286
1286
1287 out = self._gitcommand(['for-each-ref', '--format',
1287 out = self._gitcommand(['for-each-ref', '--format',
1288 '%(objectname) %(refname)'])
1288 '%(objectname) %(refname)'])
1289 for line in out.split('\n'):
1289 for line in out.split('\n'):
1290 revision, ref = line.split(' ')
1290 revision, ref = line.split(' ')
1291 if (not ref.startswith('refs/heads/') and
1291 if (not ref.startswith('refs/heads/') and
1292 not ref.startswith('refs/remotes/')):
1292 not ref.startswith('refs/remotes/')):
1293 continue
1293 continue
1294 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1294 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1295 continue # ignore remote/HEAD redirects
1295 continue # ignore remote/HEAD redirects
1296 branch2rev[ref] = revision
1296 branch2rev[ref] = revision
1297 rev2branch.setdefault(revision, []).append(ref)
1297 rev2branch.setdefault(revision, []).append(ref)
1298 return branch2rev, rev2branch
1298 return branch2rev, rev2branch
1299
1299
    def _gittracking(self, branches):
        'return map of remote branch to local tracking branch'
        # assumes no more than one local tracking branch for each remote
        tracking = {}
        for b in branches:
            # only local branches can track a remote one
            if b.startswith('refs/remotes/'):
                continue
            # strip the 'refs/heads/' prefix to get the short branch name
            bname = b.split('/', 2)[2]
            remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
            if remote:
                ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
                # key is the remote-tracking ref this local branch follows
                tracking['refs/remotes/%s/%s' %
                         (remote, ref.split('/', 2)[2])] = b
        return tracking
1314
1314
    def _abssource(self, source):
        # Return an absolute source URL/path for this subrepo; a relative
        # 'source' is resolved against the parent repo by the module-level
        # _abssource() helper (called via self._subsource).
        if '://' not in source:
            # recognize the scp syntax as an absolute source
            colon = source.find(':')
            if colon != -1 and '/' not in source[:colon]:
                return source
        self._subsource = source
        return _abssource(self)
1323
1323
    def _fetch(self, source, revision):
        # Ensure 'revision' exists locally: clone from 'source' when the
        # repo is missing entirely, otherwise fetch from origin.  Aborts
        # if the revision still cannot be found afterwards.
        if self._gitmissing():
            source = self._abssource(source)
            self.ui.status(_('cloning subrepo %s from %s\n') %
                           (self._relpath, source))
            self._gitnodir(['clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self.ui.status(_('pulling subrepo %s from %s\n') %
                       (self._relpath, self._gitremote('origin')))
        # try only origin: the originally cloned repo
        self._gitcommand(['fetch'])
        if not self._githavelocally(revision):
            raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
                               (revision, self._relpath))
1339
1339
    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False):
        """Return True if the subrepo has uncommitted changes, or (unless
        ignoreupdate) a checkout differing from the recorded state.
        Untracked files are ignored."""
        if self._gitmissing():
            # missing repo only counts as dirty if a revision was recorded
            return self._state[1] != ''
        if self._gitisbare():
            return True
        if not ignoreupdate and self._state[1] != self._gitstate():
            # different version checked out
            return True
        # check for staged changes or modified files; ignore untracked files
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
        # diff-index --quiet exits 1 exactly when differences were found
        return code == 1
1353
1353
1354 def basestate(self):
1354 def basestate(self):
1355 return self._gitstate()
1355 return self._gitstate()
1356
1356
    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Update the subrepo working copy to the revision recorded in
        'state', fetching it first if needed.

        With overwrite=True local changes are discarded (via git reset
        --hard / checkout -f).  Prefers checking out a branch pointing at
        the revision (master first, then any local branch, then a tracked
        remote branch), falling back to a detached-HEAD checkout.
        """
        source, revision, kind = state
        if not revision:
            self.remove()
            return
        self._fetch(source, revision)
        # if the repo was set to be bare, unbare it
        if self._gitisbare():
            self._gitcommand(['config', 'core.bare', 'false'])
            if self._gitstate() == revision:
                self._gitcommand(['reset', '--hard', 'HEAD'])
                return
        elif self._gitstate() == revision:
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # reset --hard will otherwise throw away files added for commit,
                # not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
                self._gitcommand(['reset', '--hard', 'HEAD'])
            return
        branch2rev, rev2branch = self._gitbranchmap()

        def checkout(args):
            # run 'git checkout', forcing when overwrite was requested
            cmd = ['checkout']
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # the -f option will otherwise throw away files added for
                # commit, not just unmark them.
                self._gitcommand(['reset', 'HEAD'])
                cmd.append('-f')
            self._gitcommand(cmd + args)
            _sanitize(self.ui, self._abspath, '.git')

        def rawcheckout():
            # no branch to checkout, check it out with no branch
            self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
                         self._relpath)
            self.ui.warn(_('check out a git branch if you intend '
                           'to make changes\n'))
            checkout(['-q', revision])

        if revision not in rev2branch:
            rawcheckout()
            return
        branches = rev2branch[revision]
        firstlocalbranch = None
        for b in branches:
            if b == 'refs/heads/master':
                # master trumps all other branches
                checkout(['refs/heads/master'])
                return
            if not firstlocalbranch and not b.startswith('refs/remotes/'):
                firstlocalbranch = b
        if firstlocalbranch:
            checkout([firstlocalbranch])
            return

        tracking = self._gittracking(branch2rev.keys())
        # choose a remote branch already tracked if possible
        remote = branches[0]
        if remote not in tracking:
            for b in branches:
                if b in tracking:
                    remote = b
                    break

        if remote not in tracking:
            # create a new local tracking branch
            local = remote.split('/', 3)[3]
            checkout(['-b', local, remote])
        elif self._gitisancestor(branch2rev[tracking[remote]], remote):
            # When updating to a tracked remote branch,
            # if the local tracking branch is downstream of it,
            # a normal `git pull` would have performed a "fast-forward merge"
            # which is equivalent to updating the local branch to the remote.
            # Since we are only looking at branching at update, we need to
            # detect this situation and perform this action lazily.
            if tracking[remote] != self._gitcurrentbranch():
                checkout([tracking[remote]])
            self._gitcommand(['merge', '--ff', remote])
            _sanitize(self.ui, self._abspath, '.git')
        else:
            # a real merge would be required, just checkout the revision
            rawcheckout()
1442
1442
    @annotatesubrepoerror
    def commit(self, text, user, date):
        """Commit all changes in the subrepo working copy ('commit -a')
        with the given message, author and date; return the resulting
        revision id."""
        if self._gitmissing():
            raise util.Abort(_("subrepo %s is missing") % self._relpath)
        cmd = ['commit', '-a', '-m', text]
        env = os.environ.copy()
        if user:
            cmd += ['--author', user]
        if date:
            # git's date parser silently ignores when seconds < 1e9
            # convert to ISO8601
            env['GIT_AUTHOR_DATE'] = util.datestr(date,
                                                  '%Y-%m-%dT%H:%M:%S %1%2')
        self._gitcommand(cmd, env=env)
        # make sure commit works otherwise HEAD might not exist under certain
        # circumstances
        return self._gitstate()
1460
1460
    @annotatesubrepoerror
    def merge(self, state):
        """Merge the revision recorded in 'state' into the current
        checkout: fast-forward when the merge base equals that revision,
        real merge (--no-commit) otherwise.  A dirty working copy
        triggers an interactive prompt first."""
        source, revision, kind = state
        self._fetch(source, revision)
        base = self._gitcommand(['merge-base', revision, self._state[1]])
        # refresh stat cache before diff-index so the dirtiness test below
        # reflects actual file contents
        self._gitupdatestat()
        out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])

        def mergefunc():
            if base == revision:
                self.get(state) # fast forward merge
            elif base != self._state[1]:
                self._gitcommand(['merge', '--no-commit', revision])
            _sanitize(self.ui, self._abspath, '.git')

        if self.dirty():
            if self._gitstate() != revision:
                dirty = self._gitstate() == self._state[1] or code != 0
                if _updateprompt(self.ui, self, dirty,
                                 self._state[1][:7], revision[:7]):
                    mergefunc()
        else:
            mergefunc()
1484
1484
    @annotatesubrepoerror
    def push(self, opts):
        """Push the recorded revision to origin.

        Returns True when the revision is already reachable from origin
        or the push succeeded; False when no suitable branch is checked
        out or the push failed."""
        force = opts.get('force')

        if not self._state[1]:
            # nothing recorded for this subrepo yet
            return True
        if self._gitmissing():
            raise util.Abort(_("subrepo %s is missing") % self._relpath)
        # if a branch in origin contains the revision, nothing to do
        branch2rev, rev2branch = self._gitbranchmap()
        if self._state[1] in rev2branch:
            for b in rev2branch[self._state[1]]:
                if b.startswith('refs/remotes/origin/'):
                    return True
        for b, revision in branch2rev.iteritems():
            if b.startswith('refs/remotes/origin/'):
                if self._gitisancestor(self._state[1], revision):
                    return True
        # otherwise, try to push the currently checked out branch
        cmd = ['push']
        if force:
            cmd.append('--force')

        current = self._gitcurrentbranch()
        if current:
            # determine if the current branch is even useful
            if not self._gitisancestor(self._state[1], current):
                self.ui.warn(_('unrelated git branch checked out '
                               'in subrepo %s\n') % self._relpath)
                return False
            self.ui.status(_('pushing branch %s of subrepo %s\n') %
                           (current.split('/', 2)[2], self._relpath))
            ret = self._gitdir(cmd + ['origin', current])
            # _gitdir returns (output, exitcode); 0 means success
            return ret[1] == 0
        else:
            self.ui.warn(_('no branch checked out in subrepo %s\n'
                           'cannot push revision %s\n') %
                          (self._relpath, self._state[1]))
            return False
1524
1524
    @annotatesubrepoerror
    def remove(self):
        """Empty the subrepo working directory, preserving .git.

        Refuses to act when the subrepo is dirty.  The repository itself
        is only marked bare, not deleted, since it may contain local-only
        history."""
        if self._gitmissing():
            return
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._relpath)
            return
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_('removing subrepo %s\n') % self._relpath)
        self._gitcommand(['config', 'core.bare', 'true'])
        for f in os.listdir(self._abspath):
            if f == '.git':
                continue
            path = os.path.join(self._abspath, f)
            if os.path.isdir(path) and not os.path.islink(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
1545
1545
    def archive(self, archiver, prefix, match=None):
        """Add the subrepo's files at the recorded revision to 'archiver'
        under 'prefix'; return the number of files archived.  'match'
        optionally filters file names."""
        total = 0
        source, revision = self._state
        if not revision:
            return total
        self._fetch(source, revision)

        # Parse git's native archive command.
        # This should be much faster than manually traversing the trees
        # and objects with many subprocess calls.
        tarstream = self._gitcommand(['archive', revision], stream=True)
        tar = tarfile.open(fileobj=tarstream, mode='r|')
        relpath = subrelpath(self)
        self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
        for i, info in enumerate(tar):
            if info.isdir():
                continue
            if match and not match(info.name):
                continue
            if info.issym():
                # symlinks carry their target in linkname, not file data
                data = info.linkname
            else:
                data = tar.extractfile(info).read()
            archiver.addfile(os.path.join(prefix, self._path, info.name),
                             info.mode, info.issym(), data)
            total += 1
            self.ui.progress(_('archiving (%s)') % relpath, i + 1,
                             unit=_('files'))
        self.ui.progress(_('archiving (%s)') % relpath, None)
        return total
1576
1576
1577
1577
1578 @annotatesubrepoerror
1578 @annotatesubrepoerror
1579 def status(self, rev2, **opts):
1579 def status(self, rev2, **opts):
1580 rev1 = self._state[1]
1580 rev1 = self._state[1]
1581 if self._gitmissing() or not rev1:
1581 if self._gitmissing() or not rev1:
1582 # if the repo is missing, return no results
1582 # if the repo is missing, return no results
1583 return [], [], [], [], [], [], []
1583 return [], [], [], [], [], [], []
1584 modified, added, removed = [], [], []
1584 modified, added, removed = [], [], []
1585 self._gitupdatestat()
1585 self._gitupdatestat()
1586 if rev2:
1586 if rev2:
1587 command = ['diff-tree', rev1, rev2]
1587 command = ['diff-tree', rev1, rev2]
1588 else:
1588 else:
1589 command = ['diff-index', rev1]
1589 command = ['diff-index', rev1]
1590 out = self._gitcommand(command)
1590 out = self._gitcommand(command)
1591 for line in out.split('\n'):
1591 for line in out.split('\n'):
1592 tab = line.find('\t')
1592 tab = line.find('\t')
1593 if tab == -1:
1593 if tab == -1:
1594 continue
1594 continue
1595 status, f = line[tab - 1], line[tab + 1:]
1595 status, f = line[tab - 1], line[tab + 1:]
1596 if status == 'M':
1596 if status == 'M':
1597 modified.append(f)
1597 modified.append(f)
1598 elif status == 'A':
1598 elif status == 'A':
1599 added.append(f)
1599 added.append(f)
1600 elif status == 'D':
1600 elif status == 'D':
1601 removed.append(f)
1601 removed.append(f)
1602
1602
1603 deleted, unknown, ignored, clean = [], [], [], []
1603 deleted, unknown, ignored, clean = [], [], [], []
1604
1604
1605 if not rev2:
1605 if not rev2:
1606 command = ['ls-files', '--others', '--exclude-standard']
1606 command = ['ls-files', '--others', '--exclude-standard']
1607 out = self._gitcommand(command)
1607 out = self._gitcommand(command)
1608 for line in out.split('\n'):
1608 for line in out.split('\n'):
1609 if len(line) == 0:
1609 if len(line) == 0:
1610 continue
1610 continue
1611 unknown.append(line)
1611 unknown.append(line)
1612
1612
1613 return scmutil.status(modified, added, removed, deleted,
1613 return scmutil.status(modified, added, removed, deleted,
1614 unknown, ignored, clean)
1614 unknown, ignored, clean)
1615
1615
    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        """Write a git diff of the subrepo between the recorded revision
        and 'node2' (or the working directory) to 'ui', honoring the
        relevant diff options.  Include/exclude patterns are not
        supported and produce no output."""
        node1 = self._state[1]
        cmd = ['diff']
        if opts['stat']:
            cmd.append('--stat')
        else:
            # for Git, this also implies '-p'
            cmd.append('-U%d' % diffopts.context)

        gitprefix = os.path.join(prefix, self._path)

        if diffopts.noprefix:
            cmd.extend(['--src-prefix=%s/' % gitprefix,
                        '--dst-prefix=%s/' % gitprefix])
        else:
            cmd.extend(['--src-prefix=a/%s/' % gitprefix,
                        '--dst-prefix=b/%s/' % gitprefix])

        if diffopts.ignorews:
            cmd.append('--ignore-all-space')
        if diffopts.ignorewsamount:
            cmd.append('--ignore-space-change')
        # --ignore-blank-lines requires git >= 1.8.4
        if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
                and diffopts.ignoreblanklines:
            cmd.append('--ignore-blank-lines')

        cmd.append(node1)
        if node2:
            cmd.append(node2)

        if match.anypats():
            return #No support for include/exclude yet

        if match.always():
            ui.write(self._gitcommand(cmd))
        elif match.files():
            # diff each explicitly matched file individually
            for f in match.files():
                ui.write(self._gitcommand(cmd + [f]))
        elif match(gitprefix): #Subrepo is matched
            ui.write(self._gitcommand(cmd))
1657
1657
1658 def revert(self, ui, substate, *pats, **opts):
1658 def revert(self, ui, substate, *pats, **opts):
1659 ui.status(_('reverting subrepo %s\n') % substate[0])
1659 ui.status(_('reverting subrepo %s\n') % substate[0])
1660 if not opts.get('no_backup'):
1660 if not opts.get('no_backup'):
1661 ui.warn('%s: reverting %s subrepos without '
1661 ui.warn('%s: reverting %s subrepos without '
1662 '--no-backup is unsupported\n'
1662 '--no-backup is unsupported\n'
1663 % (substate[0], substate[2]))
1663 % (substate[0], substate[2]))
1664 return []
1664 return []
1665
1665
1666 self.get(substate, overwrite=True)
1666 self.get(substate, overwrite=True)
1667 return []
1667 return []
1668
1668
1669 def shortid(self, revid):
1669 def shortid(self, revid):
1670 return revid[:7]
1670 return revid[:7]
1671
1671
# registry mapping a subrepo kind string to the class implementing it
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
}
General Comments 0
You need to be logged in to leave comments. Login now