ui: merge prompt text components into a single string...
Matt Mackall
r19226:c58b6ab4 default
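The change this diff applies replaces ui.promptchoice()'s separate tuple of choice labels with labels embedded in the prompt string itself, separated by '$$' markers. A minimal sketch of the two calling conventions as they appear in the hunks below (assumes ui is a mercurial.ui.ui instance and lfile is a filename; the surrounding merge logic is omitted):

    from mercurial.i18n import _

    # Old convention: prompt text and choice labels travel separately.
    msg = _('%s has been turned into a largefile\n'
            'use (l)argefile or keep as (n)ormal file?') % lfile
    choices = (_('&Largefile'), _('&Normal file'))
    keeplarge = ui.promptchoice(msg, choices, 0) == 0

    # New convention: the '$$'-separated labels ride along in the same
    # translatable string; promptchoice() takes only the combined string
    # plus the default index and returns the index of the chosen label.
    msg = _('%s has been turned into a largefile\n'
            'use (l)argefile or keep as (n)ormal file?'
            '$$ &Largefile $$ &Normal file') % lfile
    keeplarge = ui.promptchoice(msg, 0) == 0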
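Every wrapper below receives the wrapped original as its first argument (orig or origfn) and calls back into it for the normal-file path. The wiring is not part of this hunk; in largefiles it lives in uisetup.py and uses Mercurial's extension hooks, roughly as in this sketch (the two wrapped names are only examples):

    from mercurial import commands, extensions, hg
    from hgext.largefiles import overrides

    def uisetup(ui):
        # Wrap a command-table entry: 'hg add' now runs overrideadd, which
        # gets the original add command as its first argument.
        extensions.wrapcommand(commands.table, 'add', overrides.overrideadd)
        # Wrap a plain function the same way.
        extensions.wrapfunction(hg, 'merge', overrides.hgmerge)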
@@ -1,1214 +1,1214 @@
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''Overridden Mercurial commands and functions for the largefiles extension'''

import os
import copy

from mercurial import hg, commands, util, cmdutil, scmutil, match as match_, \
    node, archival, error, merge, discovery
from mercurial.i18n import _
from mercurial.node import hex
from hgext import rebase

import lfutil
import lfcommands
import basestore

# -- Utility functions: commonly/repeatedly needed functionality ---------------

def installnormalfilesmatchfn(manifest):
    '''overrides scmutil.match so that the matcher it returns will ignore all
    largefiles'''
    oldmatch = None # for the closure
    def overridematch(ctx, pats=[], opts={}, globbed=False,
            default='relpath'):
        match = oldmatch(ctx, pats, opts, globbed, default)
        m = copy.copy(match)
        notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
            manifest)
        m._files = filter(notlfile, m._files)
        m._fmap = set(m._files)
        m._always = False
        origmatchfn = m.matchfn
        m.matchfn = lambda f: notlfile(f) and origmatchfn(f) or None
        return m
    oldmatch = installmatchfn(overridematch)

def installmatchfn(f):
    oldmatch = scmutil.match
    setattr(f, 'oldmatch', oldmatch)
    scmutil.match = f
    return oldmatch

def restorematchfn():
    '''restores scmutil.match to what it was before installnormalfilesmatchfn
    was called. no-op if scmutil.match is its original function.

    Note that n calls to installnormalfilesmatchfn will require n calls to
    restore matchfn to reverse'''
    scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)

def addlargefiles(ui, repo, *pats, **opts):
    large = opts.pop('large', None)
    lfsize = lfutil.getminsize(
        ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))

    lfmatcher = None
    if lfutil.islfilesrepo(repo):
        lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
        if lfpats:
            lfmatcher = match_.match(repo.root, '', list(lfpats))

    lfnames = []
    m = scmutil.match(repo[None], pats, opts)
    m.bad = lambda x, y: None
    wctx = repo[None]
    for f in repo.walk(m):
        exact = m.exact(f)
        lfile = lfutil.standin(f) in wctx
        nfile = f in wctx
        exists = lfile or nfile

        # Don't warn the user when they attempt to add a normal tracked file.
        # The normal add code will do that for us.
        if exact and exists:
            if lfile:
                ui.warn(_('%s already a largefile\n') % f)
            continue

        if (exact or not exists) and not lfutil.isstandin(f):
            wfile = repo.wjoin(f)

            # In case the file was removed previously, but not committed
            # (issue3507)
            if not os.path.exists(wfile):
                continue

            abovemin = (lfsize and
                        os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
            if large or abovemin or (lfmatcher and lfmatcher(f)):
                lfnames.append(f)
                if ui.verbose or not exact:
                    ui.status(_('adding %s as a largefile\n') % m.rel(f))

    bad = []
    standins = []

    # Need to lock, otherwise there could be a race condition between
    # when standins are created and added to the repo.
    wlock = repo.wlock()
    try:
        if not opts.get('dry_run'):
            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for f in lfnames:
                standinname = lfutil.standin(f)
                lfutil.writestandin(repo, standinname, hash='',
                    executable=lfutil.getexecutable(repo.wjoin(f)))
                standins.append(standinname)
                if lfdirstate[f] == 'r':
                    lfdirstate.normallookup(f)
                else:
                    lfdirstate.add(f)
            lfdirstate.write()
            bad += [lfutil.splitstandin(f)
                    for f in repo[None].add(standins)
                    if f in m.files()]
    finally:
        wlock.release()
    return bad

def removelargefiles(ui, repo, *pats, **opts):
    after = opts.get('after')
    if not pats and not after:
        raise util.Abort(_('no files specified'))
    m = scmutil.match(repo[None], pats, opts)
    try:
        repo.lfstatus = True
        s = repo.status(match=m, clean=True)
    finally:
        repo.lfstatus = False
    manifest = repo[None].manifest()
    modified, added, deleted, clean = [[f for f in list
                                        if lfutil.standin(f) in manifest]
                                       for list in [s[0], s[1], s[3], s[6]]]

    def warn(files, msg):
        for f in files:
            ui.warn(msg % m.rel(f))
        return int(len(files) > 0)

    result = 0

    if after:
        remove, forget = deleted, []
        result = warn(modified + added + clean,
                      _('not removing %s: file still exists\n'))
    else:
        remove, forget = deleted + clean, []
        result = warn(modified, _('not removing %s: file is modified (use -f'
                                  ' to force removal)\n'))
        result = warn(added, _('not removing %s: file has been marked for add'
                               ' (use forget to undo)\n')) or result

    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    # Need to lock because standin files are deleted then removed from the
    # repository and we could race in-between.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for f in remove:
            if not after:
                # If this is being called by addremove, notify the user that we
                # are removing the file.
                if getattr(repo, "_isaddremove", False):
                    ui.status(_('removing %s\n') % f)
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
            lfdirstate.remove(f)
        lfdirstate.write()
        forget = [lfutil.standin(f) for f in forget]
        remove = [lfutil.standin(f) for f in remove]
        repo[None].forget(forget)
        # If this is being called by addremove, let the original addremove
        # function handle this.
        if not getattr(repo, "_isaddremove", False):
            for f in remove:
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(remove)
    finally:
        wlock.release()

    return result

# For overriding mercurial.hgweb.webcommands so that largefiles will
# appear at their right place in the manifests.
def decodepath(orig, path):
    return lfutil.splitstandin(path) or path

# -- Wrappers: modify existing commands --------------------------------

# Add works by going through the files that the user wanted to add and
# checking if they should be added as largefiles. Then it makes a new
# matcher which matches only the normal files and runs the original
# version of add.
def overrideadd(orig, ui, repo, *pats, **opts):
    normal = opts.pop('normal')
    if normal:
        if opts.get('large'):
            raise util.Abort(_('--normal cannot be used with --large'))
        return orig(ui, repo, *pats, **opts)
    bad = addlargefiles(ui, repo, *pats, **opts)
    installnormalfilesmatchfn(repo[None].manifest())
    result = orig(ui, repo, *pats, **opts)
    restorematchfn()

    return (result == 1 or bad) and 1 or 0

def overrideremove(orig, ui, repo, *pats, **opts):
    installnormalfilesmatchfn(repo[None].manifest())
    result = orig(ui, repo, *pats, **opts)
    restorematchfn()
    return removelargefiles(ui, repo, *pats, **opts) or result

def overridestatusfn(orig, repo, rev2, **opts):
    try:
        repo._repo.lfstatus = True
        return orig(repo, rev2, **opts)
    finally:
        repo._repo.lfstatus = False

def overridestatus(orig, ui, repo, *pats, **opts):
    try:
        repo.lfstatus = True
        return orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False

def overridedirty(orig, repo, ignoreupdate=False):
    try:
        repo._repo.lfstatus = True
        return orig(repo, ignoreupdate)
    finally:
        repo._repo.lfstatus = False

def overridelog(orig, ui, repo, *pats, **opts):
    def overridematch(ctx, pats=[], opts={}, globbed=False,
            default='relpath'):
        """Matcher that merges root directory with .hglf, suitable for log.
        It is still possible to match .hglf directly.
        For any listed files run log on the standin too.
        matchfn tries both the given filename and with .hglf stripped.
        """
        match = oldmatch(ctx, pats, opts, globbed, default)
        m = copy.copy(match)
        standins = [lfutil.standin(f) for f in m._files]
        m._files.extend(standins)
        m._fmap = set(m._files)
        m._always = False
        origmatchfn = m.matchfn
        def lfmatchfn(f):
            lf = lfutil.splitstandin(f)
            if lf is not None and origmatchfn(lf):
                return True
            r = origmatchfn(f)
            return r
        m.matchfn = lfmatchfn
        return m
    oldmatch = installmatchfn(overridematch)
    try:
        repo.lfstatus = True
        return orig(ui, repo, *pats, **opts)
    finally:
        repo.lfstatus = False
        restorematchfn()

def overrideverify(orig, ui, repo, *pats, **opts):
    large = opts.pop('large', False)
    all = opts.pop('lfa', False)
    contents = opts.pop('lfc', False)

    result = orig(ui, repo, *pats, **opts)
    if large or all or contents:
        result = result or lfcommands.verifylfiles(ui, repo, all, contents)
    return result

def overridedebugstate(orig, ui, repo, *pats, **opts):
    large = opts.pop('large', False)
    if large:
        lfcommands.debugdirstate(ui, repo)
    else:
        orig(ui, repo, *pats, **opts)

# Override needs to refresh standins so that update's normal merge
# will go through properly. Then the other update hook (overriding repo.update)
# will get the new files. Filemerge is also overridden so that the merge
# will merge standins correctly.
def overrideupdate(orig, ui, repo, *pats, **opts):
    lfdirstate = lfutil.openlfdirstate(ui, repo)
    s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
        False, False)
    (unsure, modified, added, removed, missing, unknown, ignored, clean) = s

    # Need to lock between the standins getting updated and their
    # largefiles getting updated
    wlock = repo.wlock()
    try:
        if opts['check']:
            mod = len(modified) > 0
            for lfile in unsure:
                standin = lfutil.standin(lfile)
                if repo['.'][standin].data().strip() != \
                        lfutil.hashfile(repo.wjoin(lfile)):
                    mod = True
                else:
                    lfdirstate.normal(lfile)
            lfdirstate.write()
            if mod:
                raise util.Abort(_('uncommitted local changes'))
        # XXX handle removed differently
        if not opts['clean']:
            for lfile in unsure + modified + added:
                lfutil.updatestandin(repo, lfutil.standin(lfile))
    finally:
        wlock.release()
    return orig(ui, repo, *pats, **opts)

# Before starting the manifest merge, merge.updates will call
# _checkunknown to check if there are any files in the merged-in
# changeset that collide with unknown files in the working copy.
#
# The largefiles are seen as unknown, so this prevents us from merging
# in a file 'foo' if we already have a largefile with the same name.
#
# The overridden function filters the unknown files by removing any
# largefiles. This makes the merge proceed and we can then handle this
# case further in the overridden manifestmerge function below.
def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
    if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
        return False
    return origfn(repo, wctx, mctx, f)

# The manifest merge handles conflicts on the manifest level. We want
# to handle changes in largefile-ness of files at this level too.
#
# The strategy is to run the original manifestmerge and then process
# the action list it outputs. There are two cases we need to deal with:
#
# 1. Normal file in p1, largefile in p2. Here the largefile is
#    detected via its standin file, which will enter the working copy
#    with a "get" action. It is not "merge" since the standin is all
#    Mercurial is concerned with at this level -- the link to the
#    existing normal file is not relevant here.
#
# 2. Largefile in p1, normal file in p2. Here we get a "merge" action
#    since the largefile will be present in the working copy and
#    different from the normal file in p2. Mercurial therefore
#    triggers a merge action.
#
# In both cases, we prompt the user and emit new actions to either
# remove the standin (if the normal file was kept) or to remove the
# normal file and get the standin (if the largefile was kept). The
# default prompt answer is to use the largefile version since it was
# presumably changed on purpose.
#
# Finally, the merge.applyupdates function will then take care of
# writing the files into the working copy and lfcommands.updatelfiles
# will update the largefiles.
def overridemanifestmerge(origfn, repo, p1, p2, pa, branchmerge, force,
                          partial, acceptremote=False):
    overwrite = force and not branchmerge
    actions = origfn(repo, p1, p2, pa, branchmerge, force, partial,
                     acceptremote)
    processed = []

    for action in actions:
        if overwrite:
            processed.append(action)
            continue
        f, m, args, msg = action

-        choices = (_('&Largefile'), _('&Normal file'))
-
        splitstandin = lfutil.splitstandin(f)
        if (m == "g" and splitstandin is not None and
            splitstandin in p1 and f in p2):
            # Case 1: normal file in the working copy, largefile in
            # the second parent
            lfile = splitstandin
            standin = f
            msg = _('%s has been turned into a largefile\n'
-                    'use (l)argefile or keep as (n)ormal file?') % lfile
-            if repo.ui.promptchoice(msg, choices, 0) == 0:
+                    'use (l)argefile or keep as (n)ormal file?'
+                    '$$ &Largefile $$ &Normal file') % lfile
+            if repo.ui.promptchoice(msg, 0) == 0:
                processed.append((lfile, "r", None, msg))
                processed.append((standin, "g", (p2.flags(standin),), msg))
            else:
                processed.append((standin, "r", None, msg))
        elif m == "g" and lfutil.standin(f) in p1 and f in p2:
            # Case 2: largefile in the working copy, normal file in
            # the second parent
            standin = lfutil.standin(f)
            lfile = f
            msg = _('%s has been turned into a normal file\n'
-                    'keep as (l)argefile or use (n)ormal file?') % lfile
-            if repo.ui.promptchoice(msg, choices, 0) == 0:
+                    'keep as (l)argefile or use (n)ormal file?'
+                    '$$ &Largefile $$ &Normal file') % lfile
+            if repo.ui.promptchoice(msg, 0) == 0:
                processed.append((lfile, "r", None, msg))
            else:
                processed.append((standin, "r", None, msg))
                processed.append((lfile, "g", (p2.flags(lfile),), msg))
        else:
            processed.append(action)

    return processed

# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits, and copy/rename +
# edit without prompting the user.
def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca):
    # Use better variable names here. Because this is a wrapper we cannot
    # change the variable names in the function declaration.
    fcdest, fcother, fcancestor = fcd, fco, fca
    if not lfutil.isstandin(orig):
        return origfn(repo, mynode, orig, fcdest, fcother, fcancestor)
    else:
        if not fcother.cmp(fcdest): # files identical?
            return None

        # backwards, use working dir parent as ancestor
        if fcancestor == fcother:
            fcancestor = fcdest.parents()[0]

        if orig != fcother.path():
            repo.ui.status(_('merging %s and %s to %s\n')
                           % (lfutil.splitstandin(orig),
                              lfutil.splitstandin(fcother.path()),
                              lfutil.splitstandin(fcdest.path())))
        else:
            repo.ui.status(_('merging %s\n')
                           % lfutil.splitstandin(fcdest.path()))

        if fcancestor.path() != fcother.path() and fcother.data() == \
                fcancestor.data():
            return 0
        if fcancestor.path() != fcdest.path() and fcdest.data() == \
                fcancestor.data():
            repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
            return 0

        if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
-                             'keep (l)ocal or take (o)ther?') %
-                               lfutil.splitstandin(orig),
-                               (_('&Local'), _('&Other')), 0) == 0:
+                             'keep (l)ocal or take (o)ther?'
+                             '$$ &Local $$ &Other') %
+                               lfutil.splitstandin(orig), 0) == 0:
            return 0
        else:
            repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
            return 0

# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile in that function it
# checks if the destination largefile already exists. It also keeps a
# list of copied files so that the largefiles can be copied and the
# dirstate updated.
def overridecopy(orig, ui, repo, pats, opts, rename=False):
    # doesn't remove largefile on rename
    if len(pats) < 2:
        # this isn't legal, let the original function deal with it
        return orig(ui, repo, pats, opts, rename)

    def makestandin(relpath):
        path = scmutil.canonpath(repo.root, repo.getcwd(), relpath)
        return os.path.join(repo.wjoin(lfutil.standin(path)))

    fullpats = scmutil.expandpats(pats)
    dest = fullpats[-1]

    if os.path.isdir(dest):
        if not os.path.isdir(makestandin(dest)):
            os.makedirs(makestandin(dest))
    # This could copy both lfiles and normal files in one command,
    # but we don't want to do that. First replace their matcher to
    # only match normal files and run it, then replace it to just
    # match largefiles and run it again.
    nonormalfiles = False
    nolfiles = False
    try:
        try:
            installnormalfilesmatchfn(repo[None].manifest())
            result = orig(ui, repo, pats, opts, rename)
        except util.Abort, e:
            if str(e) != _('no files to copy'):
                raise e
            else:
                nonormalfiles = True
            result = 0
    finally:
        restorematchfn()

    # The first rename can cause our current working directory to be removed.
    # In that case there is nothing left to copy/rename so just quit.
    try:
        repo.getcwd()
    except OSError:
        return result

    try:
        try:
            # When we call orig below it creates the standins but we don't add
            # them to the dir state until later so lock during that time.
            wlock = repo.wlock()

            manifest = repo[None].manifest()
            oldmatch = None # for the closure
            def overridematch(ctx, pats=[], opts={}, globbed=False,
                    default='relpath'):
                newpats = []
                # The patterns were previously mangled to add the standin
                # directory; we need to remove that now
                for pat in pats:
                    if match_.patkind(pat) is None and lfutil.shortname in pat:
                        newpats.append(pat.replace(lfutil.shortname, ''))
                    else:
                        newpats.append(pat)
                match = oldmatch(ctx, newpats, opts, globbed, default)
                m = copy.copy(match)
                lfile = lambda f: lfutil.standin(f) in manifest
                m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
                m._fmap = set(m._files)
                m._always = False
                origmatchfn = m.matchfn
                m.matchfn = lambda f: (lfutil.isstandin(f) and
                                       (f in manifest) and
                                       origmatchfn(lfutil.splitstandin(f)) or
                                       None)
                return m
            oldmatch = installmatchfn(overridematch)
            listpats = []
            for pat in pats:
                if match_.patkind(pat) is not None:
                    listpats.append(pat)
                else:
                    listpats.append(makestandin(pat))

            try:
                origcopyfile = util.copyfile
                copiedfiles = []
                def overridecopyfile(src, dest):
                    if (lfutil.shortname in src and
                        dest.startswith(repo.wjoin(lfutil.shortname))):
                        destlfile = dest.replace(lfutil.shortname, '')
                        if not opts['force'] and os.path.exists(destlfile):
                            raise IOError('',
                                _('destination largefile already exists'))
                    copiedfiles.append((src, dest))
                    origcopyfile(src, dest)

                util.copyfile = overridecopyfile
                result += orig(ui, repo, listpats, opts, rename)
            finally:
                util.copyfile = origcopyfile

            lfdirstate = lfutil.openlfdirstate(ui, repo)
            for (src, dest) in copiedfiles:
                if (lfutil.shortname in src and
                    dest.startswith(repo.wjoin(lfutil.shortname))):
                    srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
                    destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
                    destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
                    if not os.path.isdir(destlfiledir):
                        os.makedirs(destlfiledir)
                    if rename:
                        os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
                        lfdirstate.remove(srclfile)
                    else:
                        util.copyfile(repo.wjoin(srclfile),
                                      repo.wjoin(destlfile))

                    lfdirstate.add(destlfile)
            lfdirstate.write()
        except util.Abort, e:
            if str(e) != _('no files to copy'):
                raise e
            else:
                nolfiles = True
    finally:
        restorematchfn()
        wlock.release()

    if nolfiles and nonormalfiles:
        raise util.Abort(_('no files to copy'))

    return result

# When the user calls revert, we have to be careful to not revert any
# changes to other largefiles accidentally. This means we have to keep
# track of the largefiles that are being reverted so we only pull down
# the necessary largefiles.
#
# Standins are only updated (to match the hash of largefiles) before
# commits. Update the standins then run the original revert, changing
# the matcher to hit standins instead of largefiles. Based on the
# resulting standins update the largefiles. Then return the standins
# to their proper state
def overriderevert(orig, ui, repo, *pats, **opts):
    # Because we put the standins in a bad state (by updating them)
    # and then return them to a correct state we need to lock to
    # prevent others from changing them in their incorrect state.
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        (modified, added, removed, missing, unknown, ignored, clean) = \
            lfutil.lfdirstatestatus(lfdirstate, repo, repo['.'].rev())
        lfdirstate.write()
        for lfile in modified:
            lfutil.updatestandin(repo, lfutil.standin(lfile))
        for lfile in missing:
            if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
                os.unlink(repo.wjoin(lfutil.standin(lfile)))

        try:
            ctx = scmutil.revsingle(repo, opts.get('rev'))
            oldmatch = None # for the closure
            def overridematch(ctx, pats=[], opts={}, globbed=False,
                    default='relpath'):
                match = oldmatch(ctx, pats, opts, globbed, default)
                m = copy.copy(match)
                def tostandin(f):
                    if lfutil.standin(f) in ctx:
                        return lfutil.standin(f)
                    elif lfutil.standin(f) in repo[None]:
                        return None
                    return f
                m._files = [tostandin(f) for f in m._files]
                m._files = [f for f in m._files if f is not None]
                m._fmap = set(m._files)
                m._always = False
                origmatchfn = m.matchfn
                def matchfn(f):
                    if lfutil.isstandin(f):
                        # We need to keep track of what largefiles are being
                        # matched so we know which ones to update later --
                        # otherwise we accidentally revert changes to other
                        # largefiles. This is repo-specific, so duckpunch the
                        # repo object to keep the list of largefiles for us
                        # later.
                        if origmatchfn(lfutil.splitstandin(f)) and \
                                (f in repo[None] or f in ctx):
                            lfileslist = getattr(repo, '_lfilestoupdate', [])
                            lfileslist.append(lfutil.splitstandin(f))
                            repo._lfilestoupdate = lfileslist
                            return True
                        else:
                            return False
                    return origmatchfn(f)
                m.matchfn = matchfn
                return m
            oldmatch = installmatchfn(overridematch)
            scmutil.match
            matches = overridematch(repo[None], pats, opts)
            orig(ui, repo, *pats, **opts)
        finally:
            restorematchfn()
        lfileslist = getattr(repo, '_lfilestoupdate', [])
        lfcommands.updatelfiles(ui, repo, filelist=lfileslist,
                                printmessage=False)

        # empty out the largefiles list so we start fresh next time
        repo._lfilestoupdate = []
        for lfile in modified:
            if lfile in lfileslist:
                if os.path.exists(repo.wjoin(lfutil.standin(lfile))) and lfile\
                        in repo['.']:
                    lfutil.writestandin(repo, lfutil.standin(lfile),
                        repo['.'][lfile].data().strip(),
                        'x' in repo['.'][lfile].flags())
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        for lfile in added:
            standin = lfutil.standin(lfile)
            if standin not in ctx and (standin in matches or opts.get('all')):
                if lfile in lfdirstate:
                    lfdirstate.drop(lfile)
                util.unlinkpath(repo.wjoin(standin))
        lfdirstate.write()
    finally:
        wlock.release()

def hgupdaterepo(orig, repo, node, overwrite):
    if not overwrite:
        # Only call updatelfiles on the standins that have changed to save time
        oldstandins = lfutil.getstandinsstate(repo)

    result = orig(repo, node, overwrite)

    filelist = None
    if not overwrite:
        newstandins = lfutil.getstandinsstate(repo)
        filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
    lfcommands.updatelfiles(repo.ui, repo, filelist=filelist)
    return result

def hgmerge(orig, repo, node, force=None, remind=True):
    result = orig(repo, node, force, remind)
    lfcommands.updatelfiles(repo.ui, repo)
    return result

# When we rebase a repository with remotely changed largefiles, we need to
# take some extra care so that the largefiles are correctly updated in the
# working copy
def overridepull(orig, ui, repo, source=None, **opts):
    revsprepull = len(repo)
    if not source:
        source = 'default'
    repo.lfpullsource = source
    if opts.get('rebase', False):
        repo._isrebasing = True
        try:
            if opts.get('update'):
                del opts['update']
                ui.debug('--update and --rebase are not compatible, ignoring '
                         'the update flag\n')
            del opts['rebase']
            cmdutil.bailifchanged(repo)
            origpostincoming = commands.postincoming
            def _dummy(*args, **kwargs):
                pass
            commands.postincoming = _dummy
            try:
                result = commands.pull(ui, repo, source, **opts)
            finally:
                commands.postincoming = origpostincoming
            revspostpull = len(repo)
            if revspostpull > revsprepull:
                result = result or rebase.rebase(ui, repo)
        finally:
            repo._isrebasing = False
    else:
        result = orig(ui, repo, source, **opts)
    revspostpull = len(repo)
    lfrevs = opts.get('lfrev', [])
    if opts.get('all_largefiles'):
        lfrevs.append('pulled()')
    if lfrevs and revspostpull > revsprepull:
        numcached = 0
        repo.firstpulled = revsprepull # for pulled() revset expression
        try:
            for rev in scmutil.revrange(repo, lfrevs):
                ui.note(_('pulling largefiles for revision %s\n') % rev)
                (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
                numcached += len(cached)
        finally:
            del repo.firstpulled
        ui.status(_("%d largefiles cached\n") % numcached)
    return result

def pulledrevsetsymbol(repo, subset, x):
    """``pulled()``
    Changesets that just has been pulled.

    Only available with largefiles from pull --lfrev expressions.

    .. container:: verbose

      Some examples:

      - pull largefiles for all new changesets::

          hg pull -lfrev "pulled()"

      - pull largefiles for all new branch heads::

          hg pull -lfrev "head(pulled()) and not closed()"

    """

    try:
        firstpulled = repo.firstpulled
    except AttributeError:
        raise util.Abort(_("pulled() only available in --lfrev"))
    return [r for r in subset if r >= firstpulled]

def overrideclone(orig, ui, source, dest=None, **opts):
    d = dest
    if d is None:
        d = hg.defaultdest(source)
    if opts.get('all_largefiles') and not hg.islocal(d):
        raise util.Abort(_(
            '--all-largefiles is incompatible with non-local destination %s' %
            d))

    return orig(ui, source, dest, **opts)

def hgclone(orig, ui, opts, *args, **kwargs):
    result = orig(ui, opts, *args, **kwargs)

    if result is not None:
        sourcerepo, destrepo = result
        repo = destrepo.local()

        # Caching is implicitly limited to 'rev' option, since the dest repo was
        # truncated at that point. The user may expect a download count with
        # this option, so attempt whether or not this is a largefile repo.
        if opts.get('all_largefiles'):
            success, missing = lfcommands.downloadlfiles(ui, repo, None)

            if missing != 0:
                return None

    return result

def overriderebase(orig, ui, repo, **opts):
    repo._isrebasing = True
    try:
        return orig(ui, repo, **opts)
    finally:
        repo._isrebasing = False

def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
            prefix=None, mtime=None, subrepos=None):
    # No need to lock because we are only reading history and
    # largefile caches, neither of which are modified.
    lfcommands.cachelfiles(repo.ui, repo, node)

    if kind not in archival.archivers:
        raise util.Abort(_("unknown archive type '%s'") % kind)

    ctx = repo[node]

    if kind == 'files':
        if prefix:
            raise util.Abort(
                _('cannot give prefix when archiving to files'))
    else:
        prefix = archival.tidyprefix(dest, kind, prefix)

    def write(name, mode, islink, getdata):
        if matchfn and not matchfn(name):
            return
        data = getdata()
        if decode:
            data = repo.wwritedata(name, data)
        archiver.addfile(prefix + name, mode, islink, data)

    archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])

    if repo.ui.configbool("ui", "archivemeta", True):
        def metadata():
            base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
                hex(repo.changelog.node(0)), hex(node), ctx.branch())

            tags = ''.join('tag: %s\n' % t for t in ctx.tags()
                           if repo.tagtype(t) == 'global')
            if not tags:
                repo.ui.pushbuffer()
                opts = {'template': '{latesttag}\n{latesttagdistance}',
                        'style': '', 'patch': None, 'git': None}
                cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
                ltags, dist = repo.ui.popbuffer().split('\n')
                tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
                tags += 'latesttagdistance: %s\n' % dist

            return base + tags

        write('.hg_archival.txt', 0644, False, metadata)

    for f in ctx:
        ff = ctx.flags(f)
        getdata = ctx[f].data
        if lfutil.isstandin(f):
            path = lfutil.findfile(repo, getdata().strip())
            if path is None:
                raise util.Abort(
                    _('largefile %s not found in repo store or system cache')
                    % lfutil.splitstandin(f))
            f = lfutil.splitstandin(f)

            def getdatafn():
                fd = None
                try:
                    fd = open(path, 'rb')
                    return fd.read()
                finally:
                    if fd:
                        fd.close()

            getdata = getdatafn
        write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
882 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
883
883
884 if subrepos:
884 if subrepos:
885 for subpath in sorted(ctx.substate):
885 for subpath in sorted(ctx.substate):
886 sub = ctx.sub(subpath)
886 sub = ctx.sub(subpath)
887 submatch = match_.narrowmatcher(subpath, matchfn)
887 submatch = match_.narrowmatcher(subpath, matchfn)
888 sub.archive(repo.ui, archiver, prefix, submatch)
888 sub.archive(repo.ui, archiver, prefix, submatch)
889
889
890 archiver.done()
890 archiver.done()
891
891
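# A rough sketch of the standin naming convention relied on above: a largefile
# 'data/big.bin' is tracked through a small standin under a '.hglf/' prefix
# whose content is the hash used to look the real file up in the store or
# cache. The prefix is hard-coded here only for illustration; the real helpers
# are lfutil.standin/splitstandin/findfile.
_STANDIN_PREFIX = '.hglf/'

def _splitstandin_sketch(path):
    # '.hglf/data/big.bin' -> 'data/big.bin', anything else -> None
    if path.startswith(_STANDIN_PREFIX):
        return path[len(_STANDIN_PREFIX):]
    return None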
892 def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
892 def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
893 repo._get(repo._state + ('hg',))
893 repo._get(repo._state + ('hg',))
894 rev = repo._state[1]
894 rev = repo._state[1]
895 ctx = repo._repo[rev]
895 ctx = repo._repo[rev]
896
896
897 lfcommands.cachelfiles(ui, repo._repo, ctx.node())
897 lfcommands.cachelfiles(ui, repo._repo, ctx.node())
898
898
899 def write(name, mode, islink, getdata):
899 def write(name, mode, islink, getdata):
900 # At this point, the standin has been replaced with the largefile name,
900 # At this point, the standin has been replaced with the largefile name,
901 # so the normal matcher works here without the lfutil variants.
901 # so the normal matcher works here without the lfutil variants.
902 if match and not match(f):
902 if match and not match(f):
903 return
903 return
904 data = getdata()
904 data = getdata()
905
905
906 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
906 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
907
907
908 for f in ctx:
908 for f in ctx:
909 ff = ctx.flags(f)
909 ff = ctx.flags(f)
910 getdata = ctx[f].data
910 getdata = ctx[f].data
911 if lfutil.isstandin(f):
911 if lfutil.isstandin(f):
912 path = lfutil.findfile(repo._repo, getdata().strip())
912 path = lfutil.findfile(repo._repo, getdata().strip())
913 if path is None:
913 if path is None:
914 raise util.Abort(
914 raise util.Abort(
915 _('largefile %s not found in repo store or system cache')
915 _('largefile %s not found in repo store or system cache')
916 % lfutil.splitstandin(f))
916 % lfutil.splitstandin(f))
917 f = lfutil.splitstandin(f)
917 f = lfutil.splitstandin(f)
918
918
919 def getdatafn():
919 def getdatafn():
920 fd = None
920 fd = None
921 try:
921 try:
922 fd = open(os.path.join(prefix, path), 'rb')
922 fd = open(os.path.join(prefix, path), 'rb')
923 return fd.read()
923 return fd.read()
924 finally:
924 finally:
925 if fd:
925 if fd:
926 fd.close()
926 fd.close()
927
927
928 getdata = getdatafn
928 getdata = getdatafn
929
929
930 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
930 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
931
931
932 for subpath in sorted(ctx.substate):
932 for subpath in sorted(ctx.substate):
933 sub = ctx.sub(subpath)
933 sub = ctx.sub(subpath)
934 submatch = match_.narrowmatcher(subpath, match)
934 submatch = match_.narrowmatcher(subpath, match)
935 sub.archive(ui, archiver, os.path.join(prefix, repo._path) + '/',
935 sub.archive(ui, archiver, os.path.join(prefix, repo._path) + '/',
936 submatch)
936 submatch)
937
937
938 # If a largefile is modified, the change is not reflected in its
938 # If a largefile is modified, the change is not reflected in its
939 # standin until a commit. cmdutil.bailifchanged() raises an exception
939 # standin until a commit. cmdutil.bailifchanged() raises an exception
940 # if the repo has uncommitted changes. Wrap it to also check if
940 # if the repo has uncommitted changes. Wrap it to also check if
941 # largefiles were changed. This is used by bisect and backout.
941 # largefiles were changed. This is used by bisect and backout.
942 def overridebailifchanged(orig, repo):
942 def overridebailifchanged(orig, repo):
943 orig(repo)
943 orig(repo)
944 repo.lfstatus = True
944 repo.lfstatus = True
945 modified, added, removed, deleted = repo.status()[:4]
945 modified, added, removed, deleted = repo.status()[:4]
946 repo.lfstatus = False
946 repo.lfstatus = False
947 if modified or added or removed or deleted:
947 if modified or added or removed or deleted:
948 raise util.Abort(_('outstanding uncommitted changes'))
948 raise util.Abort(_('outstanding uncommitted changes'))
949
949
950 # Fetch doesn't use cmdutil.bailifchanged so override it to add the check
950 # Fetch doesn't use cmdutil.bailifchanged so override it to add the check
951 def overridefetch(orig, ui, repo, *pats, **opts):
951 def overridefetch(orig, ui, repo, *pats, **opts):
952 repo.lfstatus = True
952 repo.lfstatus = True
953 modified, added, removed, deleted = repo.status()[:4]
953 modified, added, removed, deleted = repo.status()[:4]
954 repo.lfstatus = False
954 repo.lfstatus = False
955 if modified or added or removed or deleted:
955 if modified or added or removed or deleted:
956 raise util.Abort(_('outstanding uncommitted changes'))
956 raise util.Abort(_('outstanding uncommitted changes'))
957 return orig(ui, repo, *pats, **opts)
957 return orig(ui, repo, *pats, **opts)
958
958
959 def overrideforget(orig, ui, repo, *pats, **opts):
959 def overrideforget(orig, ui, repo, *pats, **opts):
960 installnormalfilesmatchfn(repo[None].manifest())
960 installnormalfilesmatchfn(repo[None].manifest())
961 result = orig(ui, repo, *pats, **opts)
961 result = orig(ui, repo, *pats, **opts)
962 restorematchfn()
962 restorematchfn()
963 m = scmutil.match(repo[None], pats, opts)
963 m = scmutil.match(repo[None], pats, opts)
964
964
965 try:
965 try:
966 repo.lfstatus = True
966 repo.lfstatus = True
967 s = repo.status(match=m, clean=True)
967 s = repo.status(match=m, clean=True)
968 finally:
968 finally:
969 repo.lfstatus = False
969 repo.lfstatus = False
970 forget = sorted(s[0] + s[1] + s[3] + s[6])
970 forget = sorted(s[0] + s[1] + s[3] + s[6])
971 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
971 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
972
972
973 for f in forget:
973 for f in forget:
974 if lfutil.standin(f) not in repo.dirstate and not \
974 if lfutil.standin(f) not in repo.dirstate and not \
975 os.path.isdir(m.rel(lfutil.standin(f))):
975 os.path.isdir(m.rel(lfutil.standin(f))):
976 ui.warn(_('not removing %s: file is already untracked\n')
976 ui.warn(_('not removing %s: file is already untracked\n')
977 % m.rel(f))
977 % m.rel(f))
978 result = 1
978 result = 1
979
979
980 for f in forget:
980 for f in forget:
981 if ui.verbose or not m.exact(f):
981 if ui.verbose or not m.exact(f):
982 ui.status(_('removing %s\n') % m.rel(f))
982 ui.status(_('removing %s\n') % m.rel(f))
983
983
984 # Need to lock because standin files are deleted and then removed from
984 # Need to lock because standin files are deleted and then removed from
985 # the repository, and we could race in between.
985 # the repository, and we could race in between.
986 wlock = repo.wlock()
986 wlock = repo.wlock()
987 try:
987 try:
988 lfdirstate = lfutil.openlfdirstate(ui, repo)
988 lfdirstate = lfutil.openlfdirstate(ui, repo)
989 for f in forget:
989 for f in forget:
990 if lfdirstate[f] == 'a':
990 if lfdirstate[f] == 'a':
991 lfdirstate.drop(f)
991 lfdirstate.drop(f)
992 else:
992 else:
993 lfdirstate.remove(f)
993 lfdirstate.remove(f)
994 lfdirstate.write()
994 lfdirstate.write()
995 standins = [lfutil.standin(f) for f in forget]
995 standins = [lfutil.standin(f) for f in forget]
996 for f in standins:
996 for f in standins:
997 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
997 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
998 repo[None].forget(standins)
998 repo[None].forget(standins)
999 finally:
999 finally:
1000 wlock.release()
1000 wlock.release()
1001
1001
1002 return result
1002 return result
1003
1003
1004 def getoutgoinglfiles(ui, repo, dest=None, **opts):
1004 def getoutgoinglfiles(ui, repo, dest=None, **opts):
1005 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1005 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1006 dest, branches = hg.parseurl(dest, opts.get('branch'))
1006 dest, branches = hg.parseurl(dest, opts.get('branch'))
1007 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
1007 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
1008 if revs:
1008 if revs:
1009 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
1009 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
1010
1010
1011 try:
1011 try:
1012 remote = hg.peer(repo, opts, dest)
1012 remote = hg.peer(repo, opts, dest)
1013 except error.RepoError:
1013 except error.RepoError:
1014 return None
1014 return None
1015 outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=False)
1015 outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=False)
1016 if not outgoing.missing:
1016 if not outgoing.missing:
1017 return outgoing.missing
1017 return outgoing.missing
1018 o = repo.changelog.nodesbetween(outgoing.missing, revs)[0]
1018 o = repo.changelog.nodesbetween(outgoing.missing, revs)[0]
1019 if opts.get('newest_first'):
1019 if opts.get('newest_first'):
1020 o.reverse()
1020 o.reverse()
1021
1021
1022 toupload = set()
1022 toupload = set()
1023 for n in o:
1023 for n in o:
1024 parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
1024 parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
1025 ctx = repo[n]
1025 ctx = repo[n]
1026 files = set(ctx.files())
1026 files = set(ctx.files())
1027 if len(parents) == 2:
1027 if len(parents) == 2:
1028 mc = ctx.manifest()
1028 mc = ctx.manifest()
1029 mp1 = ctx.parents()[0].manifest()
1029 mp1 = ctx.parents()[0].manifest()
1030 mp2 = ctx.parents()[1].manifest()
1030 mp2 = ctx.parents()[1].manifest()
1031 for f in mp1:
1031 for f in mp1:
1032 if f not in mc:
1032 if f not in mc:
1033 files.add(f)
1033 files.add(f)
1034 for f in mp2:
1034 for f in mp2:
1035 if f not in mc:
1035 if f not in mc:
1036 files.add(f)
1036 files.add(f)
1037 for f in mc:
1037 for f in mc:
1038 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
1038 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
1039 files.add(f)
1039 files.add(f)
1040 toupload = toupload.union(
1040 toupload = toupload.union(
1041 set([f for f in files if lfutil.isstandin(f) and f in ctx]))
1041 set([f for f in files if lfutil.isstandin(f) and f in ctx]))
1042 return sorted(toupload)
1042 return sorted(toupload)
1043
1043
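# The two-parent branch above computes "files touched by a merge" by hand; the
# same logic on plain manifest-like dicts, as a standalone sketch:
def _mergechanged_sketch(mc, mp1, mp2):
    files = set()
    for f in mp1:
        if f not in mc:
            files.add(f)        # removed relative to p1
    for f in mp2:
        if f not in mc:
            files.add(f)        # removed relative to p2
    for f in mc:
        if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
            files.add(f)        # differs from at least one parent
    return files

# e.g. _mergechanged_sketch({'a': 1}, {'a': 1, 'b': 2}, {'a': 3}) == set(['a', 'b'])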
1044 def overrideoutgoing(orig, ui, repo, dest=None, **opts):
1044 def overrideoutgoing(orig, ui, repo, dest=None, **opts):
1045 result = orig(ui, repo, dest, **opts)
1045 result = orig(ui, repo, dest, **opts)
1046
1046
1047 if opts.pop('large', None):
1047 if opts.pop('large', None):
1048 toupload = getoutgoinglfiles(ui, repo, dest, **opts)
1048 toupload = getoutgoinglfiles(ui, repo, dest, **opts)
1049 if toupload is None:
1049 if toupload is None:
1050 ui.status(_('largefiles: no remote repo\n'))
1050 ui.status(_('largefiles: no remote repo\n'))
1051 elif not toupload:
1051 elif not toupload:
1052 ui.status(_('largefiles: no files to upload\n'))
1052 ui.status(_('largefiles: no files to upload\n'))
1053 else:
1053 else:
1054 ui.status(_('largefiles to upload:\n'))
1054 ui.status(_('largefiles to upload:\n'))
1055 for file in toupload:
1055 for file in toupload:
1056 ui.status(lfutil.splitstandin(file) + '\n')
1056 ui.status(lfutil.splitstandin(file) + '\n')
1057 ui.status('\n')
1057 ui.status('\n')
1058
1058
1059 return result
1059 return result
1060
1060
1061 def overridesummary(orig, ui, repo, *pats, **opts):
1061 def overridesummary(orig, ui, repo, *pats, **opts):
1062 try:
1062 try:
1063 repo.lfstatus = True
1063 repo.lfstatus = True
1064 orig(ui, repo, *pats, **opts)
1064 orig(ui, repo, *pats, **opts)
1065 finally:
1065 finally:
1066 repo.lfstatus = False
1066 repo.lfstatus = False
1067
1067
1068 if opts.pop('large', None):
1068 if opts.pop('large', None):
1069 toupload = getoutgoinglfiles(ui, repo, None, **opts)
1069 toupload = getoutgoinglfiles(ui, repo, None, **opts)
1070 if toupload is None:
1070 if toupload is None:
1071 # i18n: column positioning for "hg summary"
1071 # i18n: column positioning for "hg summary"
1072 ui.status(_('largefiles: (no remote repo)\n'))
1072 ui.status(_('largefiles: (no remote repo)\n'))
1073 elif not toupload:
1073 elif not toupload:
1074 # i18n: column positioning for "hg summary"
1074 # i18n: column positioning for "hg summary"
1075 ui.status(_('largefiles: (no files to upload)\n'))
1075 ui.status(_('largefiles: (no files to upload)\n'))
1076 else:
1076 else:
1077 # i18n: column positioning for "hg summary"
1077 # i18n: column positioning for "hg summary"
1078 ui.status(_('largefiles: %d to upload\n') % len(toupload))
1078 ui.status(_('largefiles: %d to upload\n') % len(toupload))
1079
1079
1080 def scmutiladdremove(orig, repo, pats=[], opts={}, dry_run=None,
1080 def scmutiladdremove(orig, repo, pats=[], opts={}, dry_run=None,
1081 similarity=None):
1081 similarity=None):
1082 if not lfutil.islfilesrepo(repo):
1082 if not lfutil.islfilesrepo(repo):
1083 return orig(repo, pats, opts, dry_run, similarity)
1083 return orig(repo, pats, opts, dry_run, similarity)
1084 # Get the list of missing largefiles so we can remove them
1084 # Get the list of missing largefiles so we can remove them
1085 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1085 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1086 s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
1086 s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
1087 False, False)
1087 False, False)
1088 (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
1088 (unsure, modified, added, removed, missing, unknown, ignored, clean) = s
1089
1089
1090 # Call into the normal remove code, but let the original addremove handle
1090 # Call into the normal remove code, but let the original addremove handle
1091 # the removal of the standin. Monkey patching here makes sure we don't
1091 # the removal of the standin. Monkey patching here makes sure we don't
1092 # remove the standin in the largefiles code, preventing a very confused
1092 # remove the standin in the largefiles code, preventing a very confused
1093 # state later.
1093 # state later.
1094 if missing:
1094 if missing:
1095 m = [repo.wjoin(f) for f in missing]
1095 m = [repo.wjoin(f) for f in missing]
1096 repo._isaddremove = True
1096 repo._isaddremove = True
1097 removelargefiles(repo.ui, repo, *m, **opts)
1097 removelargefiles(repo.ui, repo, *m, **opts)
1098 repo._isaddremove = False
1098 repo._isaddremove = False
1099 # Call into the normal add code, and any files that *should* be added as
1099 # Call into the normal add code, and any files that *should* be added as
1100 # largefiles will be
1100 # largefiles will be
1101 addlargefiles(repo.ui, repo, *pats, **opts)
1101 addlargefiles(repo.ui, repo, *pats, **opts)
1102 # Now that we've handled largefiles, hand off to the original addremove
1102 # Now that we've handled largefiles, hand off to the original addremove
1103 # function to take care of the rest. Make sure it doesn't do anything with
1103 # function to take care of the rest. Make sure it doesn't do anything with
1104 # largefiles by installing a matcher that will ignore them.
1104 # largefiles by installing a matcher that will ignore them.
1105 installnormalfilesmatchfn(repo[None].manifest())
1105 installnormalfilesmatchfn(repo[None].manifest())
1106 result = orig(repo, pats, opts, dry_run, similarity)
1106 result = orig(repo, pats, opts, dry_run, similarity)
1107 restorematchfn()
1107 restorematchfn()
1108 return result
1108 return result
1109
1109
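# installnormalfilesmatchfn/restorematchfn bracket the wrapped call by
# monkey-patching scmutil.match; a defensive variant of that bracket using
# try/finally, as a hypothetical helper (not used by this changeset):
def _withnormalmatch_sketch(repo, func, *args, **kwargs):
    installnormalfilesmatchfn(repo[None].manifest())
    try:
        return func(*args, **kwargs)
    finally:
        restorematchfn()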
1110 # Calling purge with --all will cause the largefiles to be deleted.
1110 # Calling purge with --all will cause the largefiles to be deleted.
1111 # Override repo.status to prevent this from happening.
1111 # Override repo.status to prevent this from happening.
1112 def overridepurge(orig, ui, repo, *dirs, **opts):
1112 def overridepurge(orig, ui, repo, *dirs, **opts):
1113 # XXX large file status is buggy when used on a repo proxy.
1113 # XXX large file status is buggy when used on a repo proxy.
1114 # XXX this needs to be investigated.
1114 # XXX this needs to be investigated.
1115 repo = repo.unfiltered()
1115 repo = repo.unfiltered()
1116 oldstatus = repo.status
1116 oldstatus = repo.status
1117 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1117 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1118 clean=False, unknown=False, listsubrepos=False):
1118 clean=False, unknown=False, listsubrepos=False):
1119 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1119 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1120 listsubrepos)
1120 listsubrepos)
1121 lfdirstate = lfutil.openlfdirstate(ui, repo)
1121 lfdirstate = lfutil.openlfdirstate(ui, repo)
1122 modified, added, removed, deleted, unknown, ignored, clean = r
1122 modified, added, removed, deleted, unknown, ignored, clean = r
1123 unknown = [f for f in unknown if lfdirstate[f] == '?']
1123 unknown = [f for f in unknown if lfdirstate[f] == '?']
1124 ignored = [f for f in ignored if lfdirstate[f] == '?']
1124 ignored = [f for f in ignored if lfdirstate[f] == '?']
1125 return modified, added, removed, deleted, unknown, ignored, clean
1125 return modified, added, removed, deleted, unknown, ignored, clean
1126 repo.status = overridestatus
1126 repo.status = overridestatus
1127 orig(ui, repo, *dirs, **opts)
1127 orig(ui, repo, *dirs, **opts)
1128 repo.status = oldstatus
1128 repo.status = oldstatus
1129
1129
1130 def overriderollback(orig, ui, repo, **opts):
1130 def overriderollback(orig, ui, repo, **opts):
1131 result = orig(ui, repo, **opts)
1131 result = orig(ui, repo, **opts)
1132 merge.update(repo, node=None, branchmerge=False, force=True,
1132 merge.update(repo, node=None, branchmerge=False, force=True,
1133 partial=lfutil.isstandin)
1133 partial=lfutil.isstandin)
1134 wlock = repo.wlock()
1134 wlock = repo.wlock()
1135 try:
1135 try:
1136 lfdirstate = lfutil.openlfdirstate(ui, repo)
1136 lfdirstate = lfutil.openlfdirstate(ui, repo)
1137 lfiles = lfutil.listlfiles(repo)
1137 lfiles = lfutil.listlfiles(repo)
1138 oldlfiles = lfutil.listlfiles(repo, repo[None].parents()[0].rev())
1138 oldlfiles = lfutil.listlfiles(repo, repo[None].parents()[0].rev())
1139 for file in lfiles:
1139 for file in lfiles:
1140 if file in oldlfiles:
1140 if file in oldlfiles:
1141 lfdirstate.normallookup(file)
1141 lfdirstate.normallookup(file)
1142 else:
1142 else:
1143 lfdirstate.add(file)
1143 lfdirstate.add(file)
1144 lfdirstate.write()
1144 lfdirstate.write()
1145 finally:
1145 finally:
1146 wlock.release()
1146 wlock.release()
1147 return result
1147 return result
1148
1148
1149 def overridetransplant(orig, ui, repo, *revs, **opts):
1149 def overridetransplant(orig, ui, repo, *revs, **opts):
1150 try:
1150 try:
1151 oldstandins = lfutil.getstandinsstate(repo)
1151 oldstandins = lfutil.getstandinsstate(repo)
1152 repo._istransplanting = True
1152 repo._istransplanting = True
1153 result = orig(ui, repo, *revs, **opts)
1153 result = orig(ui, repo, *revs, **opts)
1154 newstandins = lfutil.getstandinsstate(repo)
1154 newstandins = lfutil.getstandinsstate(repo)
1155 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1155 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1156 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1156 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1157 printmessage=True)
1157 printmessage=True)
1158 finally:
1158 finally:
1159 repo._istransplanting = False
1159 repo._istransplanting = False
1160 return result
1160 return result
1161
1161
1162 def overridecat(orig, ui, repo, file1, *pats, **opts):
1162 def overridecat(orig, ui, repo, file1, *pats, **opts):
1163 ctx = scmutil.revsingle(repo, opts.get('rev'))
1163 ctx = scmutil.revsingle(repo, opts.get('rev'))
1164 err = 1
1164 err = 1
1165 notbad = set()
1165 notbad = set()
1166 m = scmutil.match(ctx, (file1,) + pats, opts)
1166 m = scmutil.match(ctx, (file1,) + pats, opts)
1167 origmatchfn = m.matchfn
1167 origmatchfn = m.matchfn
1168 def lfmatchfn(f):
1168 def lfmatchfn(f):
1169 lf = lfutil.splitstandin(f)
1169 lf = lfutil.splitstandin(f)
1170 if lf is None:
1170 if lf is None:
1171 return origmatchfn(f)
1171 return origmatchfn(f)
1172 notbad.add(lf)
1172 notbad.add(lf)
1173 return origmatchfn(lf)
1173 return origmatchfn(lf)
1174 m.matchfn = lfmatchfn
1174 m.matchfn = lfmatchfn
1175 origbadfn = m.bad
1175 origbadfn = m.bad
1176 def lfbadfn(f, msg):
1176 def lfbadfn(f, msg):
1177 if f not in notbad:
1177 if f not in notbad:
1178 return origbadfn(f, msg)
1178 return origbadfn(f, msg)
1179 m.bad = lfbadfn
1179 m.bad = lfbadfn
1180 for f in ctx.walk(m):
1180 for f in ctx.walk(m):
1181 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1181 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1182 pathname=f)
1182 pathname=f)
1183 lf = lfutil.splitstandin(f)
1183 lf = lfutil.splitstandin(f)
1184 if lf is None:
1184 if lf is None:
1185 # duplicating unreachable code from commands.cat
1185 # duplicating unreachable code from commands.cat
1186 data = ctx[f].data()
1186 data = ctx[f].data()
1187 if opts.get('decode'):
1187 if opts.get('decode'):
1188 data = repo.wwritedata(f, data)
1188 data = repo.wwritedata(f, data)
1189 fp.write(data)
1189 fp.write(data)
1190 else:
1190 else:
1191 hash = lfutil.readstandin(repo, lf, ctx.rev())
1191 hash = lfutil.readstandin(repo, lf, ctx.rev())
1192 if not lfutil.inusercache(repo.ui, hash):
1192 if not lfutil.inusercache(repo.ui, hash):
1193 store = basestore._openstore(repo)
1193 store = basestore._openstore(repo)
1194 success, missing = store.get([(lf, hash)])
1194 success, missing = store.get([(lf, hash)])
1195 if len(success) != 1:
1195 if len(success) != 1:
1196 raise util.Abort(
1196 raise util.Abort(
1197 _('largefile %s is not in cache and could not be '
1197 _('largefile %s is not in cache and could not be '
1198 'downloaded') % lf)
1198 'downloaded') % lf)
1199 path = lfutil.usercachepath(repo.ui, hash)
1199 path = lfutil.usercachepath(repo.ui, hash)
1200 fpin = open(path, "rb")
1200 fpin = open(path, "rb")
1201 for chunk in util.filechunkiter(fpin, 128 * 1024):
1201 for chunk in util.filechunkiter(fpin, 128 * 1024):
1202 fp.write(chunk)
1202 fp.write(chunk)
1203 fpin.close()
1203 fpin.close()
1204 fp.close()
1204 fp.close()
1205 err = 0
1205 err = 0
1206 return err
1206 return err
1207
1207
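# overridecat streams largefile data in 128 KiB chunks via util.filechunkiter;
# the equivalent copy loop in plain Python, as a sketch:
def _copychunks_sketch(src, dst, chunksize=128 * 1024):
    while True:
        chunk = src.read(chunksize)
        if not chunk:
            break
        dst.write(chunk)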
1208 def mercurialsinkbefore(orig, sink):
1208 def mercurialsinkbefore(orig, sink):
1209 sink.repo._isconverting = True
1209 sink.repo._isconverting = True
1210 orig(sink)
1210 orig(sink)
1211
1211
1212 def mercurialsinkafter(orig, sink):
1212 def mercurialsinkafter(orig, sink):
1213 sink.repo._isconverting = False
1213 sink.repo._isconverting = False
1214 orig(sink)
1214 orig(sink)
@@ -1,561 +1,561 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46 '''
46 '''
47
47
48 import os, errno, socket, tempfile, cStringIO
48 import os, errno, socket, tempfile, cStringIO
49 import email.MIMEMultipart, email.MIMEBase
49 import email.MIMEMultipart, email.MIMEBase
50 import email.Utils, email.Encoders, email.Generator
50 import email.Utils, email.Encoders, email.Generator
51 from mercurial import cmdutil, commands, hg, mail, patch, util
51 from mercurial import cmdutil, commands, hg, mail, patch, util
52 from mercurial import scmutil
52 from mercurial import scmutil
53 from mercurial.i18n import _
53 from mercurial.i18n import _
54 from mercurial.node import bin
54 from mercurial.node import bin
55
55
56 cmdtable = {}
56 cmdtable = {}
57 command = cmdutil.command(cmdtable)
57 command = cmdutil.command(cmdtable)
58 testedwith = 'internal'
58 testedwith = 'internal'
59
59
60 def prompt(ui, prompt, default=None, rest=':'):
60 def prompt(ui, prompt, default=None, rest=':'):
61 if default:
61 if default:
62 prompt += ' [%s]' % default
62 prompt += ' [%s]' % default
63 return ui.prompt(prompt + rest, default)
63 return ui.prompt(prompt + rest, default)
64
64
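# How the prompt text above is assembled, as pure string logic; the header and
# default values below are examples only:
def _promptstring_sketch(text, default=None, rest=':'):
    if default:
        text += ' [%s]' % default
    return text + rest

assert _promptstring_sketch('From', 'me@example.com') == 'From [me@example.com]:'
assert _promptstring_sketch('Subject', rest=': ') == 'Subject: '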
65 def introwanted(opts, number):
65 def introwanted(opts, number):
66 '''is an introductory message apparently wanted?'''
66 '''is an introductory message apparently wanted?'''
67 return number > 1 or opts.get('intro') or opts.get('desc')
67 return number > 1 or opts.get('intro') or opts.get('desc')
68
68
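# A quick illustration of introwanted() with hypothetical option dicts:
assert introwanted({'intro': None, 'desc': ''}, 3)       # multi-patch series
assert not introwanted({'intro': None, 'desc': ''}, 1)   # lone patch, no intro asked for
assert introwanted({'intro': True, 'desc': ''}, 1)       # --intro forces one anyway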
69 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
69 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
70 patchname=None):
70 patchname=None):
71
71
72 desc = []
72 desc = []
73 node = None
73 node = None
74 body = ''
74 body = ''
75
75
76 for line in patchlines:
76 for line in patchlines:
77 if line.startswith('#'):
77 if line.startswith('#'):
78 if line.startswith('# Node ID'):
78 if line.startswith('# Node ID'):
79 node = line.split()[-1]
79 node = line.split()[-1]
80 continue
80 continue
81 if line.startswith('diff -r') or line.startswith('diff --git'):
81 if line.startswith('diff -r') or line.startswith('diff --git'):
82 break
82 break
83 desc.append(line)
83 desc.append(line)
84
84
85 if not patchname and not node:
85 if not patchname and not node:
86 raise ValueError
86 raise ValueError
87
87
88 if opts.get('attach') and not opts.get('body'):
88 if opts.get('attach') and not opts.get('body'):
89 body = ('\n'.join(desc[1:]).strip() or
89 body = ('\n'.join(desc[1:]).strip() or
90 'Patch subject is complete summary.')
90 'Patch subject is complete summary.')
91 body += '\n\n\n'
91 body += '\n\n\n'
92
92
93 if opts.get('plain'):
93 if opts.get('plain'):
94 while patchlines and patchlines[0].startswith('# '):
94 while patchlines and patchlines[0].startswith('# '):
95 patchlines.pop(0)
95 patchlines.pop(0)
96 if patchlines:
96 if patchlines:
97 patchlines.pop(0)
97 patchlines.pop(0)
98 while patchlines and not patchlines[0].strip():
98 while patchlines and not patchlines[0].strip():
99 patchlines.pop(0)
99 patchlines.pop(0)
100
100
101 ds = patch.diffstat(patchlines, git=opts.get('git'))
101 ds = patch.diffstat(patchlines, git=opts.get('git'))
102 if opts.get('diffstat'):
102 if opts.get('diffstat'):
103 body += ds + '\n\n'
103 body += ds + '\n\n'
104
104
105 addattachment = opts.get('attach') or opts.get('inline')
105 addattachment = opts.get('attach') or opts.get('inline')
106 if not addattachment or opts.get('body'):
106 if not addattachment or opts.get('body'):
107 body += '\n'.join(patchlines)
107 body += '\n'.join(patchlines)
108
108
109 if addattachment:
109 if addattachment:
110 msg = email.MIMEMultipart.MIMEMultipart()
110 msg = email.MIMEMultipart.MIMEMultipart()
111 if body:
111 if body:
112 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
112 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
113 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
113 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
114 opts.get('test'))
114 opts.get('test'))
115 binnode = bin(node)
115 binnode = bin(node)
116 # if node is mq patch, it will have the patch file's name as a tag
116 # if node is mq patch, it will have the patch file's name as a tag
117 if not patchname:
117 if not patchname:
118 patchtags = [t for t in repo.nodetags(binnode)
118 patchtags = [t for t in repo.nodetags(binnode)
119 if t.endswith('.patch') or t.endswith('.diff')]
119 if t.endswith('.patch') or t.endswith('.diff')]
120 if patchtags:
120 if patchtags:
121 patchname = patchtags[0]
121 patchname = patchtags[0]
122 elif total > 1:
122 elif total > 1:
123 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
123 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
124 binnode, seqno=idx,
124 binnode, seqno=idx,
125 total=total)
125 total=total)
126 else:
126 else:
127 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
127 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
128 disposition = 'inline'
128 disposition = 'inline'
129 if opts.get('attach'):
129 if opts.get('attach'):
130 disposition = 'attachment'
130 disposition = 'attachment'
131 p['Content-Disposition'] = disposition + '; filename=' + patchname
131 p['Content-Disposition'] = disposition + '; filename=' + patchname
132 msg.attach(p)
132 msg.attach(p)
133 else:
133 else:
134 msg = mail.mimetextpatch(body, display=opts.get('test'))
134 msg = mail.mimetextpatch(body, display=opts.get('test'))
135
135
136 flag = ' '.join(opts.get('flag'))
136 flag = ' '.join(opts.get('flag'))
137 if flag:
137 if flag:
138 flag = ' ' + flag
138 flag = ' ' + flag
139
139
140 subj = desc[0].strip().rstrip('. ')
140 subj = desc[0].strip().rstrip('. ')
141 if not numbered:
141 if not numbered:
142 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
142 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
143 else:
143 else:
144 tlen = len(str(total))
144 tlen = len(str(total))
145 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
145 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
146 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
146 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
147 msg['X-Mercurial-Node'] = node
147 msg['X-Mercurial-Node'] = node
148 return msg, subj, ds
148 return msg, subj, ds
149
149
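# The numbered subject line pads the index to the width of the total so the
# messages sort correctly; a quick check of the format used above:
assert ('[PATCH %0*d of %d%s] %s'
        % (len(str(12)), 3, 12, '', 'fix foo')) == '[PATCH 03 of 12] fix foo'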
150 emailopts = [
150 emailopts = [
151 ('', 'body', None, _('send patches as inline message text (default)')),
151 ('', 'body', None, _('send patches as inline message text (default)')),
152 ('a', 'attach', None, _('send patches as attachments')),
152 ('a', 'attach', None, _('send patches as attachments')),
153 ('i', 'inline', None, _('send patches as inline attachments')),
153 ('i', 'inline', None, _('send patches as inline attachments')),
154 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
154 ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
155 ('c', 'cc', [], _('email addresses of copy recipients')),
155 ('c', 'cc', [], _('email addresses of copy recipients')),
156 ('', 'confirm', None, _('ask for confirmation before sending')),
156 ('', 'confirm', None, _('ask for confirmation before sending')),
157 ('d', 'diffstat', None, _('add diffstat output to messages')),
157 ('d', 'diffstat', None, _('add diffstat output to messages')),
158 ('', 'date', '', _('use the given date as the sending date')),
158 ('', 'date', '', _('use the given date as the sending date')),
159 ('', 'desc', '', _('use the given file as the series description')),
159 ('', 'desc', '', _('use the given file as the series description')),
160 ('f', 'from', '', _('email address of sender')),
160 ('f', 'from', '', _('email address of sender')),
161 ('n', 'test', None, _('print messages that would be sent')),
161 ('n', 'test', None, _('print messages that would be sent')),
162 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
162 ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
163 ('', 'reply-to', [], _('email addresses replies should be sent to')),
163 ('', 'reply-to', [], _('email addresses replies should be sent to')),
164 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
164 ('s', 'subject', '', _('subject of first message (intro or single patch)')),
165 ('', 'in-reply-to', '', _('message identifier to reply to')),
165 ('', 'in-reply-to', '', _('message identifier to reply to')),
166 ('', 'flag', [], _('flags to add in subject prefixes')),
166 ('', 'flag', [], _('flags to add in subject prefixes')),
167 ('t', 'to', [], _('email addresses of recipients'))]
167 ('t', 'to', [], _('email addresses of recipients'))]
168
168
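# A long option name maps to its key in the opts dict with dashes turned into
# underscores (getaddrs() near the end of this file relies on this); a small
# sketch of that mapping:
def _optkey_sketch(header):
    return header.replace('-', '_').lower()

assert _optkey_sketch('Reply-To') == 'reply_to'
assert _optkey_sketch('In-Reply-To') == 'in_reply_to'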
169 @command('email',
169 @command('email',
170 [('g', 'git', None, _('use git extended diff format')),
170 [('g', 'git', None, _('use git extended diff format')),
171 ('', 'plain', None, _('omit hg patch header')),
171 ('', 'plain', None, _('omit hg patch header')),
172 ('o', 'outgoing', None,
172 ('o', 'outgoing', None,
173 _('send changes not found in the target repository')),
173 _('send changes not found in the target repository')),
174 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
174 ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
175 ('', 'bundlename', 'bundle',
175 ('', 'bundlename', 'bundle',
176 _('name of the bundle attachment file'), _('NAME')),
176 _('name of the bundle attachment file'), _('NAME')),
177 ('r', 'rev', [], _('a revision to send'), _('REV')),
177 ('r', 'rev', [], _('a revision to send'), _('REV')),
178 ('', 'force', None, _('run even when remote repository is unrelated '
178 ('', 'force', None, _('run even when remote repository is unrelated '
179 '(with -b/--bundle)')),
179 '(with -b/--bundle)')),
180 ('', 'base', [], _('a base changeset to specify instead of a destination '
180 ('', 'base', [], _('a base changeset to specify instead of a destination '
181 '(with -b/--bundle)'), _('REV')),
181 '(with -b/--bundle)'), _('REV')),
182 ('', 'intro', None, _('send an introduction email for a single patch')),
182 ('', 'intro', None, _('send an introduction email for a single patch')),
183 ] + emailopts + commands.remoteopts,
183 ] + emailopts + commands.remoteopts,
184 _('hg email [OPTION]... [DEST]...'))
184 _('hg email [OPTION]... [DEST]...'))
185 def patchbomb(ui, repo, *revs, **opts):
185 def patchbomb(ui, repo, *revs, **opts):
186 '''send changesets by email
186 '''send changesets by email
187
187
188 By default, diffs are sent in the format generated by
188 By default, diffs are sent in the format generated by
189 :hg:`export`, one per message. The series starts with a "[PATCH 0
189 :hg:`export`, one per message. The series starts with a "[PATCH 0
190 of N]" introduction, which describes the series as a whole.
190 of N]" introduction, which describes the series as a whole.
191
191
192 Each patch email has a Subject line of "[PATCH M of N] ...", using
192 Each patch email has a Subject line of "[PATCH M of N] ...", using
193 the first line of the changeset description as the subject text.
193 the first line of the changeset description as the subject text.
194 The message contains two or three parts. First, the changeset
194 The message contains two or three parts. First, the changeset
195 description.
195 description.
196
196
197 With the -d/--diffstat option, if the diffstat program is
197 With the -d/--diffstat option, if the diffstat program is
198 installed, the result of running diffstat on the patch is inserted.
198 installed, the result of running diffstat on the patch is inserted.
199
199
200 Finally, the patch itself, as generated by :hg:`export`.
200 Finally, the patch itself, as generated by :hg:`export`.
201
201
202 With the -d/--diffstat or --confirm options, you will be presented
202 With the -d/--diffstat or --confirm options, you will be presented
203 with a final summary of all messages and asked for confirmation before
203 with a final summary of all messages and asked for confirmation before
204 the messages are sent.
204 the messages are sent.
205
205
206 By default the patch is included as text in the email body for
206 By default the patch is included as text in the email body for
207 easy reviewing. Using the -a/--attach option will instead create
207 easy reviewing. Using the -a/--attach option will instead create
208 an attachment for the patch. With -i/--inline an inline attachment
208 an attachment for the patch. With -i/--inline an inline attachment
209 will be created. You can include a patch both as text in the email
209 will be created. You can include a patch both as text in the email
210 body and as a regular or an inline attachment by combining the
210 body and as a regular or an inline attachment by combining the
211 -a/--attach or -i/--inline with the --body option.
211 -a/--attach or -i/--inline with the --body option.
212
212
213 With -o/--outgoing, emails will be generated for patches not found
213 With -o/--outgoing, emails will be generated for patches not found
214 in the destination repository (or only those which are ancestors
214 in the destination repository (or only those which are ancestors
215 of the specified revisions, if any are provided).
215 of the specified revisions, if any are provided).
216
216
217 With -b/--bundle, changesets are selected as for --outgoing, but a
217 With -b/--bundle, changesets are selected as for --outgoing, but a
218 single email containing a binary Mercurial bundle as an attachment
218 single email containing a binary Mercurial bundle as an attachment
219 will be sent.
219 will be sent.
220
220
221 With -m/--mbox, instead of previewing each patchbomb message in a
221 With -m/--mbox, instead of previewing each patchbomb message in a
222 pager or sending the messages directly, it will create a UNIX
222 pager or sending the messages directly, it will create a UNIX
223 mailbox file with the patch emails. This mailbox file can be
223 mailbox file with the patch emails. This mailbox file can be
224 previewed with any mail user agent which supports UNIX mbox
224 previewed with any mail user agent which supports UNIX mbox
225 files.
225 files.
226
226
227 With -n/--test, all steps will run, but mail will not be sent.
227 With -n/--test, all steps will run, but mail will not be sent.
228 You will be prompted for an email recipient address, a subject and
228 You will be prompted for an email recipient address, a subject and
229 an introductory message describing the patches of your patchbomb.
229 an introductory message describing the patches of your patchbomb.
230 Then when all is done, patchbomb messages are displayed. If the
230 Then when all is done, patchbomb messages are displayed. If the
231 PAGER environment variable is set, your pager will be fired up once
231 PAGER environment variable is set, your pager will be fired up once
232 for each patchbomb message, so you can verify everything is alright.
232 for each patchbomb message, so you can verify everything is alright.
233
233
234 In case email sending fails, you will find a backup of your series
234 In case email sending fails, you will find a backup of your series
235 introductory message in ``.hg/last-email.txt``.
235 introductory message in ``.hg/last-email.txt``.
236
236
237 Examples::
237 Examples::
238
238
239 hg email -r 3000 # send patch 3000 only
239 hg email -r 3000 # send patch 3000 only
240 hg email -r 3000 -r 3001 # send patches 3000 and 3001
240 hg email -r 3000 -r 3001 # send patches 3000 and 3001
241 hg email -r 3000:3005 # send patches 3000 through 3005
241 hg email -r 3000:3005 # send patches 3000 through 3005
242 hg email 3000 # send patch 3000 (deprecated)
242 hg email 3000 # send patch 3000 (deprecated)
243
243
244 hg email -o # send all patches not in default
244 hg email -o # send all patches not in default
245 hg email -o DEST # send all patches not in DEST
245 hg email -o DEST # send all patches not in DEST
246 hg email -o -r 3000 # send all ancestors of 3000 not in default
246 hg email -o -r 3000 # send all ancestors of 3000 not in default
247 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
247 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
248
248
249 hg email -b # send bundle of all patches not in default
249 hg email -b # send bundle of all patches not in default
250 hg email -b DEST # send bundle of all patches not in DEST
250 hg email -b DEST # send bundle of all patches not in DEST
251 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
251 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
252 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
252 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
253
253
254 hg email -o -m mbox && # generate an mbox file...
254 hg email -o -m mbox && # generate an mbox file...
255 mutt -R -f mbox # ... and view it with mutt
255 mutt -R -f mbox # ... and view it with mutt
256 hg email -o -m mbox && # generate an mbox file ...
256 hg email -o -m mbox && # generate an mbox file ...
257 formail -s sendmail \\ # ... and use formail to send from the mbox
257 formail -s sendmail \\ # ... and use formail to send from the mbox
258 -bm -t < mbox # ... using sendmail
258 -bm -t < mbox # ... using sendmail
259
259
260 Before using this command, you will need to enable email in your
260 Before using this command, you will need to enable email in your
261 hgrc. See the [email] section in hgrc(5) for details.
261 hgrc. See the [email] section in hgrc(5) for details.
262 '''
262 '''
263
263
264 _charsets = mail._charsets(ui)
264 _charsets = mail._charsets(ui)
265
265
266 bundle = opts.get('bundle')
266 bundle = opts.get('bundle')
267 date = opts.get('date')
267 date = opts.get('date')
268 mbox = opts.get('mbox')
268 mbox = opts.get('mbox')
269 outgoing = opts.get('outgoing')
269 outgoing = opts.get('outgoing')
270 rev = opts.get('rev')
270 rev = opts.get('rev')
271 # internal option used by pbranches
271 # internal option used by pbranches
272 patches = opts.get('patches')
272 patches = opts.get('patches')
273
273
274 def getoutgoing(dest, revs):
274 def getoutgoing(dest, revs):
275 '''Return the revisions present locally but not in dest'''
275 '''Return the revisions present locally but not in dest'''
276 url = ui.expandpath(dest or 'default-push', dest or 'default')
276 url = ui.expandpath(dest or 'default-push', dest or 'default')
277 url = hg.parseurl(url)[0]
277 url = hg.parseurl(url)[0]
278 ui.status(_('comparing with %s\n') % util.hidepassword(url))
278 ui.status(_('comparing with %s\n') % util.hidepassword(url))
279
279
280 revs = [r for r in scmutil.revrange(repo, revs) if r >= 0]
280 revs = [r for r in scmutil.revrange(repo, revs) if r >= 0]
281 if not revs:
281 if not revs:
282 revs = [len(repo) - 1]
282 revs = [len(repo) - 1]
283 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
283 revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
284 if not revs:
284 if not revs:
285 ui.status(_("no changes found\n"))
285 ui.status(_("no changes found\n"))
286 return []
286 return []
287 return [str(r) for r in revs]
287 return [str(r) for r in revs]
288
288
289 def getpatches(revs):
289 def getpatches(revs):
290 for r in scmutil.revrange(repo, revs):
290 for r in scmutil.revrange(repo, revs):
291 output = cStringIO.StringIO()
291 output = cStringIO.StringIO()
292 cmdutil.export(repo, [r], fp=output,
292 cmdutil.export(repo, [r], fp=output,
293 opts=patch.diffopts(ui, opts))
293 opts=patch.diffopts(ui, opts))
294 yield output.getvalue().split('\n')
294 yield output.getvalue().split('\n')
295
295
296 def getbundle(dest):
296 def getbundle(dest):
297 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
297 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
298 tmpfn = os.path.join(tmpdir, 'bundle')
298 tmpfn = os.path.join(tmpdir, 'bundle')
299 try:
299 try:
300 commands.bundle(ui, repo, tmpfn, dest, **opts)
300 commands.bundle(ui, repo, tmpfn, dest, **opts)
301 fp = open(tmpfn, 'rb')
301 fp = open(tmpfn, 'rb')
302 data = fp.read()
302 data = fp.read()
303 fp.close()
303 fp.close()
304 return data
304 return data
305 finally:
305 finally:
306 try:
306 try:
307 os.unlink(tmpfn)
307 os.unlink(tmpfn)
308 except OSError:
308 except OSError:
309 pass
309 pass
310 os.rmdir(tmpdir)
310 os.rmdir(tmpdir)
311
311
312 if not (opts.get('test') or mbox):
312 if not (opts.get('test') or mbox):
313 # really sending
313 # really sending
314 mail.validateconfig(ui)
314 mail.validateconfig(ui)
315
315
316 if not (revs or rev or outgoing or bundle or patches):
316 if not (revs or rev or outgoing or bundle or patches):
317 raise util.Abort(_('specify at least one changeset with -r or -o'))
317 raise util.Abort(_('specify at least one changeset with -r or -o'))
318
318
319 if outgoing and bundle:
319 if outgoing and bundle:
320 raise util.Abort(_("--outgoing mode always on with --bundle;"
320 raise util.Abort(_("--outgoing mode always on with --bundle;"
321 " do not re-specify --outgoing"))
321 " do not re-specify --outgoing"))
322
322
323 if outgoing or bundle:
323 if outgoing or bundle:
324 if len(revs) > 1:
324 if len(revs) > 1:
325 raise util.Abort(_("too many destinations"))
325 raise util.Abort(_("too many destinations"))
326 dest = revs and revs[0] or None
326 dest = revs and revs[0] or None
327 revs = []
327 revs = []
328
328
329 if rev:
329 if rev:
330 if revs:
330 if revs:
331 raise util.Abort(_('use only one form to specify the revision'))
331 raise util.Abort(_('use only one form to specify the revision'))
332 revs = rev
332 revs = rev
333
333
334 if outgoing:
334 if outgoing:
335 revs = getoutgoing(dest, rev)
335 revs = getoutgoing(dest, rev)
336 if bundle:
336 if bundle:
337 opts['revs'] = revs
337 opts['revs'] = revs
338
338
339 # start
339 # start
340 if date:
340 if date:
341 start_time = util.parsedate(date)
341 start_time = util.parsedate(date)
342 else:
342 else:
343 start_time = util.makedate()
343 start_time = util.makedate()
344
344
345 def genmsgid(id):
345 def genmsgid(id):
346 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
346 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
347
347
348 def getdescription(body, sender):
348 def getdescription(body, sender):
349 if opts.get('desc'):
349 if opts.get('desc'):
350 body = open(opts.get('desc')).read()
350 body = open(opts.get('desc')).read()
351 else:
351 else:
352 ui.write(_('\nWrite the introductory message for the '
352 ui.write(_('\nWrite the introductory message for the '
353 'patch series.\n\n'))
353 'patch series.\n\n'))
354 body = ui.edit(body, sender)
354 body = ui.edit(body, sender)
355 # Save series description in case sendmail fails
355 # Save series description in case sendmail fails
356 msgfile = repo.opener('last-email.txt', 'wb')
356 msgfile = repo.opener('last-email.txt', 'wb')
357 msgfile.write(body)
357 msgfile.write(body)
358 msgfile.close()
358 msgfile.close()
359 return body
359 return body
360
360
361 def getpatchmsgs(patches, patchnames=None):
361 def getpatchmsgs(patches, patchnames=None):
362 msgs = []
362 msgs = []
363
363
364 ui.write(_('this patch series consists of %d patches.\n\n')
364 ui.write(_('this patch series consists of %d patches.\n\n')
365 % len(patches))
365 % len(patches))
366
366
367 # build the intro message, or skip it if the user declines
367 # build the intro message, or skip it if the user declines
368 if introwanted(opts, len(patches)):
368 if introwanted(opts, len(patches)):
369 msg = makeintro(patches)
369 msg = makeintro(patches)
370 if msg:
370 if msg:
371 msgs.append(msg)
371 msgs.append(msg)
372
372
373 # are we going to send more than one message?
373 # are we going to send more than one message?
374 numbered = len(msgs) + len(patches) > 1
374 numbered = len(msgs) + len(patches) > 1
375
375
376 # now generate the actual patch messages
376 # now generate the actual patch messages
377 name = None
377 name = None
378 for i, p in enumerate(patches):
378 for i, p in enumerate(patches):
379 if patchnames:
379 if patchnames:
380 name = patchnames[i]
380 name = patchnames[i]
381 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
381 msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
382 len(patches), numbered, name)
382 len(patches), numbered, name)
383 msgs.append(msg)
383 msgs.append(msg)
384
384
385 return msgs
385 return msgs
386
386
387 def makeintro(patches):
387 def makeintro(patches):
388 tlen = len(str(len(patches)))
388 tlen = len(str(len(patches)))
389
389
390 flag = opts.get('flag') or ''
390 flag = opts.get('flag') or ''
391 if flag:
391 if flag:
392 flag = ' ' + ' '.join(flag)
392 flag = ' ' + ' '.join(flag)
393 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
393 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
394
394
395 subj = (opts.get('subject') or
395 subj = (opts.get('subject') or
396 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
396 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
397 if not subj:
397 if not subj:
398 return None # skip intro if the user doesn't bother
398 return None # skip intro if the user doesn't bother
399
399
400 subj = prefix + ' ' + subj
400 subj = prefix + ' ' + subj
401
401
402 body = ''
402 body = ''
403 if opts.get('diffstat'):
403 if opts.get('diffstat'):
404 # generate a cumulative diffstat of the whole patch series
404 # generate a cumulative diffstat of the whole patch series
405 diffstat = patch.diffstat(sum(patches, []))
405 diffstat = patch.diffstat(sum(patches, []))
406 body = '\n' + diffstat
406 body = '\n' + diffstat
407 else:
407 else:
408 diffstat = None
408 diffstat = None
409
409
410 body = getdescription(body, sender)
410 body = getdescription(body, sender)
411 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
411 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
412 msg['Subject'] = mail.headencode(ui, subj, _charsets,
412 msg['Subject'] = mail.headencode(ui, subj, _charsets,
413 opts.get('test'))
413 opts.get('test'))
414 return (msg, subj, diffstat)
414 return (msg, subj, diffstat)
415
415
416 def getbundlemsgs(bundle):
416 def getbundlemsgs(bundle):
417 subj = (opts.get('subject')
417 subj = (opts.get('subject')
418 or prompt(ui, 'Subject:', 'A bundle for your repository'))
418 or prompt(ui, 'Subject:', 'A bundle for your repository'))
419
419
420 body = getdescription('', sender)
420 body = getdescription('', sender)
421 msg = email.MIMEMultipart.MIMEMultipart()
421 msg = email.MIMEMultipart.MIMEMultipart()
422 if body:
422 if body:
423 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
423 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
424 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
424 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
425 datapart.set_payload(bundle)
425 datapart.set_payload(bundle)
426 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
426 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
427 datapart.add_header('Content-Disposition', 'attachment',
427 datapart.add_header('Content-Disposition', 'attachment',
428 filename=bundlename)
428 filename=bundlename)
429 email.Encoders.encode_base64(datapart)
429 email.Encoders.encode_base64(datapart)
430 msg.attach(datapart)
430 msg.attach(datapart)
431 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
431 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
432 return [(msg, subj, None)]
432 return [(msg, subj, None)]
433
433
434 sender = (opts.get('from') or ui.config('email', 'from') or
434 sender = (opts.get('from') or ui.config('email', 'from') or
435 ui.config('patchbomb', 'from') or
435 ui.config('patchbomb', 'from') or
436 prompt(ui, 'From', ui.username()))
436 prompt(ui, 'From', ui.username()))
437
437
438 if patches:
438 if patches:
439 msgs = getpatchmsgs(patches, opts.get('patchnames'))
439 msgs = getpatchmsgs(patches, opts.get('patchnames'))
440 elif bundle:
440 elif bundle:
441 msgs = getbundlemsgs(getbundle(dest))
441 msgs = getbundlemsgs(getbundle(dest))
442 else:
442 else:
443 msgs = getpatchmsgs(list(getpatches(revs)))
443 msgs = getpatchmsgs(list(getpatches(revs)))
444
444
445 showaddrs = []
445 showaddrs = []
446
446
447 def getaddrs(header, ask=False, default=None):
447 def getaddrs(header, ask=False, default=None):
448 configkey = header.lower()
448 configkey = header.lower()
449 opt = header.replace('-', '_').lower()
449 opt = header.replace('-', '_').lower()
450 addrs = opts.get(opt)
450 addrs = opts.get(opt)
451 if addrs:
451 if addrs:
452 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
452 showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
453 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
453 return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
454
454
455 # not on the command line: fall back to config and then maybe ask
455 # not on the command line: fall back to config and then maybe ask
456 addr = (ui.config('email', configkey) or
456 addr = (ui.config('email', configkey) or
457 ui.config('patchbomb', configkey) or
457 ui.config('patchbomb', configkey) or
458 '')
458 '')
459 if not addr and ask:
459 if not addr and ask:
460 addr = prompt(ui, header, default=default)
460 addr = prompt(ui, header, default=default)
461 if addr:
461 if addr:
462 showaddrs.append('%s: %s' % (header, addr))
462 showaddrs.append('%s: %s' % (header, addr))
463 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
463 return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
464 else:
464 else:
465 return default
465 return default
466
466
467 to = getaddrs('To', ask=True)
467 to = getaddrs('To', ask=True)
468 if not to:
468 if not to:
469 # we can get here in non-interactive mode
469 # we can get here in non-interactive mode
470 raise util.Abort(_('no recipient addresses provided'))
470 raise util.Abort(_('no recipient addresses provided'))
471 cc = getaddrs('Cc', ask=True, default='') or []
471 cc = getaddrs('Cc', ask=True, default='') or []
472 bcc = getaddrs('Bcc') or []
472 bcc = getaddrs('Bcc') or []
473 replyto = getaddrs('Reply-To')
473 replyto = getaddrs('Reply-To')
474
474
475 if opts.get('diffstat') or opts.get('confirm'):
475 if opts.get('diffstat') or opts.get('confirm'):
476 ui.write(_('\nFinal summary:\n\n'))
476 ui.write(_('\nFinal summary:\n\n'))
477 ui.write(('From: %s\n' % sender))
477 ui.write(('From: %s\n' % sender))
478 for addr in showaddrs:
478 for addr in showaddrs:
479 ui.write('%s\n' % addr)
479 ui.write('%s\n' % addr)
480 for m, subj, ds in msgs:
480 for m, subj, ds in msgs:
481 ui.write(('Subject: %s\n' % subj))
481 ui.write(('Subject: %s\n' % subj))
482 if ds:
482 if ds:
483 ui.write(ds)
483 ui.write(ds)
484 ui.write('\n')
484 ui.write('\n')
485 if ui.promptchoice(_('are you sure you want to send (yn)?'),
485 if ui.promptchoice(_('are you sure you want to send (yn)?'
486 (_('&Yes'), _('&No'))):
486 '$$ &Yes $$ &No')):
487 raise util.Abort(_('patchbomb canceled'))
487 raise util.Abort(_('patchbomb canceled'))
488
488
489 ui.write('\n')
489 ui.write('\n')
490
490
491 parent = opts.get('in_reply_to') or None
491 parent = opts.get('in_reply_to') or None
492 # angle brackets may be omitted; they're not semantically part of the msg-id
492 # angle brackets may be omitted; they're not semantically part of the msg-id
493 if parent is not None:
493 if parent is not None:
494 if not parent.startswith('<'):
494 if not parent.startswith('<'):
495 parent = '<' + parent
495 parent = '<' + parent
496 if not parent.endswith('>'):
496 if not parent.endswith('>'):
497 parent += '>'
497 parent += '>'
498
498
499 sender_addr = email.Utils.parseaddr(sender)[1]
499 sender_addr = email.Utils.parseaddr(sender)[1]
500 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
500 sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
501 sendmail = None
501 sendmail = None
502 for i, (m, subj, ds) in enumerate(msgs):
502 for i, (m, subj, ds) in enumerate(msgs):
503 try:
503 try:
504 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
504 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
505 except TypeError:
505 except TypeError:
506 m['Message-Id'] = genmsgid('patchbomb')
506 m['Message-Id'] = genmsgid('patchbomb')
507 if parent:
507 if parent:
508 m['In-Reply-To'] = parent
508 m['In-Reply-To'] = parent
509 m['References'] = parent
509 m['References'] = parent
510 if not parent or 'X-Mercurial-Node' not in m:
510 if not parent or 'X-Mercurial-Node' not in m:
511 parent = m['Message-Id']
511 parent = m['Message-Id']
512
512
513 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
513 m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
514 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
514 m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
515
515
516 start_time = (start_time[0] + 1, start_time[1])
516 start_time = (start_time[0] + 1, start_time[1])
517 m['From'] = sender
517 m['From'] = sender
518 m['To'] = ', '.join(to)
518 m['To'] = ', '.join(to)
519 if cc:
519 if cc:
520 m['Cc'] = ', '.join(cc)
520 m['Cc'] = ', '.join(cc)
521 if bcc:
521 if bcc:
522 m['Bcc'] = ', '.join(bcc)
522 m['Bcc'] = ', '.join(bcc)
523 if replyto:
523 if replyto:
524 m['Reply-To'] = ', '.join(replyto)
524 m['Reply-To'] = ', '.join(replyto)
525 if opts.get('test'):
525 if opts.get('test'):
526 ui.status(_('displaying '), subj, ' ...\n')
526 ui.status(_('displaying '), subj, ' ...\n')
527 ui.flush()
527 ui.flush()
528 if 'PAGER' in os.environ and not ui.plain():
528 if 'PAGER' in os.environ and not ui.plain():
529 fp = util.popen(os.environ['PAGER'], 'w')
529 fp = util.popen(os.environ['PAGER'], 'w')
530 else:
530 else:
531 fp = ui
531 fp = ui
532 generator = email.Generator.Generator(fp, mangle_from_=False)
532 generator = email.Generator.Generator(fp, mangle_from_=False)
533 try:
533 try:
534 generator.flatten(m, 0)
534 generator.flatten(m, 0)
535 fp.write('\n')
535 fp.write('\n')
536 except IOError, inst:
536 except IOError, inst:
537 if inst.errno != errno.EPIPE:
537 if inst.errno != errno.EPIPE:
538 raise
538 raise
539 if fp is not ui:
539 if fp is not ui:
540 fp.close()
540 fp.close()
541 else:
541 else:
542 if not sendmail:
542 if not sendmail:
543 verifycert = ui.config('smtp', 'verifycert')
543 verifycert = ui.config('smtp', 'verifycert')
544 if opts.get('insecure'):
544 if opts.get('insecure'):
545 ui.setconfig('smtp', 'verifycert', 'loose')
545 ui.setconfig('smtp', 'verifycert', 'loose')
546 try:
546 try:
547 sendmail = mail.connect(ui, mbox=mbox)
547 sendmail = mail.connect(ui, mbox=mbox)
548 finally:
548 finally:
549 ui.setconfig('smtp', 'verifycert', verifycert)
549 ui.setconfig('smtp', 'verifycert', verifycert)
550 ui.status(_('sending '), subj, ' ...\n')
550 ui.status(_('sending '), subj, ' ...\n')
551 ui.progress(_('sending'), i, item=subj, total=len(msgs))
551 ui.progress(_('sending'), i, item=subj, total=len(msgs))
552 if not mbox:
552 if not mbox:
553 # Exim does not remove the Bcc field
553 # Exim does not remove the Bcc field
554 del m['Bcc']
554 del m['Bcc']
555 fp = cStringIO.StringIO()
555 fp = cStringIO.StringIO()
556 generator = email.Generator.Generator(fp, mangle_from_=False)
556 generator = email.Generator.Generator(fp, mangle_from_=False)
557 generator.flatten(m, 0)
557 generator.flatten(m, 0)
558 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
558 sendmail(sender_addr, to + bcc + cc, fp.getvalue())
559
559
560 ui.progress(_('writing'), None)
560 ui.progress(_('writing'), None)
561 ui.progress(_('sending'), None)
561 ui.progress(_('sending'), None)
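The promptchoice calls above use the single-string prompt format this changeset introduces: the question and its choices are joined with '$$', and '&' marks each choice's response key. A minimal sketch of how such a string can be split apart (illustrative only; the real parsing lives in mercurial.ui):

    def splitprompt(prompt):
        # question first, then '$$'-separated choices with '&' before the key
        parts = prompt.split('$$')
        msg = parts[0].strip()
        choices = [p.strip() for p in parts[1:]]
        keys = [c[c.index('&') + 1].lower() for c in choices]
        labels = [c.replace('&', '') for c in choices]
        return msg, keys, labels

    print(splitprompt('are you sure you want to send (yn)?$$ &Yes $$ &No'))
    # ('are you sure you want to send (yn)?', ['y', 'n'], ['Yes', 'No'])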
@@ -1,680 +1,680 b''
1 # record.py
1 # record.py
2 #
2 #
3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''commands to interactively select changes for commit/qrefresh'''
8 '''commands to interactively select changes for commit/qrefresh'''
9
9
10 from mercurial.i18n import gettext, _
10 from mercurial.i18n import gettext, _
11 from mercurial import cmdutil, commands, extensions, hg, patch
11 from mercurial import cmdutil, commands, extensions, hg, patch
12 from mercurial import util
12 from mercurial import util
13 import copy, cStringIO, errno, os, re, shutil, tempfile
13 import copy, cStringIO, errno, os, re, shutil, tempfile
14
14
15 cmdtable = {}
15 cmdtable = {}
16 command = cmdutil.command(cmdtable)
16 command = cmdutil.command(cmdtable)
17 testedwith = 'internal'
17 testedwith = 'internal'
18
18
19 lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
19 lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
20
20
21 diffopts = [
21 diffopts = [
22 ('w', 'ignore-all-space', False,
22 ('w', 'ignore-all-space', False,
23 _('ignore white space when comparing lines')),
23 _('ignore white space when comparing lines')),
24 ('b', 'ignore-space-change', None,
24 ('b', 'ignore-space-change', None,
25 _('ignore changes in the amount of white space')),
25 _('ignore changes in the amount of white space')),
26 ('B', 'ignore-blank-lines', None,
26 ('B', 'ignore-blank-lines', None,
27 _('ignore changes whose lines are all blank')),
27 _('ignore changes whose lines are all blank')),
28 ]
28 ]
29
29
30 def scanpatch(fp):
30 def scanpatch(fp):
31 """like patch.iterhunks, but yield different events
31 """like patch.iterhunks, but yield different events
32
32
33 - ('file', [header_lines + fromfile + tofile])
33 - ('file', [header_lines + fromfile + tofile])
34 - ('context', [context_lines])
34 - ('context', [context_lines])
35 - ('hunk', [hunk_lines])
35 - ('hunk', [hunk_lines])
36 - ('range', (-start,len, +start,len, proc))
36 - ('range', (-start,len, +start,len, proc))
37 """
37 """
38 lr = patch.linereader(fp)
38 lr = patch.linereader(fp)
39
39
40 def scanwhile(first, p):
40 def scanwhile(first, p):
41 """scan lr while predicate holds"""
41 """scan lr while predicate holds"""
42 lines = [first]
42 lines = [first]
43 while True:
43 while True:
44 line = lr.readline()
44 line = lr.readline()
45 if not line:
45 if not line:
46 break
46 break
47 if p(line):
47 if p(line):
48 lines.append(line)
48 lines.append(line)
49 else:
49 else:
50 lr.push(line)
50 lr.push(line)
51 break
51 break
52 return lines
52 return lines
53
53
54 while True:
54 while True:
55 line = lr.readline()
55 line = lr.readline()
56 if not line:
56 if not line:
57 break
57 break
58 if line.startswith('diff --git a/') or line.startswith('diff -r '):
58 if line.startswith('diff --git a/') or line.startswith('diff -r '):
59 def notheader(line):
59 def notheader(line):
60 s = line.split(None, 1)
60 s = line.split(None, 1)
61 return not s or s[0] not in ('---', 'diff')
61 return not s or s[0] not in ('---', 'diff')
62 header = scanwhile(line, notheader)
62 header = scanwhile(line, notheader)
63 fromfile = lr.readline()
63 fromfile = lr.readline()
64 if fromfile.startswith('---'):
64 if fromfile.startswith('---'):
65 tofile = lr.readline()
65 tofile = lr.readline()
66 header += [fromfile, tofile]
66 header += [fromfile, tofile]
67 else:
67 else:
68 lr.push(fromfile)
68 lr.push(fromfile)
69 yield 'file', header
69 yield 'file', header
70 elif line[0] == ' ':
70 elif line[0] == ' ':
71 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
71 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
72 elif line[0] in '-+':
72 elif line[0] in '-+':
73 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
73 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
74 else:
74 else:
75 m = lines_re.match(line)
75 m = lines_re.match(line)
76 if m:
76 if m:
77 yield 'range', m.groups()
77 yield 'range', m.groups()
78 else:
78 else:
79 yield 'other', line
79 yield 'other', line
80
80
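A minimal sketch of the events scanpatch yields, assuming the extension is importable as hgext.record and fed a small hypothetical git-style diff:

    import cStringIO
    from hgext.record import scanpatch

    fp = cStringIO.StringIO(
        'diff --git a/foo b/foo\n'
        '--- a/foo\n'
        '+++ b/foo\n'
        '@@ -1,1 +1,1 @@\n'
        '-old\n'
        '+new\n')
    for event, data in scanpatch(fp):
        print(event)   # 'file', then 'range', then 'hunk'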
81 class header(object):
81 class header(object):
82 """patch header
82 """patch header
83
83
84 XXX shouldn't we move this to mercurial/patch.py ?
84 XXX shouldn't we move this to mercurial/patch.py ?
85 """
85 """
86 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
86 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
87 diff_re = re.compile('diff -r .* (.*)$')
87 diff_re = re.compile('diff -r .* (.*)$')
88 allhunks_re = re.compile('(?:index|new file|deleted file) ')
88 allhunks_re = re.compile('(?:index|new file|deleted file) ')
89 pretty_re = re.compile('(?:new file|deleted file) ')
89 pretty_re = re.compile('(?:new file|deleted file) ')
90 special_re = re.compile('(?:index|new|deleted|copy|rename) ')
90 special_re = re.compile('(?:index|new|deleted|copy|rename) ')
91
91
92 def __init__(self, header):
92 def __init__(self, header):
93 self.header = header
93 self.header = header
94 self.hunks = []
94 self.hunks = []
95
95
96 def binary(self):
96 def binary(self):
97 return util.any(h.startswith('index ') for h in self.header)
97 return util.any(h.startswith('index ') for h in self.header)
98
98
99 def pretty(self, fp):
99 def pretty(self, fp):
100 for h in self.header:
100 for h in self.header:
101 if h.startswith('index '):
101 if h.startswith('index '):
102 fp.write(_('this modifies a binary file (all or nothing)\n'))
102 fp.write(_('this modifies a binary file (all or nothing)\n'))
103 break
103 break
104 if self.pretty_re.match(h):
104 if self.pretty_re.match(h):
105 fp.write(h)
105 fp.write(h)
106 if self.binary():
106 if self.binary():
107 fp.write(_('this is a binary file\n'))
107 fp.write(_('this is a binary file\n'))
108 break
108 break
109 if h.startswith('---'):
109 if h.startswith('---'):
110 fp.write(_('%d hunks, %d lines changed\n') %
110 fp.write(_('%d hunks, %d lines changed\n') %
111 (len(self.hunks),
111 (len(self.hunks),
112 sum([max(h.added, h.removed) for h in self.hunks])))
112 sum([max(h.added, h.removed) for h in self.hunks])))
113 break
113 break
114 fp.write(h)
114 fp.write(h)
115
115
116 def write(self, fp):
116 def write(self, fp):
117 fp.write(''.join(self.header))
117 fp.write(''.join(self.header))
118
118
119 def allhunks(self):
119 def allhunks(self):
120 return util.any(self.allhunks_re.match(h) for h in self.header)
120 return util.any(self.allhunks_re.match(h) for h in self.header)
121
121
122 def files(self):
122 def files(self):
123 match = self.diffgit_re.match(self.header[0])
123 match = self.diffgit_re.match(self.header[0])
124 if match:
124 if match:
125 fromfile, tofile = match.groups()
125 fromfile, tofile = match.groups()
126 if fromfile == tofile:
126 if fromfile == tofile:
127 return [fromfile]
127 return [fromfile]
128 return [fromfile, tofile]
128 return [fromfile, tofile]
129 else:
129 else:
130 return self.diff_re.match(self.header[0]).groups()
130 return self.diff_re.match(self.header[0]).groups()
131
131
132 def filename(self):
132 def filename(self):
133 return self.files()[-1]
133 return self.files()[-1]
134
134
135 def __repr__(self):
135 def __repr__(self):
136 return '<header %s>' % (' '.join(map(repr, self.files())))
136 return '<header %s>' % (' '.join(map(repr, self.files())))
137
137
138 def special(self):
138 def special(self):
139 return util.any(self.special_re.match(h) for h in self.header)
139 return util.any(self.special_re.match(h) for h in self.header)
140
140
141 def countchanges(hunk):
141 def countchanges(hunk):
142 """hunk -> (n+,n-)"""
142 """hunk -> (n+,n-)"""
143 add = len([h for h in hunk if h[0] == '+'])
143 add = len([h for h in hunk if h[0] == '+'])
144 rem = len([h for h in hunk if h[0] == '-'])
144 rem = len([h for h in hunk if h[0] == '-'])
145 return add, rem
145 return add, rem
146
146
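A quick worked example of countchanges on a hypothetical hunk body (assuming hgext.record is importable):

    from hgext.record import countchanges

    # two added lines, one removed line, one context line
    print(countchanges(['+alpha\n', ' context\n', '+beta\n', '-gamma\n']))
    # (2, 1)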
147 class hunk(object):
147 class hunk(object):
148 """patch hunk
148 """patch hunk
149
149
150 XXX shouldn't we merge this with patch.hunk ?
150 XXX shouldn't we merge this with patch.hunk ?
151 """
151 """
152 maxcontext = 3
152 maxcontext = 3
153
153
154 def __init__(self, header, fromline, toline, proc, before, hunk, after):
154 def __init__(self, header, fromline, toline, proc, before, hunk, after):
155 def trimcontext(number, lines):
155 def trimcontext(number, lines):
156 delta = len(lines) - self.maxcontext
156 delta = len(lines) - self.maxcontext
157 if False and delta > 0:
157 if False and delta > 0:
158 return number + delta, lines[:self.maxcontext]
158 return number + delta, lines[:self.maxcontext]
159 return number, lines
159 return number, lines
160
160
161 self.header = header
161 self.header = header
162 self.fromline, self.before = trimcontext(fromline, before)
162 self.fromline, self.before = trimcontext(fromline, before)
163 self.toline, self.after = trimcontext(toline, after)
163 self.toline, self.after = trimcontext(toline, after)
164 self.proc = proc
164 self.proc = proc
165 self.hunk = hunk
165 self.hunk = hunk
166 self.added, self.removed = countchanges(self.hunk)
166 self.added, self.removed = countchanges(self.hunk)
167
167
168 def write(self, fp):
168 def write(self, fp):
169 delta = len(self.before) + len(self.after)
169 delta = len(self.before) + len(self.after)
170 if self.after and self.after[-1] == '\\ No newline at end of file\n':
170 if self.after and self.after[-1] == '\\ No newline at end of file\n':
171 delta -= 1
171 delta -= 1
172 fromlen = delta + self.removed
172 fromlen = delta + self.removed
173 tolen = delta + self.added
173 tolen = delta + self.added
174 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
174 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
175 (self.fromline, fromlen, self.toline, tolen,
175 (self.fromline, fromlen, self.toline, tolen,
176 self.proc and (' ' + self.proc)))
176 self.proc and (' ' + self.proc)))
177 fp.write(''.join(self.before + self.hunk + self.after))
177 fp.write(''.join(self.before + self.hunk + self.after))
178
178
179 pretty = write
179 pretty = write
180
180
181 def filename(self):
181 def filename(self):
182 return self.header.filename()
182 return self.header.filename()
183
183
184 def __repr__(self):
184 def __repr__(self):
185 return '<hunk %r@%d>' % (self.filename(), self.fromline)
185 return '<hunk %r@%d>' % (self.filename(), self.fromline)
186
186
187 def parsepatch(fp):
187 def parsepatch(fp):
188 """patch -> [] of headers -> [] of hunks """
188 """patch -> [] of headers -> [] of hunks """
189 class parser(object):
189 class parser(object):
190 """patch parsing state machine"""
190 """patch parsing state machine"""
191 def __init__(self):
191 def __init__(self):
192 self.fromline = 0
192 self.fromline = 0
193 self.toline = 0
193 self.toline = 0
194 self.proc = ''
194 self.proc = ''
195 self.header = None
195 self.header = None
196 self.context = []
196 self.context = []
197 self.before = []
197 self.before = []
198 self.hunk = []
198 self.hunk = []
199 self.headers = []
199 self.headers = []
200
200
201 def addrange(self, limits):
201 def addrange(self, limits):
202 fromstart, fromend, tostart, toend, proc = limits
202 fromstart, fromend, tostart, toend, proc = limits
203 self.fromline = int(fromstart)
203 self.fromline = int(fromstart)
204 self.toline = int(tostart)
204 self.toline = int(tostart)
205 self.proc = proc
205 self.proc = proc
206
206
207 def addcontext(self, context):
207 def addcontext(self, context):
208 if self.hunk:
208 if self.hunk:
209 h = hunk(self.header, self.fromline, self.toline, self.proc,
209 h = hunk(self.header, self.fromline, self.toline, self.proc,
210 self.before, self.hunk, context)
210 self.before, self.hunk, context)
211 self.header.hunks.append(h)
211 self.header.hunks.append(h)
212 self.fromline += len(self.before) + h.removed
212 self.fromline += len(self.before) + h.removed
213 self.toline += len(self.before) + h.added
213 self.toline += len(self.before) + h.added
214 self.before = []
214 self.before = []
215 self.hunk = []
215 self.hunk = []
216 self.proc = ''
216 self.proc = ''
217 self.context = context
217 self.context = context
218
218
219 def addhunk(self, hunk):
219 def addhunk(self, hunk):
220 if self.context:
220 if self.context:
221 self.before = self.context
221 self.before = self.context
222 self.context = []
222 self.context = []
223 self.hunk = hunk
223 self.hunk = hunk
224
224
225 def newfile(self, hdr):
225 def newfile(self, hdr):
226 self.addcontext([])
226 self.addcontext([])
227 h = header(hdr)
227 h = header(hdr)
228 self.headers.append(h)
228 self.headers.append(h)
229 self.header = h
229 self.header = h
230
230
231 def addother(self, line):
231 def addother(self, line):
232 pass # 'other' lines are ignored
232 pass # 'other' lines are ignored
233
233
234 def finished(self):
234 def finished(self):
235 self.addcontext([])
235 self.addcontext([])
236 return self.headers
236 return self.headers
237
237
238 transitions = {
238 transitions = {
239 'file': {'context': addcontext,
239 'file': {'context': addcontext,
240 'file': newfile,
240 'file': newfile,
241 'hunk': addhunk,
241 'hunk': addhunk,
242 'range': addrange},
242 'range': addrange},
243 'context': {'file': newfile,
243 'context': {'file': newfile,
244 'hunk': addhunk,
244 'hunk': addhunk,
245 'range': addrange,
245 'range': addrange,
246 'other': addother},
246 'other': addother},
247 'hunk': {'context': addcontext,
247 'hunk': {'context': addcontext,
248 'file': newfile,
248 'file': newfile,
249 'range': addrange},
249 'range': addrange},
250 'range': {'context': addcontext,
250 'range': {'context': addcontext,
251 'hunk': addhunk},
251 'hunk': addhunk},
252 'other': {'other': addother},
252 'other': {'other': addother},
253 }
253 }
254
254
255 p = parser()
255 p = parser()
256
256
257 state = 'context'
257 state = 'context'
258 for newstate, data in scanpatch(fp):
258 for newstate, data in scanpatch(fp):
259 try:
259 try:
260 p.transitions[state][newstate](p, data)
260 p.transitions[state][newstate](p, data)
261 except KeyError:
261 except KeyError:
262 raise patch.PatchError('unhandled transition: %s -> %s' %
262 raise patch.PatchError('unhandled transition: %s -> %s' %
263 (state, newstate))
263 (state, newstate))
264 state = newstate
264 state = newstate
265 return p.finished()
265 return p.finished()
266
266
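A minimal sketch of parsepatch grouping hunks under their file headers, using the same kind of hypothetical diff as the scanpatch example above:

    import cStringIO
    from hgext.record import parsepatch

    fp = cStringIO.StringIO(
        'diff --git a/foo b/foo\n'
        '--- a/foo\n'
        '+++ b/foo\n'
        '@@ -1,1 +1,1 @@\n'
        '-old\n'
        '+new\n')
    for h in parsepatch(fp):
        hk = h.hunks[0]
        print('%s: %d hunk(s), +%d/-%d'
              % (h.filename(), len(h.hunks), hk.added, hk.removed))
    # foo: 1 hunk(s), +1/-1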
267 def filterpatch(ui, headers):
267 def filterpatch(ui, headers):
268 """Interactively filter patch chunks into applied-only chunks"""
268 """Interactively filter patch chunks into applied-only chunks"""
269
269
270 def prompt(skipfile, skipall, query, chunk):
270 def prompt(skipfile, skipall, query, chunk):
271 """prompt query, and process base inputs
271 """prompt query, and process base inputs
272
272
273 - y/n for this change, s/f for the rest of the file
273 - y/n for this change, s/f for the rest of the file
274 - d/a for the rest of the changes and files
274 - d/a for the rest of the changes and files
275 - ? (help)
275 - ? (help)
276 - q (quit)
276 - q (quit)
277
277
278 Return True/False and possibly updated skipfile and skipall.
278 Return True/False and possibly updated skipfile and skipall.
279 """
279 """
280 newpatches = None
280 newpatches = None
281 if skipall is not None:
281 if skipall is not None:
282 return skipall, skipfile, skipall, newpatches
282 return skipall, skipfile, skipall, newpatches
283 if skipfile is not None:
283 if skipfile is not None:
284 return skipfile, skipfile, skipall, newpatches
284 return skipfile, skipfile, skipall, newpatches
285 while True:
285 while True:
286 resps = _('[Ynesfdaq?]')
286 resps = _('[Ynesfdaq?]'
287 choices = (_('&Yes, record this change'),
287 '$$ &Yes, record this change'
288 _('&No, skip this change'),
288 '$$ &No, skip this change'
289 _('&Edit the change manually'),
289 '$$ &Edit the change manually'
290 _('&Skip remaining changes to this file'),
290 '$$ &Skip remaining changes to this file'
291 _('Record remaining changes to this &file'),
291 '$$ Record remaining changes to this &file'
292 _('&Done, skip remaining changes and files'),
292 '$$ &Done, skip remaining changes and files'
293 _('Record &all changes to all remaining files'),
293 '$$ Record &all changes to all remaining files'
294 _('&Quit, recording no changes'),
294 '$$ &Quit, recording no changes'
295 _('&?'))
295 '$$ &?')
296 r = ui.promptchoice("%s %s" % (query, resps), choices)
296 r = ui.promptchoice("%s %s" % (query, resps))
297 ui.write("\n")
297 ui.write("\n")
298 if r == 8: # ?
298 if r == 8: # ?
299 doc = gettext(record.__doc__)
299 doc = gettext(record.__doc__)
300 c = doc.find('::') + 2
300 c = doc.find('::') + 2
301 for l in doc[c:].splitlines():
301 for l in doc[c:].splitlines():
302 if l.startswith(' '):
302 if l.startswith(' '):
303 ui.write(l.strip(), '\n')
303 ui.write(l.strip(), '\n')
304 continue
304 continue
305 elif r == 0: # yes
305 elif r == 0: # yes
306 ret = True
306 ret = True
307 elif r == 1: # no
307 elif r == 1: # no
308 ret = False
308 ret = False
309 elif r == 2: # Edit patch
309 elif r == 2: # Edit patch
310 if chunk is None:
310 if chunk is None:
311 ui.write(_('cannot edit patch for whole file'))
311 ui.write(_('cannot edit patch for whole file'))
312 ui.write("\n")
312 ui.write("\n")
313 continue
313 continue
314 if chunk.header.binary():
314 if chunk.header.binary():
315 ui.write(_('cannot edit patch for binary file'))
315 ui.write(_('cannot edit patch for binary file'))
316 ui.write("\n")
316 ui.write("\n")
317 continue
317 continue
318 # Patch comment based on the Git one (based on comment at end of
318 # Patch comment based on the Git one (based on comment at end of
319 # http://mercurial.selenic.com/wiki/RecordExtension)
319 # http://mercurial.selenic.com/wiki/RecordExtension)
320 phelp = '---' + _("""
320 phelp = '---' + _("""
321 To remove '-' lines, make them ' ' lines (context).
321 To remove '-' lines, make them ' ' lines (context).
322 To remove '+' lines, delete them.
322 To remove '+' lines, delete them.
323 Lines starting with # will be removed from the patch.
323 Lines starting with # will be removed from the patch.
324
324
325 If the patch applies cleanly, the edited hunk will immediately be
325 If the patch applies cleanly, the edited hunk will immediately be
326 added to the record list. If it does not apply cleanly, a rejects
326 added to the record list. If it does not apply cleanly, a rejects
327 file will be generated: you can use that when you try again. If
327 file will be generated: you can use that when you try again. If
328 all lines of the hunk are removed, then the edit is aborted and
328 all lines of the hunk are removed, then the edit is aborted and
329 the hunk is left unchanged.
329 the hunk is left unchanged.
330 """)
330 """)
331 (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
331 (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
332 suffix=".diff", text=True)
332 suffix=".diff", text=True)
333 ncpatchfp = None
333 ncpatchfp = None
334 try:
334 try:
335 # Write the initial patch
335 # Write the initial patch
336 f = os.fdopen(patchfd, "w")
336 f = os.fdopen(patchfd, "w")
337 chunk.header.write(f)
337 chunk.header.write(f)
338 chunk.write(f)
338 chunk.write(f)
339 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
339 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
340 f.close()
340 f.close()
341 # Start the editor and wait for it to complete
341 # Start the editor and wait for it to complete
342 editor = ui.geteditor()
342 editor = ui.geteditor()
343 util.system("%s \"%s\"" % (editor, patchfn),
343 util.system("%s \"%s\"" % (editor, patchfn),
344 environ={'HGUSER': ui.username()},
344 environ={'HGUSER': ui.username()},
345 onerr=util.Abort, errprefix=_("edit failed"),
345 onerr=util.Abort, errprefix=_("edit failed"),
346 out=ui.fout)
346 out=ui.fout)
347 # Remove comment lines
347 # Remove comment lines
348 patchfp = open(patchfn)
348 patchfp = open(patchfn)
349 ncpatchfp = cStringIO.StringIO()
349 ncpatchfp = cStringIO.StringIO()
350 for line in patchfp:
350 for line in patchfp:
351 if not line.startswith('#'):
351 if not line.startswith('#'):
352 ncpatchfp.write(line)
352 ncpatchfp.write(line)
353 patchfp.close()
353 patchfp.close()
354 ncpatchfp.seek(0)
354 ncpatchfp.seek(0)
355 newpatches = parsepatch(ncpatchfp)
355 newpatches = parsepatch(ncpatchfp)
356 finally:
356 finally:
357 os.unlink(patchfn)
357 os.unlink(patchfn)
358 del ncpatchfp
358 del ncpatchfp
359 # Signal that the chunk shouldn't be applied as-is, but
359 # Signal that the chunk shouldn't be applied as-is, but
360 # provide the new patch to be used instead.
360 # provide the new patch to be used instead.
361 ret = False
361 ret = False
362 elif r == 3: # Skip
362 elif r == 3: # Skip
363 ret = skipfile = False
363 ret = skipfile = False
364 elif r == 4: # file (Record remaining)
364 elif r == 4: # file (Record remaining)
365 ret = skipfile = True
365 ret = skipfile = True
366 elif r == 5: # done, skip remaining
366 elif r == 5: # done, skip remaining
367 ret = skipall = False
367 ret = skipall = False
368 elif r == 6: # all
368 elif r == 6: # all
369 ret = skipall = True
369 ret = skipall = True
370 elif r == 7: # quit
370 elif r == 7: # quit
371 raise util.Abort(_('user quit'))
371 raise util.Abort(_('user quit'))
372 return ret, skipfile, skipall, newpatches
372 return ret, skipfile, skipall, newpatches
373
373
374 seen = set()
374 seen = set()
375 applied = {} # 'filename' -> [] of chunks
375 applied = {} # 'filename' -> [] of chunks
376 skipfile, skipall = None, None
376 skipfile, skipall = None, None
377 pos, total = 1, sum(len(h.hunks) for h in headers)
377 pos, total = 1, sum(len(h.hunks) for h in headers)
378 for h in headers:
378 for h in headers:
379 pos += len(h.hunks)
379 pos += len(h.hunks)
380 skipfile = None
380 skipfile = None
381 fixoffset = 0
381 fixoffset = 0
382 hdr = ''.join(h.header)
382 hdr = ''.join(h.header)
383 if hdr in seen:
383 if hdr in seen:
384 continue
384 continue
385 seen.add(hdr)
385 seen.add(hdr)
386 if skipall is None:
386 if skipall is None:
387 h.pretty(ui)
387 h.pretty(ui)
388 msg = (_('examine changes to %s?') %
388 msg = (_('examine changes to %s?') %
389 _(' and ').join("'%s'" % f for f in h.files()))
389 _(' and ').join("'%s'" % f for f in h.files()))
390 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
390 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
391 if not r:
391 if not r:
392 continue
392 continue
393 applied[h.filename()] = [h]
393 applied[h.filename()] = [h]
394 if h.allhunks():
394 if h.allhunks():
395 applied[h.filename()] += h.hunks
395 applied[h.filename()] += h.hunks
396 continue
396 continue
397 for i, chunk in enumerate(h.hunks):
397 for i, chunk in enumerate(h.hunks):
398 if skipfile is None and skipall is None:
398 if skipfile is None and skipall is None:
399 chunk.pretty(ui)
399 chunk.pretty(ui)
400 if total == 1:
400 if total == 1:
401 msg = _("record this change to '%s'?") % chunk.filename()
401 msg = _("record this change to '%s'?") % chunk.filename()
402 else:
402 else:
403 idx = pos - len(h.hunks) + i
403 idx = pos - len(h.hunks) + i
404 msg = _("record change %d/%d to '%s'?") % (idx, total,
404 msg = _("record change %d/%d to '%s'?") % (idx, total,
405 chunk.filename())
405 chunk.filename())
406 r, skipfile, skipall, newpatches = prompt(skipfile,
406 r, skipfile, skipall, newpatches = prompt(skipfile,
407 skipall, msg, chunk)
407 skipall, msg, chunk)
408 if r:
408 if r:
409 if fixoffset:
409 if fixoffset:
410 chunk = copy.copy(chunk)
410 chunk = copy.copy(chunk)
411 chunk.toline += fixoffset
411 chunk.toline += fixoffset
412 applied[chunk.filename()].append(chunk)
412 applied[chunk.filename()].append(chunk)
413 elif newpatches is not None:
413 elif newpatches is not None:
414 for newpatch in newpatches:
414 for newpatch in newpatches:
415 for newhunk in newpatch.hunks:
415 for newhunk in newpatch.hunks:
416 if fixoffset:
416 if fixoffset:
417 newhunk.toline += fixoffset
417 newhunk.toline += fixoffset
418 applied[newhunk.filename()].append(newhunk)
418 applied[newhunk.filename()].append(newhunk)
419 else:
419 else:
420 fixoffset += chunk.removed - chunk.added
420 fixoffset += chunk.removed - chunk.added
421 return sum([h for h in applied.itervalues()
421 return sum([h for h in applied.itervalues()
422 if h[0].special() or len(h) > 1], [])
422 if h[0].special() or len(h) > 1], [])
423
423
424 @command("record",
424 @command("record",
425 # same options as commit + white space diff options
425 # same options as commit + white space diff options
426 commands.table['^commit|ci'][1][:] + diffopts,
426 commands.table['^commit|ci'][1][:] + diffopts,
427 _('hg record [OPTION]... [FILE]...'))
427 _('hg record [OPTION]... [FILE]...'))
428 def record(ui, repo, *pats, **opts):
428 def record(ui, repo, *pats, **opts):
429 '''interactively select changes to commit
429 '''interactively select changes to commit
430
430
431 If a list of files is omitted, all changes reported by :hg:`status`
431 If a list of files is omitted, all changes reported by :hg:`status`
432 will be candidates for recording.
432 will be candidates for recording.
433
433
434 See :hg:`help dates` for a list of formats valid for -d/--date.
434 See :hg:`help dates` for a list of formats valid for -d/--date.
435
435
436 You will be prompted for whether to record changes to each
436 You will be prompted for whether to record changes to each
437 modified file, and for files with multiple changes, for each
437 modified file, and for files with multiple changes, for each
438 change to use. For each query, the following responses are
438 change to use. For each query, the following responses are
439 possible::
439 possible::
440
440
441 y - record this change
441 y - record this change
442 n - skip this change
442 n - skip this change
443 e - edit this change manually
443 e - edit this change manually
444
444
445 s - skip remaining changes to this file
445 s - skip remaining changes to this file
446 f - record remaining changes to this file
446 f - record remaining changes to this file
447
447
448 d - done, skip remaining changes and files
448 d - done, skip remaining changes and files
449 a - record all changes to all remaining files
449 a - record all changes to all remaining files
450 q - quit, recording no changes
450 q - quit, recording no changes
451
451
452 ? - display help
452 ? - display help
453
453
454 This command is not available when committing a merge.'''
454 This command is not available when committing a merge.'''
455
455
456 dorecord(ui, repo, commands.commit, 'commit', False, *pats, **opts)
456 dorecord(ui, repo, commands.commit, 'commit', False, *pats, **opts)
457
457
458 def qrefresh(origfn, ui, repo, *pats, **opts):
458 def qrefresh(origfn, ui, repo, *pats, **opts):
459 if not opts['interactive']:
459 if not opts['interactive']:
460 return origfn(ui, repo, *pats, **opts)
460 return origfn(ui, repo, *pats, **opts)
461
461
462 mq = extensions.find('mq')
462 mq = extensions.find('mq')
463
463
464 def committomq(ui, repo, *pats, **opts):
464 def committomq(ui, repo, *pats, **opts):
465 # At this point the working copy contains only changes that
465 # At this point the working copy contains only changes that
466 # were accepted. All other changes were reverted.
466 # were accepted. All other changes were reverted.
467 # We can't pass *pats here since qrefresh would undo changes to all
467 # We can't pass *pats here since qrefresh would undo changes to all
468 # other files in the patch that aren't in pats.
468 # other files in the patch that aren't in pats.
469 mq.refresh(ui, repo, **opts)
469 mq.refresh(ui, repo, **opts)
470
470
471 # backup all changed files
471 # backup all changed files
472 dorecord(ui, repo, committomq, 'qrefresh', True, *pats, **opts)
472 dorecord(ui, repo, committomq, 'qrefresh', True, *pats, **opts)
473
473
474 def qrecord(ui, repo, patch, *pats, **opts):
474 def qrecord(ui, repo, patch, *pats, **opts):
475 '''interactively record a new patch
475 '''interactively record a new patch
476
476
477 See :hg:`help qnew` & :hg:`help record` for more information and
477 See :hg:`help qnew` & :hg:`help record` for more information and
478 usage.
478 usage.
479 '''
479 '''
480
480
481 try:
481 try:
482 mq = extensions.find('mq')
482 mq = extensions.find('mq')
483 except KeyError:
483 except KeyError:
484 raise util.Abort(_("'mq' extension not loaded"))
484 raise util.Abort(_("'mq' extension not loaded"))
485
485
486 repo.mq.checkpatchname(patch)
486 repo.mq.checkpatchname(patch)
487
487
488 def committomq(ui, repo, *pats, **opts):
488 def committomq(ui, repo, *pats, **opts):
489 opts['checkname'] = False
489 opts['checkname'] = False
490 mq.new(ui, repo, patch, *pats, **opts)
490 mq.new(ui, repo, patch, *pats, **opts)
491
491
492 dorecord(ui, repo, committomq, 'qnew', False, *pats, **opts)
492 dorecord(ui, repo, committomq, 'qnew', False, *pats, **opts)
493
493
494 def qnew(origfn, ui, repo, patch, *args, **opts):
494 def qnew(origfn, ui, repo, patch, *args, **opts):
495 if opts['interactive']:
495 if opts['interactive']:
496 return qrecord(ui, repo, patch, *args, **opts)
496 return qrecord(ui, repo, patch, *args, **opts)
497 return origfn(ui, repo, patch, *args, **opts)
497 return origfn(ui, repo, patch, *args, **opts)
498
498
499 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, *pats, **opts):
499 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, *pats, **opts):
500 if not ui.interactive():
500 if not ui.interactive():
501 raise util.Abort(_('running non-interactively, use %s instead') %
501 raise util.Abort(_('running non-interactively, use %s instead') %
502 cmdsuggest)
502 cmdsuggest)
503
503
504 # make sure username is set before going interactive
504 # make sure username is set before going interactive
505 ui.username()
505 ui.username()
506
506
507 def recordfunc(ui, repo, message, match, opts):
507 def recordfunc(ui, repo, message, match, opts):
508 """This is generic record driver.
508 """This is generic record driver.
509
509
510 Its job is to interactively filter local changes, and
510 Its job is to interactively filter local changes, and
511 accordingly prepare the working directory into a state in which the
511 accordingly prepare the working directory into a state in which the
512 job can be delegated to a non-interactive commit command such as
512 job can be delegated to a non-interactive commit command such as
513 'commit' or 'qrefresh'.
513 'commit' or 'qrefresh'.
514
514
515 After the actual job is done by the non-interactive command, the
515 After the actual job is done by the non-interactive command, the
516 working directory is restored to its original state.
516 working directory is restored to its original state.
517
517
518 In the end we'll record interesting changes, and everything else
518 In the end we'll record interesting changes, and everything else
519 will be left in place, so the user can continue working.
519 will be left in place, so the user can continue working.
520 """
520 """
521
521
522 merge = len(repo[None].parents()) > 1
522 merge = len(repo[None].parents()) > 1
523 if merge:
523 if merge:
524 raise util.Abort(_('cannot partially commit a merge '
524 raise util.Abort(_('cannot partially commit a merge '
525 '(use "hg commit" instead)'))
525 '(use "hg commit" instead)'))
526
526
527 changes = repo.status(match=match)[:3]
527 changes = repo.status(match=match)[:3]
528 diffopts = patch.diffopts(ui, opts=dict(
528 diffopts = patch.diffopts(ui, opts=dict(
529 git=True, nodates=True,
529 git=True, nodates=True,
530 ignorews=opts.get('ignore_all_space'),
530 ignorews=opts.get('ignore_all_space'),
531 ignorewsamount=opts.get('ignore_space_change'),
531 ignorewsamount=opts.get('ignore_space_change'),
532 ignoreblanklines=opts.get('ignore_blank_lines')))
532 ignoreblanklines=opts.get('ignore_blank_lines')))
533 chunks = patch.diff(repo, changes=changes, opts=diffopts)
533 chunks = patch.diff(repo, changes=changes, opts=diffopts)
534 fp = cStringIO.StringIO()
534 fp = cStringIO.StringIO()
535 fp.write(''.join(chunks))
535 fp.write(''.join(chunks))
536 fp.seek(0)
536 fp.seek(0)
537
537
538 # 1. filter the patch, so we have the subset of it we intend to apply
538 # 1. filter the patch, so we have the subset of it we intend to apply
539 try:
539 try:
540 chunks = filterpatch(ui, parsepatch(fp))
540 chunks = filterpatch(ui, parsepatch(fp))
541 except patch.PatchError, err:
541 except patch.PatchError, err:
542 raise util.Abort(_('error parsing patch: %s') % err)
542 raise util.Abort(_('error parsing patch: %s') % err)
543
543
544 del fp
544 del fp
545
545
546 contenders = set()
546 contenders = set()
547 for h in chunks:
547 for h in chunks:
548 try:
548 try:
549 contenders.update(set(h.files()))
549 contenders.update(set(h.files()))
550 except AttributeError:
550 except AttributeError:
551 pass
551 pass
552
552
553 changed = changes[0] + changes[1] + changes[2]
553 changed = changes[0] + changes[1] + changes[2]
554 newfiles = [f for f in changed if f in contenders]
554 newfiles = [f for f in changed if f in contenders]
555 if not newfiles:
555 if not newfiles:
556 ui.status(_('no changes to record\n'))
556 ui.status(_('no changes to record\n'))
557 return 0
557 return 0
558
558
559 modified = set(changes[0])
559 modified = set(changes[0])
560
560
561 # 2. backup changed files, so we can restore them in the end
561 # 2. backup changed files, so we can restore them in the end
562 if backupall:
562 if backupall:
563 tobackup = changed
563 tobackup = changed
564 else:
564 else:
565 tobackup = [f for f in newfiles if f in modified]
565 tobackup = [f for f in newfiles if f in modified]
566
566
567 backups = {}
567 backups = {}
568 if tobackup:
568 if tobackup:
569 backupdir = repo.join('record-backups')
569 backupdir = repo.join('record-backups')
570 try:
570 try:
571 os.mkdir(backupdir)
571 os.mkdir(backupdir)
572 except OSError, err:
572 except OSError, err:
573 if err.errno != errno.EEXIST:
573 if err.errno != errno.EEXIST:
574 raise
574 raise
575 try:
575 try:
576 # backup continues
576 # backup continues
577 for f in tobackup:
577 for f in tobackup:
578 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
578 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
579 dir=backupdir)
579 dir=backupdir)
580 os.close(fd)
580 os.close(fd)
581 ui.debug('backup %r as %r\n' % (f, tmpname))
581 ui.debug('backup %r as %r\n' % (f, tmpname))
582 util.copyfile(repo.wjoin(f), tmpname)
582 util.copyfile(repo.wjoin(f), tmpname)
583 shutil.copystat(repo.wjoin(f), tmpname)
583 shutil.copystat(repo.wjoin(f), tmpname)
584 backups[f] = tmpname
584 backups[f] = tmpname
585
585
586 fp = cStringIO.StringIO()
586 fp = cStringIO.StringIO()
587 for c in chunks:
587 for c in chunks:
588 if c.filename() in backups:
588 if c.filename() in backups:
589 c.write(fp)
589 c.write(fp)
590 dopatch = fp.tell()
590 dopatch = fp.tell()
591 fp.seek(0)
591 fp.seek(0)
592
592
593 # 3a. apply filtered patch to clean repo (clean)
593 # 3a. apply filtered patch to clean repo (clean)
594 if backups:
594 if backups:
595 hg.revert(repo, repo.dirstate.p1(),
595 hg.revert(repo, repo.dirstate.p1(),
596 lambda key: key in backups)
596 lambda key: key in backups)
597
597
598 # 3b. (apply)
598 # 3b. (apply)
599 if dopatch:
599 if dopatch:
600 try:
600 try:
601 ui.debug('applying patch\n')
601 ui.debug('applying patch\n')
602 ui.debug(fp.getvalue())
602 ui.debug(fp.getvalue())
603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
603 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
604 except patch.PatchError, err:
604 except patch.PatchError, err:
605 raise util.Abort(str(err))
605 raise util.Abort(str(err))
606 del fp
606 del fp
607
607
608 # 4. We prepared the working directory according to the filtered
608 # 4. We prepared the working directory according to the filtered
609 # patch. Now is the time to delegate the job to
609 # patch. Now is the time to delegate the job to
610 # commit/qrefresh or the like!
610 # commit/qrefresh or the like!
611
611
612 # it is important to first chdir to repo root -- we'll call
612 # it is important to first chdir to repo root -- we'll call
613 # a highlevel command with list of pathnames relative to
613 # a highlevel command with list of pathnames relative to
614 # repo root
614 # repo root
615 cwd = os.getcwd()
615 cwd = os.getcwd()
616 os.chdir(repo.root)
616 os.chdir(repo.root)
617 try:
617 try:
618 commitfunc(ui, repo, *newfiles, **opts)
618 commitfunc(ui, repo, *newfiles, **opts)
619 finally:
619 finally:
620 os.chdir(cwd)
620 os.chdir(cwd)
621
621
622 return 0
622 return 0
623 finally:
623 finally:
624 # 5. finally restore backed-up files
624 # 5. finally restore backed-up files
625 try:
625 try:
626 for realname, tmpname in backups.iteritems():
626 for realname, tmpname in backups.iteritems():
627 ui.debug('restoring %r to %r\n' % (tmpname, realname))
627 ui.debug('restoring %r to %r\n' % (tmpname, realname))
628 util.copyfile(tmpname, repo.wjoin(realname))
628 util.copyfile(tmpname, repo.wjoin(realname))
629 # Our calls to copystat() here and above are a
629 # Our calls to copystat() here and above are a
630 # hack to trick any editors that have f open into thinking
630 # hack to trick any editors that have f open into thinking
631 # that we haven't modified it.
631 # that we haven't modified it.
632 #
632 #
633 # Also note that this is racy, as an editor could
633 # Also note that this is racy, as an editor could
634 # notice the file's mtime before we've finished
634 # notice the file's mtime before we've finished
635 # writing it.
635 # writing it.
636 shutil.copystat(tmpname, repo.wjoin(realname))
636 shutil.copystat(tmpname, repo.wjoin(realname))
637 os.unlink(tmpname)
637 os.unlink(tmpname)
638 if tobackup:
638 if tobackup:
639 os.rmdir(backupdir)
639 os.rmdir(backupdir)
640 except OSError:
640 except OSError:
641 pass
641 pass
642
642
643 # wrap ui.write so diff output can be labeled/colorized
643 # wrap ui.write so diff output can be labeled/colorized
644 def wrapwrite(orig, *args, **kw):
644 def wrapwrite(orig, *args, **kw):
645 label = kw.pop('label', '')
645 label = kw.pop('label', '')
646 for chunk, l in patch.difflabel(lambda: args):
646 for chunk, l in patch.difflabel(lambda: args):
647 orig(chunk, label=label + l)
647 orig(chunk, label=label + l)
648 oldwrite = ui.write
648 oldwrite = ui.write
649 extensions.wrapfunction(ui, 'write', wrapwrite)
649 extensions.wrapfunction(ui, 'write', wrapwrite)
650 try:
650 try:
651 return cmdutil.commit(ui, repo, recordfunc, pats, opts)
651 return cmdutil.commit(ui, repo, recordfunc, pats, opts)
652 finally:
652 finally:
653 ui.write = oldwrite
653 ui.write = oldwrite
654
654
655 cmdtable["qrecord"] = \
655 cmdtable["qrecord"] = \
656 (qrecord, [], # placeholder until mq is available
656 (qrecord, [], # placeholder until mq is available
657 _('hg qrecord [OPTION]... PATCH [FILE]...'))
657 _('hg qrecord [OPTION]... PATCH [FILE]...'))
658
658
659 def uisetup(ui):
659 def uisetup(ui):
660 try:
660 try:
661 mq = extensions.find('mq')
661 mq = extensions.find('mq')
662 except KeyError:
662 except KeyError:
663 return
663 return
664
664
665 cmdtable["qrecord"] = \
665 cmdtable["qrecord"] = \
666 (qrecord,
666 (qrecord,
667 # same options as qnew, but copy them so we don't get
667 # same options as qnew, but copy them so we don't get
668 # -i/--interactive for qrecord and add white space diff options
668 # -i/--interactive for qrecord and add white space diff options
669 mq.cmdtable['^qnew'][1][:] + diffopts,
669 mq.cmdtable['^qnew'][1][:] + diffopts,
670 _('hg qrecord [OPTION]... PATCH [FILE]...'))
670 _('hg qrecord [OPTION]... PATCH [FILE]...'))
671
671
672 _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
672 _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
673 _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
673 _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
674 _("interactively select changes to refresh"))
674 _("interactively select changes to refresh"))
675
675
676 def _wrapcmd(cmd, table, wrapfn, msg):
676 def _wrapcmd(cmd, table, wrapfn, msg):
677 entry = extensions.wrapcommand(table, cmd, wrapfn)
677 entry = extensions.wrapcommand(table, cmd, wrapfn)
678 entry[1].append(('i', 'interactive', None, msg))
678 entry[1].append(('i', 'interactive', None, msg))
679
679
680 commands.inferrepo += " record qrecord"
680 commands.inferrepo += " record qrecord"
@@ -1,377 +1,377 b''
1 # filemerge.py - file-level merge handling for Mercurial
1 # filemerge.py - file-level merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import short
8 from node import short
9 from i18n import _
9 from i18n import _
10 import util, simplemerge, match, error
10 import util, simplemerge, match, error
11 import os, tempfile, re, filecmp
11 import os, tempfile, re, filecmp
12
12
13 def _toolstr(ui, tool, part, default=""):
13 def _toolstr(ui, tool, part, default=""):
14 return ui.config("merge-tools", tool + "." + part, default)
14 return ui.config("merge-tools", tool + "." + part, default)
15
15
16 def _toolbool(ui, tool, part, default=False):
16 def _toolbool(ui, tool, part, default=False):
17 return ui.configbool("merge-tools", tool + "." + part, default)
17 return ui.configbool("merge-tools", tool + "." + part, default)
18
18
19 def _toollist(ui, tool, part, default=[]):
19 def _toollist(ui, tool, part, default=[]):
20 return ui.configlist("merge-tools", tool + "." + part, default)
20 return ui.configlist("merge-tools", tool + "." + part, default)
21
21
22 internals = {}
22 internals = {}
23
23
24 def internaltool(name, trymerge, onfailure=None):
24 def internaltool(name, trymerge, onfailure=None):
25 '''return a decorator for populating internal merge tool table'''
25 '''return a decorator for populating internal merge tool table'''
26 def decorator(func):
26 def decorator(func):
27 fullname = 'internal:' + name
27 fullname = 'internal:' + name
28 func.__doc__ = "``%s``\n" % fullname + func.__doc__.strip()
28 func.__doc__ = "``%s``\n" % fullname + func.__doc__.strip()
29 internals[fullname] = func
29 internals[fullname] = func
30 func.trymerge = trymerge
30 func.trymerge = trymerge
31 func.onfailure = onfailure
31 func.onfailure = onfailure
32 return func
32 return func
33 return decorator
33 return decorator
34
34
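A hypothetical sketch of registering an extra internal tool via the decorator above; the body simply mirrors internal:other, which is defined later in this file:

    from mercurial.filemerge import internaltool

    @internaltool('example-other', False)
    def _iexampleother(repo, mynode, orig, fcd, fco, fca, toolconf):
        """Takes the other version of the file (illustration only)."""
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
        return 0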
35 def _findtool(ui, tool):
35 def _findtool(ui, tool):
36 if tool in internals:
36 if tool in internals:
37 return tool
37 return tool
38 for kn in ("regkey", "regkeyalt"):
38 for kn in ("regkey", "regkeyalt"):
39 k = _toolstr(ui, tool, kn)
39 k = _toolstr(ui, tool, kn)
40 if not k:
40 if not k:
41 continue
41 continue
42 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
42 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
43 if p:
43 if p:
44 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
44 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
45 if p:
45 if p:
46 return p
46 return p
47 exe = _toolstr(ui, tool, "executable", tool)
47 exe = _toolstr(ui, tool, "executable", tool)
48 return util.findexe(util.expandpath(exe))
48 return util.findexe(util.expandpath(exe))
49
49
50 def _picktool(repo, ui, path, binary, symlink):
50 def _picktool(repo, ui, path, binary, symlink):
51 def check(tool, pat, symlink, binary):
51 def check(tool, pat, symlink, binary):
52 tmsg = tool
52 tmsg = tool
53 if pat:
53 if pat:
54 tmsg += " specified for " + pat
54 tmsg += " specified for " + pat
55 if not _findtool(ui, tool):
55 if not _findtool(ui, tool):
56 if pat: # explicitly requested tool deserves a warning
56 if pat: # explicitly requested tool deserves a warning
57 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
57 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
58 else: # configured but non-existing tools are more silent
58 else: # configured but non-existing tools are more silent
59 ui.note(_("couldn't find merge tool %s\n") % tmsg)
59 ui.note(_("couldn't find merge tool %s\n") % tmsg)
60 elif symlink and not _toolbool(ui, tool, "symlink"):
60 elif symlink and not _toolbool(ui, tool, "symlink"):
61 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
61 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
62 elif binary and not _toolbool(ui, tool, "binary"):
62 elif binary and not _toolbool(ui, tool, "binary"):
63 ui.warn(_("tool %s can't handle binary\n") % tmsg)
63 ui.warn(_("tool %s can't handle binary\n") % tmsg)
64 elif not util.gui() and _toolbool(ui, tool, "gui"):
64 elif not util.gui() and _toolbool(ui, tool, "gui"):
65 ui.warn(_("tool %s requires a GUI\n") % tmsg)
65 ui.warn(_("tool %s requires a GUI\n") % tmsg)
66 else:
66 else:
67 return True
67 return True
68 return False
68 return False
69
69
70 # forcemerge comes from command line arguments, highest priority
70 # forcemerge comes from command line arguments, highest priority
71 force = ui.config('ui', 'forcemerge')
71 force = ui.config('ui', 'forcemerge')
72 if force:
72 if force:
73 toolpath = _findtool(ui, force)
73 toolpath = _findtool(ui, force)
74 if toolpath:
74 if toolpath:
75 return (force, util.shellquote(toolpath))
75 return (force, util.shellquote(toolpath))
76 else:
76 else:
77 # mimic HGMERGE if given tool not found
77 # mimic HGMERGE if given tool not found
78 return (force, force)
78 return (force, force)
79
79
80 # HGMERGE takes next precedence
80 # HGMERGE takes next precedence
81 hgmerge = os.environ.get("HGMERGE")
81 hgmerge = os.environ.get("HGMERGE")
82 if hgmerge:
82 if hgmerge:
83 return (hgmerge, hgmerge)
83 return (hgmerge, hgmerge)
84
84
85 # then patterns
85 # then patterns
86 for pat, tool in ui.configitems("merge-patterns"):
86 for pat, tool in ui.configitems("merge-patterns"):
87 mf = match.match(repo.root, '', [pat])
87 mf = match.match(repo.root, '', [pat])
88 if mf(path) and check(tool, pat, symlink, False):
88 if mf(path) and check(tool, pat, symlink, False):
89 toolpath = _findtool(ui, tool)
89 toolpath = _findtool(ui, tool)
90 return (tool, util.shellquote(toolpath))
90 return (tool, util.shellquote(toolpath))
91
91
92 # then merge tools
92 # then merge tools
93 tools = {}
93 tools = {}
94 for k, v in ui.configitems("merge-tools"):
94 for k, v in ui.configitems("merge-tools"):
95 t = k.split('.')[0]
95 t = k.split('.')[0]
96 if t not in tools:
96 if t not in tools:
97 tools[t] = int(_toolstr(ui, t, "priority", "0"))
97 tools[t] = int(_toolstr(ui, t, "priority", "0"))
98 names = tools.keys()
98 names = tools.keys()
99 tools = sorted([(-p, t) for t, p in tools.items()])
99 tools = sorted([(-p, t) for t, p in tools.items()])
100 uimerge = ui.config("ui", "merge")
100 uimerge = ui.config("ui", "merge")
101 if uimerge:
101 if uimerge:
102 if uimerge not in names:
102 if uimerge not in names:
103 return (uimerge, uimerge)
103 return (uimerge, uimerge)
104 tools.insert(0, (None, uimerge)) # highest priority
104 tools.insert(0, (None, uimerge)) # highest priority
105 tools.append((None, "hgmerge")) # the old default, if found
105 tools.append((None, "hgmerge")) # the old default, if found
106 for p, t in tools:
106 for p, t in tools:
107 if check(t, None, symlink, binary):
107 if check(t, None, symlink, binary):
108 toolpath = _findtool(ui, t)
108 toolpath = _findtool(ui, t)
109 return (t, util.shellquote(toolpath))
109 return (t, util.shellquote(toolpath))
110
110
111 # internal merge or prompt as last resort
111 # internal merge or prompt as last resort
112 if symlink or binary:
112 if symlink or binary:
113 return "internal:prompt", None
113 return "internal:prompt", None
114 return "internal:merge", None
114 return "internal:merge", None
115
115
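
For readers skimming the diff, the precedence order _picktool implements above can be summarised in a standalone sketch. Everything below is illustrative: pick_tool_sketch is not a Mercurial API, and fnmatch stands in for the real match.match machinery; the actual lookup also runs _findtool and the symlink/binary/GUI checks.

    import fnmatch

    def pick_tool_sketch(path, forcemerge, hgmerge, pattern_tools, ranked_tools):
        # 1. --config ui.forcemerge (command line) wins outright
        if forcemerge:
            return forcemerge
        # 2. the HGMERGE environment variable
        if hgmerge:
            return hgmerge
        # 3. the first merge-patterns entry whose glob matches the file
        for pat, tool in pattern_tools:
            if fnmatch.fnmatch(path, pat):
                return tool
        # 4. merge-tools entries, highest configured priority first
        for _prio, tool in sorted(ranked_tools, reverse=True):
            return tool
        # 5. otherwise fall back to the internal merge (or internal:prompt
        #    for symlinks/binaries, as in the code above)
        return 'internal:merge'

    assert pick_tool_sketch('src/main.c', None, None,
                            [('*.c', 'kdiff3')], [(0, 'meld')]) == 'kdiff3'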
116 def _eoltype(data):
116 def _eoltype(data):
117 "Guess the EOL type of a file"
117 "Guess the EOL type of a file"
118 if '\0' in data: # binary
118 if '\0' in data: # binary
119 return None
119 return None
120 if '\r\n' in data: # Windows
120 if '\r\n' in data: # Windows
121 return '\r\n'
121 return '\r\n'
122 if '\r' in data: # Old Mac
122 if '\r' in data: # Old Mac
123 return '\r'
123 return '\r'
124 if '\n' in data: # UNIX
124 if '\n' in data: # UNIX
125 return '\n'
125 return '\n'
126 return None # unknown
126 return None # unknown
127
127
128 def _matcheol(file, origfile):
128 def _matcheol(file, origfile):
129 "Convert EOL markers in a file to match origfile"
129 "Convert EOL markers in a file to match origfile"
130 tostyle = _eoltype(util.readfile(origfile))
130 tostyle = _eoltype(util.readfile(origfile))
131 if tostyle:
131 if tostyle:
132 data = util.readfile(file)
132 data = util.readfile(file)
133 style = _eoltype(data)
133 style = _eoltype(data)
134 if style:
134 if style:
135 newdata = data.replace(style, tostyle)
135 newdata = data.replace(style, tostyle)
136 if newdata != data:
136 if newdata != data:
137 util.writefile(file, newdata)
137 util.writefile(file, newdata)
138
138
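
A standalone restatement of the EOL guessing above may help: the detection order matters, since '\r\n' has to be tested before bare '\r'. eol_of below is only an illustration, not part of the module.

    def eol_of(data):
        if '\0' in data:              # treat NUL bytes as binary: leave alone
            return None
        for marker in ('\r\n', '\r', '\n'):
            if marker in data:
                return marker
        return None                   # no line ending found

    assert eol_of('a\r\nb\r\n') == '\r\n'
    assert eol_of('a\rb') == '\r'
    assert eol_of('a\x00b\r\n') is None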
139 @internaltool('prompt', False)
139 @internaltool('prompt', False)
140 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf):
140 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf):
141 """Asks the user which of the local or the other version to keep as
141 """Asks the user which of the local or the other version to keep as
142 the merged version."""
142 the merged version."""
143 ui = repo.ui
143 ui = repo.ui
144 fd = fcd.path()
144 fd = fcd.path()
145
145
146 if ui.promptchoice(_(" no tool found to merge %s\n"
147 - "keep (l)ocal or take (o)ther?") % fd,
148 - (_("&Local"), _("&Other")), 0):
147 + "keep (l)ocal or take (o)ther?"
148 + "$$ &Local $$ &Other") % fd, 0):
149 return _iother(repo, mynode, orig, fcd, fco, fca, toolconf)
149 return _iother(repo, mynode, orig, fcd, fco, fca, toolconf)
150 else:
150 else:
151 return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
151 return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
152
152
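
This hunk is the heart of the changeset: the choice labels that used to be passed as a separate tuple are now embedded in the prompt string itself, separated by "$$", with "&" marking each response character. The real parsing happens inside ui.promptchoice (not shown in this diff); split_prompt below is only a rough sketch of how such a combined string decomposes.

    def split_prompt(prompt):
        parts = prompt.split("$$")
        msg = parts[0]
        choices = [p.strip() for p in parts[1:]]
        resps = [c[c.index("&") + 1].lower() for c in choices]
        return msg, choices, resps

    msg, choices, resps = split_prompt(
        " no tool found to merge foo.txt\n"
        "keep (l)ocal or take (o)ther?"
        "$$ &Local $$ &Other")
    assert choices == ['&Local', '&Other'] and resps == ['l', 'o']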
153 @internaltool('local', False)
153 @internaltool('local', False)
154 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf):
154 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf):
155 """Uses the local version of files as the merged version."""
155 """Uses the local version of files as the merged version."""
156 return 0
156 return 0
157
157
158 @internaltool('other', False)
158 @internaltool('other', False)
159 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf):
159 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf):
160 """Uses the other version of files as the merged version."""
160 """Uses the other version of files as the merged version."""
161 repo.wwrite(fcd.path(), fco.data(), fco.flags())
161 repo.wwrite(fcd.path(), fco.data(), fco.flags())
162 return 0
162 return 0
163
163
164 @internaltool('fail', False)
164 @internaltool('fail', False)
165 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
165 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
166 """
166 """
167 Rather than attempting to merge files that were modified on both
167 Rather than attempting to merge files that were modified on both
168 branches, it marks them as unresolved. The resolve command must be
168 branches, it marks them as unresolved. The resolve command must be
169 used to resolve these conflicts."""
169 used to resolve these conflicts."""
170 return 1
170 return 1
171
171
172 def _premerge(repo, toolconf, files):
172 def _premerge(repo, toolconf, files):
173 tool, toolpath, binary, symlink = toolconf
173 tool, toolpath, binary, symlink = toolconf
174 if symlink:
174 if symlink:
175 return 1
175 return 1
176 a, b, c, back = files
176 a, b, c, back = files
177
177
178 ui = repo.ui
178 ui = repo.ui
179
179
180 # do we attempt to simplemerge first?
180 # do we attempt to simplemerge first?
181 try:
181 try:
182 premerge = _toolbool(ui, tool, "premerge", not binary)
182 premerge = _toolbool(ui, tool, "premerge", not binary)
183 except error.ConfigError:
183 except error.ConfigError:
184 premerge = _toolstr(ui, tool, "premerge").lower()
184 premerge = _toolstr(ui, tool, "premerge").lower()
185 valid = 'keep'.split()
185 valid = 'keep'.split()
186 if premerge not in valid:
186 if premerge not in valid:
187 _valid = ', '.join(["'" + v + "'" for v in valid])
187 _valid = ', '.join(["'" + v + "'" for v in valid])
188 raise error.ConfigError(_("%s.premerge not valid "
188 raise error.ConfigError(_("%s.premerge not valid "
189 "('%s' is neither boolean nor %s)") %
189 "('%s' is neither boolean nor %s)") %
190 (tool, premerge, _valid))
190 (tool, premerge, _valid))
191
191
192 if premerge:
192 if premerge:
193 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
193 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
194 if not r:
194 if not r:
195 ui.debug(" premerge successful\n")
195 ui.debug(" premerge successful\n")
196 return 0
196 return 0
197 if premerge != 'keep':
197 if premerge != 'keep':
198 util.copyfile(back, a) # restore from backup and try again
198 util.copyfile(back, a) # restore from backup and try again
199 return 1 # continue merging
199 return 1 # continue merging
200
200
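
The premerge knob read above is normally a boolean, but the string 'keep' is also accepted: premerge still runs, and on failure the conflict-marked output is left in place instead of being restored from the backup. A tiny interpretation helper, purely for illustration (interpret_premerge is not a real function in this module):

    def interpret_premerge(value, default=True):
        if value is None:
            return default, False
        if isinstance(value, bool):
            return value, False
        if str(value).lower() == 'keep':
            return True, True          # run premerge, keep markers on failure
        raise ValueError("premerge must be a boolean or 'keep', got %r" % value)

    assert interpret_premerge('keep') == (True, True)
    assert interpret_premerge(False) == (False, False)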
201 @internaltool('merge', True,
201 @internaltool('merge', True,
202 _("merging %s incomplete! "
202 _("merging %s incomplete! "
203 "(edit conflicts, then use 'hg resolve --mark')\n"))
203 "(edit conflicts, then use 'hg resolve --mark')\n"))
204 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
204 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
205 """
205 """
206 Uses the internal non-interactive simple merge algorithm for merging
206 Uses the internal non-interactive simple merge algorithm for merging
207 files. It will fail if there are any conflicts and leave markers in
207 files. It will fail if there are any conflicts and leave markers in
208 the partially merged file."""
208 the partially merged file."""
209 tool, toolpath, binary, symlink = toolconf
209 tool, toolpath, binary, symlink = toolconf
210 if symlink:
210 if symlink:
211 repo.ui.warn(_('warning: internal:merge cannot merge symlinks '
211 repo.ui.warn(_('warning: internal:merge cannot merge symlinks '
212 'for %s\n') % fcd.path())
212 'for %s\n') % fcd.path())
213 return False, 1
213 return False, 1
214
214
215 r = _premerge(repo, toolconf, files)
215 r = _premerge(repo, toolconf, files)
216 if r:
216 if r:
217 a, b, c, back = files
217 a, b, c, back = files
218
218
219 ui = repo.ui
219 ui = repo.ui
220
220
221 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
221 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
222 return True, r
222 return True, r
223 return False, 0
223 return False, 0
224
224
225 @internaltool('dump', True)
225 @internaltool('dump', True)
226 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files):
226 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files):
227 """
227 """
228 Creates three versions of the files to merge, containing the
228 Creates three versions of the files to merge, containing the
229 contents of local, other and base. These files can then be used to
229 contents of local, other and base. These files can then be used to
230 perform a merge manually. If the file to be merged is named
230 perform a merge manually. If the file to be merged is named
231 ``a.txt``, these files will accordingly be named ``a.txt.local``,
231 ``a.txt``, these files will accordingly be named ``a.txt.local``,
232 ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
232 ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
233 same directory as ``a.txt``."""
233 same directory as ``a.txt``."""
234 r = _premerge(repo, toolconf, files)
234 r = _premerge(repo, toolconf, files)
235 if r:
235 if r:
236 a, b, c, back = files
236 a, b, c, back = files
237
237
238 fd = fcd.path()
238 fd = fcd.path()
239
239
240 util.copyfile(a, a + ".local")
240 util.copyfile(a, a + ".local")
241 repo.wwrite(fd + ".other", fco.data(), fco.flags())
241 repo.wwrite(fd + ".other", fco.data(), fco.flags())
242 repo.wwrite(fd + ".base", fca.data(), fca.flags())
242 repo.wwrite(fd + ".base", fca.data(), fca.flags())
243 return False, r
243 return False, r
244
244
245 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
245 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
246 r = _premerge(repo, toolconf, files)
246 r = _premerge(repo, toolconf, files)
247 if r:
247 if r:
248 tool, toolpath, binary, symlink = toolconf
248 tool, toolpath, binary, symlink = toolconf
249 a, b, c, back = files
249 a, b, c, back = files
250 out = ""
250 out = ""
251 env = dict(HG_FILE=fcd.path(),
251 env = dict(HG_FILE=fcd.path(),
252 HG_MY_NODE=short(mynode),
252 HG_MY_NODE=short(mynode),
253 HG_OTHER_NODE=str(fco.changectx()),
253 HG_OTHER_NODE=str(fco.changectx()),
254 HG_BASE_NODE=str(fca.changectx()),
254 HG_BASE_NODE=str(fca.changectx()),
255 HG_MY_ISLINK='l' in fcd.flags(),
255 HG_MY_ISLINK='l' in fcd.flags(),
256 HG_OTHER_ISLINK='l' in fco.flags(),
256 HG_OTHER_ISLINK='l' in fco.flags(),
257 HG_BASE_ISLINK='l' in fca.flags())
257 HG_BASE_ISLINK='l' in fca.flags())
258
258
259 ui = repo.ui
259 ui = repo.ui
260
260
261 args = _toolstr(ui, tool, "args", '$local $base $other')
261 args = _toolstr(ui, tool, "args", '$local $base $other')
262 if "$output" in args:
262 if "$output" in args:
263 out, a = a, back # read input from backup, write to original
263 out, a = a, back # read input from backup, write to original
264 replace = dict(local=a, base=b, other=c, output=out)
264 replace = dict(local=a, base=b, other=c, output=out)
265 args = util.interpolate(r'\$', replace, args,
265 args = util.interpolate(r'\$', replace, args,
266 lambda s: util.shellquote(util.localpath(s)))
266 lambda s: util.shellquote(util.localpath(s)))
267 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
267 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
268 out=ui.fout)
268 out=ui.fout)
269 return True, r
269 return True, r
270 return False, 0
270 return False, 0
271
271
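
External tools get their command line from merge-tools.<tool>.args, with $local/$base/$other/$output substituted by quoted file names, as done via util.interpolate above. A simplified stand-in using re.sub, with naive quoting (expand_args is illustrative, not the hg implementation):

    import re

    def expand_args(args, local, base, other, output=""):
        replace = {'local': local, 'base': base, 'other': other,
                   'output': output}
        return re.sub(r'\$(\w+)',
                      lambda m: '"%s"' % replace.get(m.group(1), m.group(0)),
                      args)

    assert (expand_args('$local $base $other', 'a', 'a~base.txt', 'a~other.txt')
            == '"a" "a~base.txt" "a~other.txt"')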
272 def filemerge(repo, mynode, orig, fcd, fco, fca):
272 def filemerge(repo, mynode, orig, fcd, fco, fca):
273 """perform a 3-way merge in the working directory
273 """perform a 3-way merge in the working directory
274
274
275 mynode = parent node before merge
275 mynode = parent node before merge
276 orig = original local filename before merge
276 orig = original local filename before merge
277 fco = other file context
277 fco = other file context
278 fca = ancestor file context
278 fca = ancestor file context
279 fcd = local file context for current/destination file
279 fcd = local file context for current/destination file
280 """
280 """
281
281
282 def temp(prefix, ctx):
282 def temp(prefix, ctx):
283 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
283 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
284 (fd, name) = tempfile.mkstemp(prefix=pre)
284 (fd, name) = tempfile.mkstemp(prefix=pre)
285 data = repo.wwritedata(ctx.path(), ctx.data())
285 data = repo.wwritedata(ctx.path(), ctx.data())
286 f = os.fdopen(fd, "wb")
286 f = os.fdopen(fd, "wb")
287 f.write(data)
287 f.write(data)
288 f.close()
288 f.close()
289 return name
289 return name
290
290
291 if not fco.cmp(fcd): # files identical?
291 if not fco.cmp(fcd): # files identical?
292 return None
292 return None
293
293
294 ui = repo.ui
294 ui = repo.ui
295 fd = fcd.path()
295 fd = fcd.path()
296 binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
296 binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
297 symlink = 'l' in fcd.flags() + fco.flags()
297 symlink = 'l' in fcd.flags() + fco.flags()
298 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
298 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
299 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
299 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
300 (tool, fd, binary, symlink))
300 (tool, fd, binary, symlink))
301
301
302 if tool in internals:
302 if tool in internals:
303 func = internals[tool]
303 func = internals[tool]
304 trymerge = func.trymerge
304 trymerge = func.trymerge
305 onfailure = func.onfailure
305 onfailure = func.onfailure
306 else:
306 else:
307 func = _xmerge
307 func = _xmerge
308 trymerge = True
308 trymerge = True
309 onfailure = _("merging %s failed!\n")
309 onfailure = _("merging %s failed!\n")
310
310
311 toolconf = tool, toolpath, binary, symlink
311 toolconf = tool, toolpath, binary, symlink
312
312
313 if not trymerge:
313 if not trymerge:
314 return func(repo, mynode, orig, fcd, fco, fca, toolconf)
314 return func(repo, mynode, orig, fcd, fco, fca, toolconf)
315
315
316 a = repo.wjoin(fd)
316 a = repo.wjoin(fd)
317 b = temp("base", fca)
317 b = temp("base", fca)
318 c = temp("other", fco)
318 c = temp("other", fco)
319 back = a + ".orig"
319 back = a + ".orig"
320 util.copyfile(a, back)
320 util.copyfile(a, back)
321
321
322 if orig != fco.path():
322 if orig != fco.path():
323 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
323 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
324 else:
324 else:
325 ui.status(_("merging %s\n") % fd)
325 ui.status(_("merging %s\n") % fd)
326
326
327 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
327 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
328
328
329 needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf,
329 needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf,
330 (a, b, c, back))
330 (a, b, c, back))
331 if not needcheck:
331 if not needcheck:
332 if r:
332 if r:
333 if onfailure:
333 if onfailure:
334 ui.warn(onfailure % fd)
334 ui.warn(onfailure % fd)
335 else:
335 else:
336 os.unlink(back)
336 os.unlink(back)
337
337
338 os.unlink(b)
338 os.unlink(b)
339 os.unlink(c)
339 os.unlink(c)
340 return r
340 return r
341
341
342 if not r and (_toolbool(ui, tool, "checkconflicts") or
342 if not r and (_toolbool(ui, tool, "checkconflicts") or
343 'conflicts' in _toollist(ui, tool, "check")):
343 'conflicts' in _toollist(ui, tool, "check")):
344 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
344 if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
345 re.MULTILINE):
345 re.MULTILINE):
346 r = 1
346 r = 1
347
347
348 checked = False
348 checked = False
349 if 'prompt' in _toollist(ui, tool, "check"):
349 if 'prompt' in _toollist(ui, tool, "check"):
350 checked = True
350 checked = True
351 - if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
352 - (_("&Yes"), _("&No")), 1):
351 + if ui.promptchoice(_("was merge of '%s' successful (yn)?"
352 + "$$ &Yes $$ &No") % fd, 1):
353 r = 1
353 r = 1
354
354
355 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
355 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
356 'changed' in _toollist(ui, tool, "check")):
356 'changed' in _toollist(ui, tool, "check")):
357 if filecmp.cmp(a, back):
357 if filecmp.cmp(a, back):
358 if ui.promptchoice(_(" output file %s appears unchanged\n"
359 - "was merge successful (yn)?") % fd,
360 - (_("&Yes"), _("&No")), 1):
359 + "was merge successful (yn)?"
360 + "$$ &Yes $$ &No") % fd, 1):
361 r = 1
361 r = 1
362
362
363 if _toolbool(ui, tool, "fixeol"):
363 if _toolbool(ui, tool, "fixeol"):
364 _matcheol(a, back)
364 _matcheol(a, back)
365
365
366 if r:
366 if r:
367 if onfailure:
367 if onfailure:
368 ui.warn(onfailure % fd)
368 ui.warn(onfailure % fd)
369 else:
369 else:
370 os.unlink(back)
370 os.unlink(back)
371
371
372 os.unlink(b)
372 os.unlink(b)
373 os.unlink(c)
373 os.unlink(c)
374 return r
374 return r
375
375
376 # tell hggettext to extract docstrings from these functions:
376 # tell hggettext to extract docstrings from these functions:
377 i18nfunctions = internals.values()
377 i18nfunctions = internals.values()
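
The 'checkconflicts' post-check in filemerge above simply greps the merged result for leftover conflict markers. Restated on its own (has_conflict_markers is illustrative only):

    import re

    def has_conflict_markers(text):
        return bool(re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", text,
                              re.MULTILINE))

    assert has_conflict_markers(
        "a\n<<<<<<< local\nx\n=======\ny\n>>>>>>> other\n")
    assert not has_conflict_markers("cleanly merged\n")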
@@ -1,761 +1,761 @@ merge.py
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, hex, bin
8 from node import nullid, nullrev, hex, bin
9 from i18n import _
9 from i18n import _
10 from mercurial import obsolete
10 from mercurial import obsolete
11 import error, util, filemerge, copies, subrepo, worker, dicthelpers
11 import error, util, filemerge, copies, subrepo, worker, dicthelpers
12 import errno, os, shutil
12 import errno, os, shutil
13
13
14 class mergestate(object):
14 class mergestate(object):
15 '''track 3-way merge state of individual files'''
15 '''track 3-way merge state of individual files'''
16 def __init__(self, repo):
16 def __init__(self, repo):
17 self._repo = repo
17 self._repo = repo
18 self._dirty = False
18 self._dirty = False
19 self._read()
19 self._read()
20 def reset(self, node=None):
20 def reset(self, node=None):
21 self._state = {}
21 self._state = {}
22 if node:
22 if node:
23 self._local = node
23 self._local = node
24 shutil.rmtree(self._repo.join("merge"), True)
24 shutil.rmtree(self._repo.join("merge"), True)
25 self._dirty = False
25 self._dirty = False
26 def _read(self):
26 def _read(self):
27 self._state = {}
27 self._state = {}
28 try:
28 try:
29 f = self._repo.opener("merge/state")
29 f = self._repo.opener("merge/state")
30 for i, l in enumerate(f):
30 for i, l in enumerate(f):
31 if i == 0:
31 if i == 0:
32 self._local = bin(l[:-1])
32 self._local = bin(l[:-1])
33 else:
33 else:
34 bits = l[:-1].split("\0")
34 bits = l[:-1].split("\0")
35 self._state[bits[0]] = bits[1:]
35 self._state[bits[0]] = bits[1:]
36 f.close()
36 f.close()
37 except IOError, err:
37 except IOError, err:
38 if err.errno != errno.ENOENT:
38 if err.errno != errno.ENOENT:
39 raise
39 raise
40 self._dirty = False
40 self._dirty = False
41 def commit(self):
41 def commit(self):
42 if self._dirty:
42 if self._dirty:
43 f = self._repo.opener("merge/state", "w")
43 f = self._repo.opener("merge/state", "w")
44 f.write(hex(self._local) + "\n")
44 f.write(hex(self._local) + "\n")
45 for d, v in self._state.iteritems():
45 for d, v in self._state.iteritems():
46 f.write("\0".join([d] + v) + "\n")
46 f.write("\0".join([d] + v) + "\n")
47 f.close()
47 f.close()
48 self._dirty = False
48 self._dirty = False
49 def add(self, fcl, fco, fca, fd):
49 def add(self, fcl, fco, fca, fd):
50 hash = util.sha1(fcl.path()).hexdigest()
50 hash = util.sha1(fcl.path()).hexdigest()
51 self._repo.opener.write("merge/" + hash, fcl.data())
51 self._repo.opener.write("merge/" + hash, fcl.data())
52 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
52 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
53 hex(fca.filenode()), fco.path(), fcl.flags()]
53 hex(fca.filenode()), fco.path(), fcl.flags()]
54 self._dirty = True
54 self._dirty = True
55 def __contains__(self, dfile):
55 def __contains__(self, dfile):
56 return dfile in self._state
56 return dfile in self._state
57 def __getitem__(self, dfile):
57 def __getitem__(self, dfile):
58 return self._state[dfile][0]
58 return self._state[dfile][0]
59 def __iter__(self):
59 def __iter__(self):
60 l = self._state.keys()
60 l = self._state.keys()
61 l.sort()
61 l.sort()
62 for f in l:
62 for f in l:
63 yield f
63 yield f
64 def mark(self, dfile, state):
64 def mark(self, dfile, state):
65 self._state[dfile][0] = state
65 self._state[dfile][0] = state
66 self._dirty = True
66 self._dirty = True
67 def resolve(self, dfile, wctx, octx):
67 def resolve(self, dfile, wctx, octx):
68 if self[dfile] == 'r':
68 if self[dfile] == 'r':
69 return 0
69 return 0
70 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
70 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
71 fcd = wctx[dfile]
71 fcd = wctx[dfile]
72 fco = octx[ofile]
72 fco = octx[ofile]
73 fca = self._repo.filectx(afile, fileid=anode)
73 fca = self._repo.filectx(afile, fileid=anode)
74 # "premerge" x flags
74 # "premerge" x flags
75 flo = fco.flags()
75 flo = fco.flags()
76 fla = fca.flags()
76 fla = fca.flags()
77 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
77 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
78 if fca.node() == nullid:
78 if fca.node() == nullid:
79 self._repo.ui.warn(_('warning: cannot merge flags for %s\n') %
79 self._repo.ui.warn(_('warning: cannot merge flags for %s\n') %
80 afile)
80 afile)
81 elif flags == fla:
81 elif flags == fla:
82 flags = flo
82 flags = flo
83 # restore local
83 # restore local
84 f = self._repo.opener("merge/" + hash)
84 f = self._repo.opener("merge/" + hash)
85 self._repo.wwrite(dfile, f.read(), flags)
85 self._repo.wwrite(dfile, f.read(), flags)
86 f.close()
86 f.close()
87 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
87 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
88 if r is None:
88 if r is None:
89 # no real conflict
89 # no real conflict
90 del self._state[dfile]
90 del self._state[dfile]
91 elif not r:
91 elif not r:
92 self.mark(dfile, 'r')
92 self.mark(dfile, 'r')
93 return r
93 return r
94
94
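
For reference, the .hg/merge/state layout read and written by _read/commit above: the first line is the hex node of the local parent, and each following line is one NUL-separated record keyed by the destination file. parse_merge_state below is a toy reader of that layout (no error handling), not part of the class:

    def parse_merge_state(lines):
        local = lines[0].rstrip("\n")
        state = {}
        for l in lines[1:]:
            bits = l.rstrip("\n").split("\0")
            # bits[1:] == [state, backup hash, local path, ancestor path,
            #              ancestor filenode, other path, flags]
            state[bits[0]] = bits[1:]
        return local, state

    local, state = parse_merge_state([
        "0123456789abcdef0123456789abcdef01234567\n",
        "a.txt\0u\0<sha1>\0a.txt\0a.txt\0<anode>\0a.txt\0\n",
    ])
    assert state["a.txt"][0] == "u"    # 'u' = unresolved, 'r' = resolved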
95 def _checkunknownfile(repo, wctx, mctx, f):
95 def _checkunknownfile(repo, wctx, mctx, f):
96 return (not repo.dirstate._ignore(f)
96 return (not repo.dirstate._ignore(f)
97 and os.path.isfile(repo.wjoin(f))
97 and os.path.isfile(repo.wjoin(f))
98 and repo.wopener.audit.check(f)
98 and repo.wopener.audit.check(f)
99 and repo.dirstate.normalize(f) not in repo.dirstate
99 and repo.dirstate.normalize(f) not in repo.dirstate
100 and mctx[f].cmp(wctx[f]))
100 and mctx[f].cmp(wctx[f]))
101
101
102 def _checkunknown(repo, wctx, mctx):
102 def _checkunknown(repo, wctx, mctx):
103 "check for collisions between unknown files and files in mctx"
103 "check for collisions between unknown files and files in mctx"
104
104
105 error = False
105 error = False
106 for f in mctx:
106 for f in mctx:
107 if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
107 if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
108 error = True
108 error = True
109 wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
109 wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
110 if error:
110 if error:
111 raise util.Abort(_("untracked files in working directory differ "
111 raise util.Abort(_("untracked files in working directory differ "
112 "from files in requested revision"))
112 "from files in requested revision"))
113
113
114 def _forgetremoved(wctx, mctx, branchmerge):
114 def _forgetremoved(wctx, mctx, branchmerge):
115 """
115 """
116 Forget removed files
116 Forget removed files
117
117
118 If we're jumping between revisions (as opposed to merging), and if
118 If we're jumping between revisions (as opposed to merging), and if
119 neither the working directory nor the target rev has the file,
119 neither the working directory nor the target rev has the file,
120 then we need to remove it from the dirstate, to prevent the
120 then we need to remove it from the dirstate, to prevent the
121 dirstate from listing the file when it is no longer in the
121 dirstate from listing the file when it is no longer in the
122 manifest.
122 manifest.
123
123
124 If we're merging, and the other revision has removed a file
124 If we're merging, and the other revision has removed a file
125 that is not present in the working directory, we need to mark it
125 that is not present in the working directory, we need to mark it
126 as removed.
126 as removed.
127 """
127 """
128
128
129 actions = []
129 actions = []
130 state = branchmerge and 'r' or 'f'
130 state = branchmerge and 'r' or 'f'
131 for f in wctx.deleted():
131 for f in wctx.deleted():
132 if f not in mctx:
132 if f not in mctx:
133 actions.append((f, state, None, "forget deleted"))
133 actions.append((f, state, None, "forget deleted"))
134
134
135 if not branchmerge:
135 if not branchmerge:
136 for f in wctx.removed():
136 for f in wctx.removed():
137 if f not in mctx:
137 if f not in mctx:
138 actions.append((f, "f", None, "forget removed"))
138 actions.append((f, "f", None, "forget removed"))
139
139
140 return actions
140 return actions
141
141
142 def _checkcollision(repo, wmf, actions, prompts):
142 def _checkcollision(repo, wmf, actions, prompts):
143 # build provisional merged manifest up
143 # build provisional merged manifest up
144 pmmf = set(wmf)
144 pmmf = set(wmf)
145
145
146 def addop(f, args):
146 def addop(f, args):
147 pmmf.add(f)
147 pmmf.add(f)
148 def removeop(f, args):
148 def removeop(f, args):
149 pmmf.discard(f)
149 pmmf.discard(f)
150 def nop(f, args):
150 def nop(f, args):
151 pass
151 pass
152
152
153 def renameop(f, args):
153 def renameop(f, args):
154 f2, fd, flags = args
154 f2, fd, flags = args
155 if f:
155 if f:
156 pmmf.discard(f)
156 pmmf.discard(f)
157 pmmf.add(fd)
157 pmmf.add(fd)
158 def mergeop(f, args):
158 def mergeop(f, args):
159 f2, fd, move = args
159 f2, fd, move = args
160 if move:
160 if move:
161 pmmf.discard(f)
161 pmmf.discard(f)
162 pmmf.add(fd)
162 pmmf.add(fd)
163
163
164 opmap = {
164 opmap = {
165 "a": addop,
165 "a": addop,
166 "d": renameop,
166 "d": renameop,
167 "dr": nop,
167 "dr": nop,
168 "e": nop,
168 "e": nop,
169 "f": addop, # untracked file should be kept in working directory
169 "f": addop, # untracked file should be kept in working directory
170 "g": addop,
170 "g": addop,
171 "m": mergeop,
171 "m": mergeop,
172 "r": removeop,
172 "r": removeop,
173 "rd": nop,
173 "rd": nop,
174 }
174 }
175 for f, m, args, msg in actions:
175 for f, m, args, msg in actions:
176 op = opmap.get(m)
176 op = opmap.get(m)
177 assert op, m
177 assert op, m
178 op(f, args)
178 op(f, args)
179
179
180 opmap = {
180 opmap = {
181 "cd": addop,
181 "cd": addop,
182 "dc": addop,
182 "dc": addop,
183 }
183 }
184 for f, m in prompts:
184 for f, m in prompts:
185 op = opmap.get(m)
185 op = opmap.get(m)
186 assert op, m
186 assert op, m
187 op(f, None)
187 op(f, None)
188
188
189 # check case-folding collision in provisional merged manifest
189 # check case-folding collision in provisional merged manifest
190 foldmap = {}
190 foldmap = {}
191 for f in sorted(pmmf):
191 for f in sorted(pmmf):
192 fold = util.normcase(f)
192 fold = util.normcase(f)
193 if fold in foldmap:
193 if fold in foldmap:
194 raise util.Abort(_("case-folding collision between %s and %s")
194 raise util.Abort(_("case-folding collision between %s and %s")
195 % (f, foldmap[fold]))
195 % (f, foldmap[fold]))
196 foldmap[fold] = f
196 foldmap[fold] = f
197
197
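
The case-folding check above aborts as soon as two distinct paths in the provisional manifest normalise to the same name. A minimal restatement (find_case_collisions is illustrative; str.lower stands in for util.normcase):

    def find_case_collisions(paths):
        seen = {}
        collisions = []
        for p in sorted(paths):
            key = p.lower()
            if key in seen:
                collisions.append((p, seen[key]))
            else:
                seen[key] = p
        return collisions

    assert find_case_collisions(['README', 'readme', 'src/a.py']) == \
        [('readme', 'README')]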
198 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial,
198 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial,
199 acceptremote=False):
199 acceptremote=False):
200 """
200 """
201 Merge p1 and p2 with ancestor pa and generate merge action list
201 Merge p1 and p2 with ancestor pa and generate merge action list
202
202
203 branchmerge and force are as passed in to update
203 branchmerge and force are as passed in to update
204 partial = function to filter file lists
204 partial = function to filter file lists
205 acceptremote = accept the incoming changes without prompting
205 acceptremote = accept the incoming changes without prompting
206 """
206 """
207
207
208 overwrite = force and not branchmerge
208 overwrite = force and not branchmerge
209 actions, copy, movewithdir = [], {}, {}
209 actions, copy, movewithdir = [], {}, {}
210
210
211 followcopies = False
211 followcopies = False
212 if overwrite:
212 if overwrite:
213 pa = wctx
213 pa = wctx
214 elif pa == p2: # backwards
214 elif pa == p2: # backwards
215 pa = wctx.p1()
215 pa = wctx.p1()
216 elif not branchmerge and not wctx.dirty(missing=True):
216 elif not branchmerge and not wctx.dirty(missing=True):
217 pass
217 pass
218 elif pa and repo.ui.configbool("merge", "followcopies", True):
218 elif pa and repo.ui.configbool("merge", "followcopies", True):
219 followcopies = True
219 followcopies = True
220
220
221 # manifests fetched in order are going to be faster, so prime the caches
221 # manifests fetched in order are going to be faster, so prime the caches
222 [x.manifest() for x in
222 [x.manifest() for x in
223 sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
223 sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
224
224
225 if followcopies:
225 if followcopies:
226 ret = copies.mergecopies(repo, wctx, p2, pa)
226 ret = copies.mergecopies(repo, wctx, p2, pa)
227 copy, movewithdir, diverge, renamedelete = ret
227 copy, movewithdir, diverge, renamedelete = ret
228 for of, fl in diverge.iteritems():
228 for of, fl in diverge.iteritems():
229 actions.append((of, "dr", (fl,), "divergent renames"))
229 actions.append((of, "dr", (fl,), "divergent renames"))
230 for of, fl in renamedelete.iteritems():
230 for of, fl in renamedelete.iteritems():
231 actions.append((of, "rd", (fl,), "rename and delete"))
231 actions.append((of, "rd", (fl,), "rename and delete"))
232
232
233 repo.ui.note(_("resolving manifests\n"))
233 repo.ui.note(_("resolving manifests\n"))
234 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
234 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
235 % (bool(branchmerge), bool(force), bool(partial)))
235 % (bool(branchmerge), bool(force), bool(partial)))
236 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
236 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
237
237
238 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
238 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
239 copied = set(copy.values())
239 copied = set(copy.values())
240 copied.update(movewithdir.values())
240 copied.update(movewithdir.values())
241
241
242 if '.hgsubstate' in m1:
242 if '.hgsubstate' in m1:
243 # check whether sub state is modified
243 # check whether sub state is modified
244 for s in sorted(wctx.substate):
244 for s in sorted(wctx.substate):
245 if wctx.sub(s).dirty():
245 if wctx.sub(s).dirty():
246 m1['.hgsubstate'] += "+"
246 m1['.hgsubstate'] += "+"
247 break
247 break
248
248
249 aborts, prompts = [], []
249 aborts, prompts = [], []
250 # Compare manifests
250 # Compare manifests
251 fdiff = dicthelpers.diff(m1, m2)
251 fdiff = dicthelpers.diff(m1, m2)
252 flagsdiff = m1.flagsdiff(m2)
252 flagsdiff = m1.flagsdiff(m2)
253 diff12 = dicthelpers.join(fdiff, flagsdiff)
253 diff12 = dicthelpers.join(fdiff, flagsdiff)
254
254
255 for f, (n12, fl12) in diff12.iteritems():
255 for f, (n12, fl12) in diff12.iteritems():
256 if n12:
256 if n12:
257 n1, n2 = n12
257 n1, n2 = n12
258 else: # file contents didn't change, but flags did
258 else: # file contents didn't change, but flags did
259 n1 = n2 = m1.get(f, None)
259 n1 = n2 = m1.get(f, None)
260 if n1 is None:
260 if n1 is None:
261 # Since n1 == n2, the file isn't present in m2 either. This
261 # Since n1 == n2, the file isn't present in m2 either. This
262 # means that the file was removed or deleted locally and
262 # means that the file was removed or deleted locally and
263 # removed remotely, but that residual entries remain in flags.
263 # removed remotely, but that residual entries remain in flags.
264 # This can happen in manifests generated by workingctx.
264 # This can happen in manifests generated by workingctx.
265 continue
265 continue
266 if fl12:
266 if fl12:
267 fl1, fl2 = fl12
267 fl1, fl2 = fl12
268 else: # flags didn't change, file contents did
268 else: # flags didn't change, file contents did
269 fl1 = fl2 = m1.flags(f)
269 fl1 = fl2 = m1.flags(f)
270
270
271 if partial and not partial(f):
271 if partial and not partial(f):
272 continue
272 continue
273 if n1 and n2:
273 if n1 and n2:
274 fla = ma.flags(f)
274 fla = ma.flags(f)
275 nol = 'l' not in fl1 + fl2 + fla
275 nol = 'l' not in fl1 + fl2 + fla
276 a = ma.get(f, nullid)
276 a = ma.get(f, nullid)
277 if n2 == a and fl2 == fla:
277 if n2 == a and fl2 == fla:
278 pass # remote unchanged - keep local
278 pass # remote unchanged - keep local
279 elif n1 == a and fl1 == fla: # local unchanged - use remote
279 elif n1 == a and fl1 == fla: # local unchanged - use remote
280 if n1 == n2: # optimization: keep local content
280 if n1 == n2: # optimization: keep local content
281 actions.append((f, "e", (fl2,), "update permissions"))
281 actions.append((f, "e", (fl2,), "update permissions"))
282 else:
282 else:
283 actions.append((f, "g", (fl2,), "remote is newer"))
283 actions.append((f, "g", (fl2,), "remote is newer"))
284 elif nol and n2 == a: # remote only changed 'x'
284 elif nol and n2 == a: # remote only changed 'x'
285 actions.append((f, "e", (fl2,), "update permissions"))
285 actions.append((f, "e", (fl2,), "update permissions"))
286 elif nol and n1 == a: # local only changed 'x'
286 elif nol and n1 == a: # local only changed 'x'
287 actions.append((f, "g", (fl1,), "remote is newer"))
287 actions.append((f, "g", (fl1,), "remote is newer"))
288 else: # both changed something
288 else: # both changed something
289 actions.append((f, "m", (f, f, False), "versions differ"))
289 actions.append((f, "m", (f, f, False), "versions differ"))
290 elif f in copied: # files we'll deal with on m2 side
290 elif f in copied: # files we'll deal with on m2 side
291 pass
291 pass
292 elif n1 and f in movewithdir: # directory rename
292 elif n1 and f in movewithdir: # directory rename
293 f2 = movewithdir[f]
293 f2 = movewithdir[f]
294 actions.append((f, "d", (None, f2, fl1),
294 actions.append((f, "d", (None, f2, fl1),
295 "remote renamed directory to " + f2))
295 "remote renamed directory to " + f2))
296 elif n1 and f in copy:
296 elif n1 and f in copy:
297 f2 = copy[f]
297 f2 = copy[f]
298 actions.append((f, "m", (f2, f, False),
298 actions.append((f, "m", (f2, f, False),
299 "local copied/moved to " + f2))
299 "local copied/moved to " + f2))
300 elif n1 and f in ma: # clean, a different, no remote
300 elif n1 and f in ma: # clean, a different, no remote
301 if n1 != ma[f]:
301 if n1 != ma[f]:
302 prompts.append((f, "cd")) # prompt changed/deleted
302 prompts.append((f, "cd")) # prompt changed/deleted
303 elif n1[20:] == "a": # added, no remote
303 elif n1[20:] == "a": # added, no remote
304 actions.append((f, "f", None, "remote deleted"))
304 actions.append((f, "f", None, "remote deleted"))
305 else:
305 else:
306 actions.append((f, "r", None, "other deleted"))
306 actions.append((f, "r", None, "other deleted"))
307 elif n2 and f in movewithdir:
307 elif n2 and f in movewithdir:
308 f2 = movewithdir[f]
308 f2 = movewithdir[f]
309 actions.append((None, "d", (f, f2, fl2),
309 actions.append((None, "d", (f, f2, fl2),
310 "local renamed directory to " + f2))
310 "local renamed directory to " + f2))
311 elif n2 and f in copy:
311 elif n2 and f in copy:
312 f2 = copy[f]
312 f2 = copy[f]
313 if f2 in m2:
313 if f2 in m2:
314 actions.append((f2, "m", (f, f, False),
314 actions.append((f2, "m", (f, f, False),
315 "remote copied to " + f))
315 "remote copied to " + f))
316 else:
316 else:
317 actions.append((f2, "m", (f, f, True),
317 actions.append((f2, "m", (f, f, True),
318 "remote moved to " + f))
318 "remote moved to " + f))
319 elif n2 and f not in ma:
319 elif n2 and f not in ma:
320 # local unknown, remote created: the logic is described by the
320 # local unknown, remote created: the logic is described by the
321 # following table:
321 # following table:
322 #
322 #
323 # force branchmerge different | action
323 # force branchmerge different | action
324 # n * n | get
324 # n * n | get
325 # n * y | abort
325 # n * y | abort
326 # y n * | get
326 # y n * | get
327 # y y n | get
327 # y y n | get
328 # y y y | merge
328 # y y y | merge
329 #
329 #
330 # Checking whether the files are different is expensive, so we
330 # Checking whether the files are different is expensive, so we
331 # don't do that when we can avoid it.
331 # don't do that when we can avoid it.
332 if force and not branchmerge:
332 if force and not branchmerge:
333 actions.append((f, "g", (fl2,), "remote created"))
333 actions.append((f, "g", (fl2,), "remote created"))
334 else:
334 else:
335 different = _checkunknownfile(repo, wctx, p2, f)
335 different = _checkunknownfile(repo, wctx, p2, f)
336 if force and branchmerge and different:
336 if force and branchmerge and different:
337 actions.append((f, "m", (f, f, False),
337 actions.append((f, "m", (f, f, False),
338 "remote differs from untracked local"))
338 "remote differs from untracked local"))
339 elif not force and different:
339 elif not force and different:
340 aborts.append((f, "ud"))
340 aborts.append((f, "ud"))
341 else:
341 else:
342 actions.append((f, "g", (fl2,), "remote created"))
342 actions.append((f, "g", (fl2,), "remote created"))
343 elif n2 and n2 != ma[f]:
343 elif n2 and n2 != ma[f]:
344 prompts.append((f, "dc")) # prompt deleted/changed
344 prompts.append((f, "dc")) # prompt deleted/changed
345
345
346 for f, m in sorted(aborts):
346 for f, m in sorted(aborts):
347 if m == "ud":
347 if m == "ud":
348 repo.ui.warn(_("%s: untracked file differs\n") % f)
348 repo.ui.warn(_("%s: untracked file differs\n") % f)
349 else: assert False, m
349 else: assert False, m
350 if aborts:
350 if aborts:
351 raise util.Abort(_("untracked files in working directory differ "
351 raise util.Abort(_("untracked files in working directory differ "
352 "from files in requested revision"))
352 "from files in requested revision"))
353
353
354 if not util.checkcase(repo.path):
354 if not util.checkcase(repo.path):
355 # check collision between files only in p2 for clean update
355 # check collision between files only in p2 for clean update
356 if (not branchmerge and
356 if (not branchmerge and
357 (force or not wctx.dirty(missing=True, branch=False))):
357 (force or not wctx.dirty(missing=True, branch=False))):
358 _checkcollision(repo, m2, [], [])
358 _checkcollision(repo, m2, [], [])
359 else:
359 else:
360 _checkcollision(repo, m1, actions, prompts)
360 _checkcollision(repo, m1, actions, prompts)
361
361
362 for f, m in sorted(prompts):
362 for f, m in sorted(prompts):
363 if m == "cd":
363 if m == "cd":
364 if acceptremote:
364 if acceptremote:
365 actions.append((f, "r", None, "remote delete"))
365 actions.append((f, "r", None, "remote delete"))
366 elif repo.ui.promptchoice(
367 _("local changed %s which remote deleted\n"
368 - "use (c)hanged version or (d)elete?") % f,
369 - (_("&Changed"), _("&Delete")), 0):
368 + "use (c)hanged version or (d)elete?"
369 + "$$ &Changed $$ &Delete") % f, 0):
370 actions.append((f, "r", None, "prompt delete"))
370 actions.append((f, "r", None, "prompt delete"))
371 else:
371 else:
372 actions.append((f, "a", None, "prompt keep"))
372 actions.append((f, "a", None, "prompt keep"))
373 elif m == "dc":
373 elif m == "dc":
374 if acceptremote:
374 if acceptremote:
375 actions.append((f, "g", (m2.flags(f),), "remote recreating"))
375 actions.append((f, "g", (m2.flags(f),), "remote recreating"))
376 elif repo.ui.promptchoice(
377 _("remote changed %s which local deleted\n"
378 - "use (c)hanged version or leave (d)eleted?") % f,
379 - (_("&Changed"), _("&Deleted")), 0) == 0:
378 + "use (c)hanged version or leave (d)eleted?"
379 + "$$ &Changed $$ &Deleted") % f, 0) == 0:
380 actions.append((f, "g", (m2.flags(f),), "prompt recreating"))
380 actions.append((f, "g", (m2.flags(f),), "prompt recreating"))
381 else: assert False, m
381 else: assert False, m
382 return actions
382 return actions
383
383
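
The force/branchmerge/different table in the "local unknown, remote created" comment inside manifestmerge can be restated as a small function. unknown_file_action is purely illustrative; differs is a callable so the expensive content comparison only happens when the table needs it, mirroring the short-circuit in the code above.

    def unknown_file_action(force, branchmerge, differs):
        if force and not branchmerge:
            return "get"
        different = differs()
        if force and branchmerge and different:
            return "merge"
        if not force and different:
            return "abort"
        return "get"

    assert unknown_file_action(True, False, lambda: True) == "get"
    assert unknown_file_action(False, True, lambda: True) == "abort"
    assert unknown_file_action(True, True, lambda: True) == "merge"
    assert unknown_file_action(False, False, lambda: False) == "get"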
384 def actionkey(a):
384 def actionkey(a):
385 return a[1] == "r" and -1 or 0, a
385 return a[1] == "r" and -1 or 0, a
386
386
387 def getremove(repo, mctx, overwrite, args):
387 def getremove(repo, mctx, overwrite, args):
388 """apply usually-non-interactive updates to the working directory
388 """apply usually-non-interactive updates to the working directory
389
389
390 mctx is the context to be merged into the working copy
390 mctx is the context to be merged into the working copy
391
391
392 yields tuples for progress updates
392 yields tuples for progress updates
393 """
393 """
394 verbose = repo.ui.verbose
394 verbose = repo.ui.verbose
395 unlink = util.unlinkpath
395 unlink = util.unlinkpath
396 wjoin = repo.wjoin
396 wjoin = repo.wjoin
397 fctx = mctx.filectx
397 fctx = mctx.filectx
398 wwrite = repo.wwrite
398 wwrite = repo.wwrite
399 audit = repo.wopener.audit
399 audit = repo.wopener.audit
400 i = 0
400 i = 0
401 for arg in args:
401 for arg in args:
402 f = arg[0]
402 f = arg[0]
403 if arg[1] == 'r':
403 if arg[1] == 'r':
404 if verbose:
404 if verbose:
405 repo.ui.note(_("removing %s\n") % f)
405 repo.ui.note(_("removing %s\n") % f)
406 audit(f)
406 audit(f)
407 try:
407 try:
408 unlink(wjoin(f), ignoremissing=True)
408 unlink(wjoin(f), ignoremissing=True)
409 except OSError, inst:
409 except OSError, inst:
410 repo.ui.warn(_("update failed to remove %s: %s!\n") %
410 repo.ui.warn(_("update failed to remove %s: %s!\n") %
411 (f, inst.strerror))
411 (f, inst.strerror))
412 else:
412 else:
413 if verbose:
413 if verbose:
414 repo.ui.note(_("getting %s\n") % f)
414 repo.ui.note(_("getting %s\n") % f)
415 wwrite(f, fctx(f).data(), arg[2][0])
415 wwrite(f, fctx(f).data(), arg[2][0])
416 if i == 100:
416 if i == 100:
417 yield i, f
417 yield i, f
418 i = 0
418 i = 0
419 i += 1
419 i += 1
420 if i > 0:
420 if i > 0:
421 yield i, f
421 yield i, f
422
422
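
getremove above reports progress in batches of roughly 100 files so the worker/progress plumbing is not flooded with per-file updates. A stripped-down version of that batching, with the counting tidied slightly (batched_progress is not a real helper here):

    def batched_progress(items, batch=100):
        count = 0
        last = None
        for last in items:
            count += 1
            if count == batch:
                yield count, last
                count = 0
        if count:
            yield count, last

    assert [n for n, _ in batched_progress(range(250))] == [100, 100, 50]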
423 def applyupdates(repo, actions, wctx, mctx, actx, overwrite):
423 def applyupdates(repo, actions, wctx, mctx, actx, overwrite):
424 """apply the merge action list to the working directory
424 """apply the merge action list to the working directory
425
425
426 wctx is the working copy context
426 wctx is the working copy context
427 mctx is the context to be merged into the working copy
427 mctx is the context to be merged into the working copy
428 actx is the context of the common ancestor
428 actx is the context of the common ancestor
429
429
430 Return a tuple of counts (updated, merged, removed, unresolved) that
430 Return a tuple of counts (updated, merged, removed, unresolved) that
431 describes how many files were affected by the update.
431 describes how many files were affected by the update.
432 """
432 """
433
433
434 updated, merged, removed, unresolved = 0, 0, 0, 0
434 updated, merged, removed, unresolved = 0, 0, 0, 0
435 ms = mergestate(repo)
435 ms = mergestate(repo)
436 ms.reset(wctx.p1().node())
436 ms.reset(wctx.p1().node())
437 moves = []
437 moves = []
438 actions.sort(key=actionkey)
438 actions.sort(key=actionkey)
439
439
440 # prescan for merges
440 # prescan for merges
441 for a in actions:
441 for a in actions:
442 f, m, args, msg = a
442 f, m, args, msg = a
443 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
443 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
444 if m == "m": # merge
444 if m == "m": # merge
445 f2, fd, move = args
445 f2, fd, move = args
446 if fd == '.hgsubstate': # merged internally
446 if fd == '.hgsubstate': # merged internally
447 continue
447 continue
448 repo.ui.debug(" preserving %s for resolve of %s\n" % (f, fd))
448 repo.ui.debug(" preserving %s for resolve of %s\n" % (f, fd))
449 fcl = wctx[f]
449 fcl = wctx[f]
450 fco = mctx[f2]
450 fco = mctx[f2]
451 if mctx == actx: # backwards, use working dir parent as ancestor
451 if mctx == actx: # backwards, use working dir parent as ancestor
452 if fcl.parents():
452 if fcl.parents():
453 fca = fcl.p1()
453 fca = fcl.p1()
454 else:
454 else:
455 fca = repo.filectx(f, fileid=nullrev)
455 fca = repo.filectx(f, fileid=nullrev)
456 else:
456 else:
457 fca = fcl.ancestor(fco, actx)
457 fca = fcl.ancestor(fco, actx)
458 if not fca:
458 if not fca:
459 fca = repo.filectx(f, fileid=nullrev)
459 fca = repo.filectx(f, fileid=nullrev)
460 ms.add(fcl, fco, fca, fd)
460 ms.add(fcl, fco, fca, fd)
461 if f != fd and move:
461 if f != fd and move:
462 moves.append(f)
462 moves.append(f)
463
463
464 audit = repo.wopener.audit
464 audit = repo.wopener.audit
465
465
466 # remove renamed files after safely stored
466 # remove renamed files after safely stored
467 for f in moves:
467 for f in moves:
468 if os.path.lexists(repo.wjoin(f)):
468 if os.path.lexists(repo.wjoin(f)):
469 repo.ui.debug("removing %s\n" % f)
469 repo.ui.debug("removing %s\n" % f)
470 audit(f)
470 audit(f)
471 util.unlinkpath(repo.wjoin(f))
471 util.unlinkpath(repo.wjoin(f))
472
472
473 numupdates = len(actions)
473 numupdates = len(actions)
474 workeractions = [a for a in actions if a[1] in 'gr']
474 workeractions = [a for a in actions if a[1] in 'gr']
475 updateactions = [a for a in workeractions if a[1] == 'g']
475 updateactions = [a for a in workeractions if a[1] == 'g']
476 updated = len(updateactions)
476 updated = len(updateactions)
477 removeactions = [a for a in workeractions if a[1] == 'r']
477 removeactions = [a for a in workeractions if a[1] == 'r']
478 removed = len(removeactions)
478 removed = len(removeactions)
479 actions = [a for a in actions if a[1] not in 'gr']
479 actions = [a for a in actions if a[1] not in 'gr']
480
480
481 hgsub = [a[1] for a in workeractions if a[0] == '.hgsubstate']
481 hgsub = [a[1] for a in workeractions if a[0] == '.hgsubstate']
482 if hgsub and hgsub[0] == 'r':
482 if hgsub and hgsub[0] == 'r':
483 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
483 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
484
484
485 z = 0
485 z = 0
486 prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
486 prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
487 removeactions)
487 removeactions)
488 for i, item in prog:
488 for i, item in prog:
489 z += i
489 z += i
490 repo.ui.progress(_('updating'), z, item=item, total=numupdates,
490 repo.ui.progress(_('updating'), z, item=item, total=numupdates,
491 unit=_('files'))
491 unit=_('files'))
492 prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
492 prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
493 updateactions)
493 updateactions)
494 for i, item in prog:
494 for i, item in prog:
495 z += i
495 z += i
496 repo.ui.progress(_('updating'), z, item=item, total=numupdates,
496 repo.ui.progress(_('updating'), z, item=item, total=numupdates,
497 unit=_('files'))
497 unit=_('files'))
498
498
499 if hgsub and hgsub[0] == 'g':
499 if hgsub and hgsub[0] == 'g':
500 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
500 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
501
501
502 _updating = _('updating')
502 _updating = _('updating')
503 _files = _('files')
503 _files = _('files')
504 progress = repo.ui.progress
504 progress = repo.ui.progress
505
505
506 for i, a in enumerate(actions):
506 for i, a in enumerate(actions):
507 f, m, args, msg = a
507 f, m, args, msg = a
508 progress(_updating, z + i + 1, item=f, total=numupdates, unit=_files)
508 progress(_updating, z + i + 1, item=f, total=numupdates, unit=_files)
509 if m == "m": # merge
509 if m == "m": # merge
510 f2, fd, move = args
510 f2, fd, move = args
511 if fd == '.hgsubstate': # subrepo states need updating
511 if fd == '.hgsubstate': # subrepo states need updating
512 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
512 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
513 overwrite)
513 overwrite)
514 continue
514 continue
515 audit(fd)
515 audit(fd)
516 r = ms.resolve(fd, wctx, mctx)
516 r = ms.resolve(fd, wctx, mctx)
517 if r is not None and r > 0:
517 if r is not None and r > 0:
518 unresolved += 1
518 unresolved += 1
519 else:
519 else:
520 if r is None:
520 if r is None:
521 updated += 1
521 updated += 1
522 else:
522 else:
523 merged += 1
523 merged += 1
524 elif m == "d": # directory rename
524 elif m == "d": # directory rename
525 f2, fd, flags = args
525 f2, fd, flags = args
526 if f:
526 if f:
527 repo.ui.note(_("moving %s to %s\n") % (f, fd))
527 repo.ui.note(_("moving %s to %s\n") % (f, fd))
528 audit(f)
528 audit(f)
529 repo.wwrite(fd, wctx.filectx(f).data(), flags)
529 repo.wwrite(fd, wctx.filectx(f).data(), flags)
530 util.unlinkpath(repo.wjoin(f))
530 util.unlinkpath(repo.wjoin(f))
531 if f2:
531 if f2:
532 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
532 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
533 repo.wwrite(fd, mctx.filectx(f2).data(), flags)
533 repo.wwrite(fd, mctx.filectx(f2).data(), flags)
534 updated += 1
534 updated += 1
535 elif m == "dr": # divergent renames
535 elif m == "dr": # divergent renames
536 fl, = args
536 fl, = args
537 repo.ui.warn(_("note: possible conflict - %s was renamed "
537 repo.ui.warn(_("note: possible conflict - %s was renamed "
538 "multiple times to:\n") % f)
538 "multiple times to:\n") % f)
539 for nf in fl:
539 for nf in fl:
540 repo.ui.warn(" %s\n" % nf)
540 repo.ui.warn(" %s\n" % nf)
541 elif m == "rd": # rename and delete
541 elif m == "rd": # rename and delete
542 fl, = args
542 fl, = args
543 repo.ui.warn(_("note: possible conflict - %s was deleted "
543 repo.ui.warn(_("note: possible conflict - %s was deleted "
544 "and renamed to:\n") % f)
544 "and renamed to:\n") % f)
545 for nf in fl:
545 for nf in fl:
546 repo.ui.warn(" %s\n" % nf)
546 repo.ui.warn(" %s\n" % nf)
547 elif m == "e": # exec
547 elif m == "e": # exec
548 flags, = args
548 flags, = args
549 audit(f)
549 audit(f)
550 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
550 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
551 updated += 1
551 updated += 1
552 ms.commit()
552 ms.commit()
553 progress(_updating, None, total=numupdates, unit=_files)
553 progress(_updating, None, total=numupdates, unit=_files)
554
554
555 return updated, merged, removed, unresolved
555 return updated, merged, removed, unresolved
556
556
557 def calculateupdates(repo, tctx, mctx, ancestor, branchmerge, force, partial,
557 def calculateupdates(repo, tctx, mctx, ancestor, branchmerge, force, partial,
558 acceptremote=False):
558 acceptremote=False):
559 "Calculate the actions needed to merge mctx into tctx"
559 "Calculate the actions needed to merge mctx into tctx"
560 actions = []
560 actions = []
561 actions += manifestmerge(repo, tctx, mctx,
561 actions += manifestmerge(repo, tctx, mctx,
562 ancestor,
562 ancestor,
563 branchmerge, force,
563 branchmerge, force,
564 partial, acceptremote)
564 partial, acceptremote)
565 if tctx.rev() is None:
565 if tctx.rev() is None:
566 actions += _forgetremoved(tctx, mctx, branchmerge)
566 actions += _forgetremoved(tctx, mctx, branchmerge)
567 return actions
567 return actions
568
568
569 def recordupdates(repo, actions, branchmerge):
569 def recordupdates(repo, actions, branchmerge):
570 "record merge actions to the dirstate"
570 "record merge actions to the dirstate"
571
571
572 for a in actions:
572 for a in actions:
573 f, m, args, msg = a
573 f, m, args, msg = a
574 if m == "r": # remove
574 if m == "r": # remove
575 if branchmerge:
575 if branchmerge:
576 repo.dirstate.remove(f)
576 repo.dirstate.remove(f)
577 else:
577 else:
578 repo.dirstate.drop(f)
578 repo.dirstate.drop(f)
579 elif m == "a": # re-add
579 elif m == "a": # re-add
580 if not branchmerge:
580 if not branchmerge:
581 repo.dirstate.add(f)
581 repo.dirstate.add(f)
582 elif m == "f": # forget
582 elif m == "f": # forget
583 repo.dirstate.drop(f)
583 repo.dirstate.drop(f)
584 elif m == "e": # exec change
584 elif m == "e": # exec change
585 repo.dirstate.normallookup(f)
585 repo.dirstate.normallookup(f)
586 elif m == "g": # get
586 elif m == "g": # get
587 if branchmerge:
587 if branchmerge:
588 repo.dirstate.otherparent(f)
588 repo.dirstate.otherparent(f)
589 else:
589 else:
590 repo.dirstate.normal(f)
590 repo.dirstate.normal(f)
591 elif m == "m": # merge
591 elif m == "m": # merge
592 f2, fd, move = args
592 f2, fd, move = args
593 if branchmerge:
593 if branchmerge:
594 # We've done a branch merge, mark this file as merged
594 # We've done a branch merge, mark this file as merged
595 # so that we properly record the merger later
595 # so that we properly record the merger later
596 repo.dirstate.merge(fd)
596 repo.dirstate.merge(fd)
597 if f != f2: # copy/rename
597 if f != f2: # copy/rename
598 if move:
598 if move:
599 repo.dirstate.remove(f)
599 repo.dirstate.remove(f)
600 if f != fd:
600 if f != fd:
601 repo.dirstate.copy(f, fd)
601 repo.dirstate.copy(f, fd)
602 else:
602 else:
603 repo.dirstate.copy(f2, fd)
603 repo.dirstate.copy(f2, fd)
604 else:
604 else:
605 # We've update-merged a locally modified file, so
605 # We've update-merged a locally modified file, so
606 # we set the dirstate to emulate a normal checkout
606 # we set the dirstate to emulate a normal checkout
607 # of that file some time in the past. Thus our
607 # of that file some time in the past. Thus our
608 # merge will appear as a normal local file
608 # merge will appear as a normal local file
609 # modification.
609 # modification.
610 if f2 == fd: # file not locally copied/moved
610 if f2 == fd: # file not locally copied/moved
611 repo.dirstate.normallookup(fd)
611 repo.dirstate.normallookup(fd)
612 if move:
612 if move:
613 repo.dirstate.drop(f)
613 repo.dirstate.drop(f)
614 elif m == "d": # directory rename
614 elif m == "d": # directory rename
615 f2, fd, flag = args
615 f2, fd, flag = args
616 if not f2 and f not in repo.dirstate:
616 if not f2 and f not in repo.dirstate:
617 # untracked file moved
617 # untracked file moved
618 continue
618 continue
619 if branchmerge:
619 if branchmerge:
620 repo.dirstate.add(fd)
620 repo.dirstate.add(fd)
621 if f:
621 if f:
622 repo.dirstate.remove(f)
622 repo.dirstate.remove(f)
623 repo.dirstate.copy(f, fd)
623 repo.dirstate.copy(f, fd)
624 if f2:
624 if f2:
625 repo.dirstate.copy(f2, fd)
625 repo.dirstate.copy(f2, fd)
626 else:
626 else:
627 repo.dirstate.normal(fd)
627 repo.dirstate.normal(fd)
628 if f:
628 if f:
629 repo.dirstate.drop(f)
629 repo.dirstate.drop(f)
630
630
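
As a reading aid, the single-letter action codes that flow from manifestmerge through applyupdates and recordupdates above, gathered in one place. The descriptions paraphrase the branches in the surrounding code; the mapping itself is not part of the module.

    ACTION_CODES = {
        "r":  "remove file",
        "a":  "re-add (keep local after prompt)",
        "f":  "forget (drop from dirstate)",
        "e":  "update exec/link flags only",
        "g":  "get file from the other side",
        "m":  "3-way merge via filemerge",
        "d":  "directory rename",
        "dr": "divergent renames (warning only)",
        "rd": "rename-and-delete (warning only)",
    }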
631 def update(repo, node, branchmerge, force, partial, ancestor=None,
631 def update(repo, node, branchmerge, force, partial, ancestor=None,
632 mergeancestor=False):
632 mergeancestor=False):
633 """
633 """
634 Perform a merge between the working directory and the given node
634 Perform a merge between the working directory and the given node
635
635
636 node = the node to update to, or None if unspecified
636 node = the node to update to, or None if unspecified
637 branchmerge = whether to merge between branches
637 branchmerge = whether to merge between branches
638 force = whether to force branch merging or file overwriting
638 force = whether to force branch merging or file overwriting
639 partial = a function to filter file lists (dirstate not updated)
639 partial = a function to filter file lists (dirstate not updated)
640 mergeancestor = whether it is merging with an ancestor. If true,
640 mergeancestor = whether it is merging with an ancestor. If true,
641 we should accept the incoming changes for any prompts that occur.
641 we should accept the incoming changes for any prompts that occur.
642 If false, merging with an ancestor (fast-forward) is only allowed
642 If false, merging with an ancestor (fast-forward) is only allowed
643 between different named branches. This flag is used by rebase extension
643 between different named branches. This flag is used by rebase extension
644 as a temporary fix and should be avoided in general.
644 as a temporary fix and should be avoided in general.
645
645
646 The table below shows all the behaviors of the update command
646 The table below shows all the behaviors of the update command
647 given the -c and -C or no options, whether the working directory
647 given the -c and -C or no options, whether the working directory
648 is dirty, whether a revision is specified, and the relationship of
648 is dirty, whether a revision is specified, and the relationship of
649 the parent rev to the target rev (linear, on the same named
649 the parent rev to the target rev (linear, on the same named
650 branch, or on another named branch).
650 branch, or on another named branch).
651
651
652 This logic is tested by test-update-branches.t.
652 This logic is tested by test-update-branches.t.
653
653
654 -c -C dirty rev | linear same cross
654 -c -C dirty rev | linear same cross
655 n n n n | ok (1) x
655 n n n n | ok (1) x
656 n n n y | ok ok ok
656 n n n y | ok ok ok
657 n n y * | merge (2) (2)
657 n n y * | merge (2) (2)
658 n y * * | --- discard ---
658 n y * * | --- discard ---
659 y n y * | --- (3) ---
659 y n y * | --- (3) ---
660 y n n * | --- ok ---
660 y n n * | --- ok ---
661 y y * * | --- (4) ---
661 y y * * | --- (4) ---
662
662
663 x = can't happen
663 x = can't happen
664 * = don't-care
664 * = don't-care
665 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
665 1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
666 2 = abort: crosses branches (use 'hg merge' to merge or
666 2 = abort: crosses branches (use 'hg merge' to merge or
667 use 'hg update -C' to discard changes)
667 use 'hg update -C' to discard changes)
668 3 = abort: uncommitted local changes
668 3 = abort: uncommitted local changes
669 4 = incompatible options (checked in commands.py)
669 4 = incompatible options (checked in commands.py)
670
670
671 Return the same tuple as applyupdates().
671 Return the same tuple as applyupdates().
672 """
672 """
673
673
674 onode = node
674 onode = node
675 wlock = repo.wlock()
675 wlock = repo.wlock()
676 try:
676 try:
677 wc = repo[None]
677 wc = repo[None]
678 if node is None:
678 if node is None:
679 # tip of current branch
679 # tip of current branch
680 try:
680 try:
681 node = repo.branchtip(wc.branch())
681 node = repo.branchtip(wc.branch())
682 except error.RepoLookupError:
682 except error.RepoLookupError:
683 if wc.branch() == "default": # no default branch!
683 if wc.branch() == "default": # no default branch!
684 node = repo.lookup("tip") # update to tip
684 node = repo.lookup("tip") # update to tip
685 else:
685 else:
686 raise util.Abort(_("branch %s not found") % wc.branch())
686 raise util.Abort(_("branch %s not found") % wc.branch())
687 overwrite = force and not branchmerge
687 overwrite = force and not branchmerge
688 pl = wc.parents()
688 pl = wc.parents()
689 p1, p2 = pl[0], repo[node]
689 p1, p2 = pl[0], repo[node]
690 if ancestor:
690 if ancestor:
691 pa = repo[ancestor]
691 pa = repo[ancestor]
692 else:
692 else:
693 pa = p1.ancestor(p2)
693 pa = p1.ancestor(p2)
694
694
695 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
695 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
696
696
697 ### check phase
697 ### check phase
698 if not overwrite and len(pl) > 1:
698 if not overwrite and len(pl) > 1:
699 raise util.Abort(_("outstanding uncommitted merges"))
699 raise util.Abort(_("outstanding uncommitted merges"))
700 if branchmerge:
700 if branchmerge:
701 if pa == p2:
701 if pa == p2:
702 raise util.Abort(_("merging with a working directory ancestor"
702 raise util.Abort(_("merging with a working directory ancestor"
703 " has no effect"))
703 " has no effect"))
704 elif pa == p1:
704 elif pa == p1:
705 if not mergeancestor and p1.branch() == p2.branch():
705 if not mergeancestor and p1.branch() == p2.branch():
706 raise util.Abort(_("nothing to merge"),
706 raise util.Abort(_("nothing to merge"),
707 hint=_("use 'hg update' "
707 hint=_("use 'hg update' "
708 "or check 'hg heads'"))
708 "or check 'hg heads'"))
709 if not force and (wc.files() or wc.deleted()):
709 if not force and (wc.files() or wc.deleted()):
710 raise util.Abort(_("outstanding uncommitted changes"),
710 raise util.Abort(_("outstanding uncommitted changes"),
711 hint=_("use 'hg status' to list changes"))
711 hint=_("use 'hg status' to list changes"))
712 for s in sorted(wc.substate):
712 for s in sorted(wc.substate):
713 if wc.sub(s).dirty():
713 if wc.sub(s).dirty():
714 raise util.Abort(_("outstanding uncommitted changes in "
714 raise util.Abort(_("outstanding uncommitted changes in "
715 "subrepository '%s'") % s)
715 "subrepository '%s'") % s)
716
716
717 elif not overwrite:
717 elif not overwrite:
718 if pa not in (p1, p2): # nonlinear
718 if pa not in (p1, p2): # nonlinear
719 dirty = wc.dirty(missing=True)
719 dirty = wc.dirty(missing=True)
720 if dirty or onode is None:
720 if dirty or onode is None:
721 # Branching is a bit strange to ensure we do the minimal
721 # Branching is a bit strange to ensure we do the minimal
722 # number of calls to obsolete.background.
722 # number of calls to obsolete.background.
723 foreground = obsolete.foreground(repo, [p1.node()])
723 foreground = obsolete.foreground(repo, [p1.node()])
724 # note: the <node> variable contains a random identifier
724 # note: the <node> variable contains a random identifier
725 if repo[node].node() in foreground:
725 if repo[node].node() in foreground:
726 pa = p1 # allow updating to successors
726 pa = p1 # allow updating to successors
727 elif dirty:
727 elif dirty:
728 msg = _("crosses branches (merge branches or use"
728 msg = _("crosses branches (merge branches or use"
729 " --clean to discard changes)")
729 " --clean to discard changes)")
730 raise util.Abort(msg)
730 raise util.Abort(msg)
731 else: # node is None
731 else: # node is None
732 msg = _("crosses branches (merge branches or update"
732 msg = _("crosses branches (merge branches or update"
733 " --check to force update)")
733 " --check to force update)")
734 raise util.Abort(msg)
734 raise util.Abort(msg)
735 else:
735 else:
736 # Allow jumping branches if clean and specific rev given
736 # Allow jumping branches if clean and specific rev given
737 pa = p1
737 pa = p1
738
738
739 ### calculate phase
739 ### calculate phase
740 actions = calculateupdates(repo, wc, p2, pa,
740 actions = calculateupdates(repo, wc, p2, pa,
741 branchmerge, force, partial, mergeancestor)
741 branchmerge, force, partial, mergeancestor)
742
742
743 ### apply phase
743 ### apply phase
744 if not branchmerge: # just jump to the new rev
744 if not branchmerge: # just jump to the new rev
745 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
745 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
746 if not partial:
746 if not partial:
747 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
747 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
748
748
749 stats = applyupdates(repo, actions, wc, p2, pa, overwrite)
749 stats = applyupdates(repo, actions, wc, p2, pa, overwrite)
750
750
751 if not partial:
751 if not partial:
752 repo.setparents(fp1, fp2)
752 repo.setparents(fp1, fp2)
753 recordupdates(repo, actions, branchmerge)
753 recordupdates(repo, actions, branchmerge)
754 if not branchmerge:
754 if not branchmerge:
755 repo.dirstate.setbranch(p2.branch())
755 repo.dirstate.setbranch(p2.branch())
756 finally:
756 finally:
757 wlock.release()
757 wlock.release()
758
758
759 if not partial:
759 if not partial:
760 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
760 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
761 return stats
761 return stats
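Reviewer note (illustrative, not part of the changeset): the behaviour table in the update() docstring above is easy to misread in diff form, so here is a small, self-contained restatement of it in plain Python. Every name in this sketch is hypothetical; only the outcomes are taken from the docstring.

def expected_outcome(check, clean, dirty, rev_given, relationship):
    # check/clean correspond to -c/-C; relationship is 'linear', 'same'
    # (same named branch) or 'cross' (another named branch)
    if check and clean:
        return 'incompatible options (4)'
    if check:
        return 'abort: uncommitted local changes (3)' if dirty else 'ok'
    if clean:
        return 'discard local changes'
    if dirty:
        return 'merge' if relationship == 'linear' else 'abort: crosses branches (2)'
    if not rev_given:
        return {'linear': 'ok',
                'same': 'abort: crosses branches (1)',
                'cross': "can't happen"}[relationship]
    return 'ok'

print(expected_outcome(False, False, True, False, 'same'))  # abort: crosses branches (2)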
@@ -1,1451 +1,1452 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import errno, os, re, xml.dom.minidom, shutil, posixpath, sys
8 import errno, os, re, xml.dom.minidom, shutil, posixpath, sys
9 import stat, subprocess, tarfile
9 import stat, subprocess, tarfile
10 from i18n import _
10 from i18n import _
11 import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
11 import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
12 hg = None
12 hg = None
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
15 nullstate = ('', '', 'empty')
15 nullstate = ('', '', 'empty')
16
16
17 def _expandedabspath(path):
17 def _expandedabspath(path):
18 '''
18 '''
19 get a path or url and if it is a path expand it and return an absolute path
19 get a path or url and if it is a path expand it and return an absolute path
20 '''
20 '''
21 expandedpath = util.urllocalpath(util.expandpath(path))
21 expandedpath = util.urllocalpath(util.expandpath(path))
22 u = util.url(expandedpath)
22 u = util.url(expandedpath)
23 if not u.scheme:
23 if not u.scheme:
24 path = util.normpath(os.path.abspath(u.path))
24 path = util.normpath(os.path.abspath(u.path))
25 return path
25 return path
26
26
27 def _getstorehashcachename(remotepath):
27 def _getstorehashcachename(remotepath):
28 '''get a unique filename for the store hash cache of a remote repository'''
28 '''get a unique filename for the store hash cache of a remote repository'''
29 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
29 return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
30
30
31 def _calcfilehash(filename):
31 def _calcfilehash(filename):
32 data = ''
32 data = ''
33 if os.path.exists(filename):
33 if os.path.exists(filename):
34 fd = open(filename, 'rb')
34 fd = open(filename, 'rb')
35 data = fd.read()
35 data = fd.read()
36 fd.close()
36 fd.close()
37 return util.sha1(data).hexdigest()
37 return util.sha1(data).hexdigest()
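Reviewer note (illustrative, not part of the changeset): the two helpers above amount to "sha1 of the expanded remote path, truncated to 12 hex digits" for the cache file name, and "sha1 of the file contents, or of the empty string if the file is missing" for each tracked store file. A plain-stdlib approximation covering only the local-path case (the real _expandedabspath returns a URL with a scheme unchanged):

import hashlib, os

def storehash_cachename(remotepath):
    expanded = os.path.abspath(os.path.expanduser(remotepath))
    return hashlib.sha1(expanded.encode('utf-8')).hexdigest()[0:12]

def filehash(filename):
    data = b''
    if os.path.exists(filename):
        with open(filename, 'rb') as fd:
            data = fd.read()
    return hashlib.sha1(data).hexdigest()

print(storehash_cachename('~/repos/sub'))  # 12 hex characters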
38
38
39 class SubrepoAbort(error.Abort):
39 class SubrepoAbort(error.Abort):
40 """Exception class used to avoid handling a subrepo error more than once"""
40 """Exception class used to avoid handling a subrepo error more than once"""
41 def __init__(self, *args, **kw):
41 def __init__(self, *args, **kw):
42 error.Abort.__init__(self, *args, **kw)
42 error.Abort.__init__(self, *args, **kw)
43 self.subrepo = kw.get('subrepo')
43 self.subrepo = kw.get('subrepo')
44 self.cause = kw.get('cause')
44 self.cause = kw.get('cause')
45
45
46 def annotatesubrepoerror(func):
46 def annotatesubrepoerror(func):
47 def decoratedmethod(self, *args, **kargs):
47 def decoratedmethod(self, *args, **kargs):
48 try:
48 try:
49 res = func(self, *args, **kargs)
49 res = func(self, *args, **kargs)
50 except SubrepoAbort, ex:
50 except SubrepoAbort, ex:
51 # This exception has already been handled
51 # This exception has already been handled
52 raise ex
52 raise ex
53 except error.Abort, ex:
53 except error.Abort, ex:
54 subrepo = subrelpath(self)
54 subrepo = subrelpath(self)
55 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
55 errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
56 # raise SubrepoAbort instead, so outer callers do not annotate this error again
56 # raise SubrepoAbort instead, so outer callers do not annotate this error again
57 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
57 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
58 cause=sys.exc_info())
58 cause=sys.exc_info())
59 return res
59 return res
60 return decoratedmethod
60 return decoratedmethod
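Reviewer note (illustrative, not part of the changeset): annotatesubrepoerror follows a common decorator pattern -- catch a broad error, re-raise it as a more specific subclass carrying extra context, and let that subclass pass through untouched if it bubbles up through another decorated call. A generic, self-contained sketch of the same pattern (none of the Mercurial classes are used):

class OpError(Exception):
    pass

class AnnotatedOpError(OpError):
    def __init__(self, msg, context=None):
        OpError.__init__(self, msg)
        self.context = context

def annotate_errors(context):
    def deco(func):
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except AnnotatedOpError:
                raise                       # already annotated once: pass through
            except OpError as ex:
                raise AnnotatedOpError('%s (in %s)' % (ex, context), context)
        return wrapper
    return deco

@annotate_errors('subrepo foo')
def failing():
    raise OpError('boom')

# failing() now raises AnnotatedOpError('boom (in subrepo foo)')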
61
61
62 def state(ctx, ui):
62 def state(ctx, ui):
63 """return a state dict, mapping subrepo paths configured in .hgsub
63 """return a state dict, mapping subrepo paths configured in .hgsub
64 to tuple: (source from .hgsub, revision from .hgsubstate, kind
64 to tuple: (source from .hgsub, revision from .hgsubstate, kind
65 (key in types dict))
65 (key in types dict))
66 """
66 """
67 p = config.config()
67 p = config.config()
68 def read(f, sections=None, remap=None):
68 def read(f, sections=None, remap=None):
69 if f in ctx:
69 if f in ctx:
70 try:
70 try:
71 data = ctx[f].data()
71 data = ctx[f].data()
72 except IOError, err:
72 except IOError, err:
73 if err.errno != errno.ENOENT:
73 if err.errno != errno.ENOENT:
74 raise
74 raise
75 # handle missing subrepo spec files as removed
75 # handle missing subrepo spec files as removed
76 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
76 ui.warn(_("warning: subrepo spec file %s not found\n") % f)
77 return
77 return
78 p.parse(f, data, sections, remap, read)
78 p.parse(f, data, sections, remap, read)
79 else:
79 else:
80 raise util.Abort(_("subrepo spec file %s not found") % f)
80 raise util.Abort(_("subrepo spec file %s not found") % f)
81
81
82 if '.hgsub' in ctx:
82 if '.hgsub' in ctx:
83 read('.hgsub')
83 read('.hgsub')
84
84
85 for path, src in ui.configitems('subpaths'):
85 for path, src in ui.configitems('subpaths'):
86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
86 p.set('subpaths', path, src, ui.configsource('subpaths', path))
87
87
88 rev = {}
88 rev = {}
89 if '.hgsubstate' in ctx:
89 if '.hgsubstate' in ctx:
90 try:
90 try:
91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
91 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
92 l = l.lstrip()
92 l = l.lstrip()
93 if not l:
93 if not l:
94 continue
94 continue
95 try:
95 try:
96 revision, path = l.split(" ", 1)
96 revision, path = l.split(" ", 1)
97 except ValueError:
97 except ValueError:
98 raise util.Abort(_("invalid subrepository revision "
98 raise util.Abort(_("invalid subrepository revision "
99 "specifier in .hgsubstate line %d")
99 "specifier in .hgsubstate line %d")
100 % (i + 1))
100 % (i + 1))
101 rev[path] = revision
101 rev[path] = revision
102 except IOError, err:
102 except IOError, err:
103 if err.errno != errno.ENOENT:
103 if err.errno != errno.ENOENT:
104 raise
104 raise
105
105
106 def remap(src):
106 def remap(src):
107 for pattern, repl in p.items('subpaths'):
107 for pattern, repl in p.items('subpaths'):
108 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
108 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
109 # does a string decode.
109 # does a string decode.
110 repl = repl.encode('string-escape')
110 repl = repl.encode('string-escape')
111 # However, we still want to allow back references to go
111 # However, we still want to allow back references to go
112 # through unharmed, so we turn r'\\1' into r'\1'. Again,
112 # through unharmed, so we turn r'\\1' into r'\1'. Again,
113 # extra escapes are needed because re.sub string decodes.
113 # extra escapes are needed because re.sub string decodes.
114 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
114 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
115 try:
115 try:
116 src = re.sub(pattern, repl, src, 1)
116 src = re.sub(pattern, repl, src, 1)
117 except re.error, e:
117 except re.error, e:
118 raise util.Abort(_("bad subrepository pattern in %s: %s")
118 raise util.Abort(_("bad subrepository pattern in %s: %s")
119 % (p.source('subpaths', pattern), e))
119 % (p.source('subpaths', pattern), e))
120 return src
120 return src
121
121
122 state = {}
122 state = {}
123 for path, src in p[''].items():
123 for path, src in p[''].items():
124 kind = 'hg'
124 kind = 'hg'
125 if src.startswith('['):
125 if src.startswith('['):
126 if ']' not in src:
126 if ']' not in src:
127 raise util.Abort(_('missing ] in subrepo source'))
127 raise util.Abort(_('missing ] in subrepo source'))
128 kind, src = src.split(']', 1)
128 kind, src = src.split(']', 1)
129 kind = kind[1:]
129 kind = kind[1:]
130 src = src.lstrip() # strip any extra whitespace after ']'
130 src = src.lstrip() # strip any extra whitespace after ']'
131
131
132 if not util.url(src).isabs():
132 if not util.url(src).isabs():
133 parent = _abssource(ctx._repo, abort=False)
133 parent = _abssource(ctx._repo, abort=False)
134 if parent:
134 if parent:
135 parent = util.url(parent)
135 parent = util.url(parent)
136 parent.path = posixpath.join(parent.path or '', src)
136 parent.path = posixpath.join(parent.path or '', src)
137 parent.path = posixpath.normpath(parent.path)
137 parent.path = posixpath.normpath(parent.path)
138 joined = str(parent)
138 joined = str(parent)
139 # Remap the full joined path and use it if it changes,
139 # Remap the full joined path and use it if it changes,
140 # else remap the original source.
140 # else remap the original source.
141 remapped = remap(joined)
141 remapped = remap(joined)
142 if remapped == joined:
142 if remapped == joined:
143 src = remap(src)
143 src = remap(src)
144 else:
144 else:
145 src = remapped
145 src = remapped
146
146
147 src = remap(src)
147 src = remap(src)
148 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
148 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
149
149
150 return state
150 return state
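Reviewer note (illustrative, not part of the changeset): given the parsing above, a hypothetical pair of files such as

    .hgsub:        nested     = ../nested
                   vendor/lib = [git]git://example.com/lib.git
    .hgsubstate:   1234567890abcdef1234567890abcdef12345678 nested

would yield a state dict roughly of this shape:

state = {
    'nested':     ('../nested', '1234567890abcdef1234567890abcdef12345678', 'hg'),
    'vendor/lib': ('git://example.com/lib.git', '', 'git'),
}

i.e. (source from .hgsub, revision from .hgsubstate or '', kind), with relative sources joined onto the parent repository's own source when one can be determined, and with any [subpaths] remapping already applied.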
151
151
152 def writestate(repo, state):
152 def writestate(repo, state):
153 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
153 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
154 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
154 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
155 repo.wwrite('.hgsubstate', ''.join(lines), '')
155 repo.wwrite('.hgsubstate', ''.join(lines), '')
156
156
157 def submerge(repo, wctx, mctx, actx, overwrite):
157 def submerge(repo, wctx, mctx, actx, overwrite):
158 """delegated from merge.applyupdates: merging of .hgsubstate file
158 """delegated from merge.applyupdates: merging of .hgsubstate file
159 in working context, merging context and ancestor context"""
159 in working context, merging context and ancestor context"""
160 if mctx == actx: # backwards?
160 if mctx == actx: # backwards?
161 actx = wctx.p1()
161 actx = wctx.p1()
162 s1 = wctx.substate
162 s1 = wctx.substate
163 s2 = mctx.substate
163 s2 = mctx.substate
164 sa = actx.substate
164 sa = actx.substate
165 sm = {}
165 sm = {}
166
166
167 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
167 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
168
168
169 def debug(s, msg, r=""):
169 def debug(s, msg, r=""):
170 if r:
170 if r:
171 r = "%s:%s:%s" % r
171 r = "%s:%s:%s" % r
172 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
172 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
173
173
174 for s, l in sorted(s1.iteritems()):
174 for s, l in sorted(s1.iteritems()):
175 a = sa.get(s, nullstate)
175 a = sa.get(s, nullstate)
176 ld = l # local state with possible dirty flag for compares
176 ld = l # local state with possible dirty flag for compares
177 if wctx.sub(s).dirty():
177 if wctx.sub(s).dirty():
178 ld = (l[0], l[1] + "+")
178 ld = (l[0], l[1] + "+")
179 if wctx == actx: # overwrite
179 if wctx == actx: # overwrite
180 a = ld
180 a = ld
181
181
182 if s in s2:
182 if s in s2:
183 r = s2[s]
183 r = s2[s]
184 if ld == r or r == a: # no change or local is newer
184 if ld == r or r == a: # no change or local is newer
185 sm[s] = l
185 sm[s] = l
186 continue
186 continue
187 elif ld == a: # other side changed
187 elif ld == a: # other side changed
188 debug(s, "other changed, get", r)
188 debug(s, "other changed, get", r)
189 wctx.sub(s).get(r, overwrite)
189 wctx.sub(s).get(r, overwrite)
190 sm[s] = r
190 sm[s] = r
191 elif ld[0] != r[0]: # sources differ
191 elif ld[0] != r[0]: # sources differ
192 if repo.ui.promptchoice(
192 if repo.ui.promptchoice(
193 _(' subrepository sources for %s differ\n'
193 _(' subrepository sources for %s differ\n'
194 'use (l)ocal source (%s) or (r)emote source (%s)?')
194 'use (l)ocal source (%s) or (r)emote source (%s)?'
195 % (s, l[0], r[0]),
195 '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
196 (_('&Local'), _('&Remote')), 0):
197 debug(s, "prompt changed, get", r)
196 debug(s, "prompt changed, get", r)
198 wctx.sub(s).get(r, overwrite)
197 wctx.sub(s).get(r, overwrite)
199 sm[s] = r
198 sm[s] = r
200 elif ld[1] == a[1]: # local side is unchanged
199 elif ld[1] == a[1]: # local side is unchanged
201 debug(s, "other side changed, get", r)
200 debug(s, "other side changed, get", r)
202 wctx.sub(s).get(r, overwrite)
201 wctx.sub(s).get(r, overwrite)
203 sm[s] = r
202 sm[s] = r
204 else:
203 else:
205 debug(s, "both sides changed, merge with", r)
204 debug(s, "both sides changed, merge with", r)
206 wctx.sub(s).merge(r)
205 wctx.sub(s).merge(r)
207 sm[s] = l
206 sm[s] = l
208 elif ld == a: # remote removed, local unchanged
207 elif ld == a: # remote removed, local unchanged
209 debug(s, "remote removed, remove")
208 debug(s, "remote removed, remove")
210 wctx.sub(s).remove()
209 wctx.sub(s).remove()
211 elif a == nullstate: # not present in remote or ancestor
210 elif a == nullstate: # not present in remote or ancestor
212 debug(s, "local added, keep")
211 debug(s, "local added, keep")
213 sm[s] = l
212 sm[s] = l
214 continue
213 continue
215 else:
214 else:
216 if repo.ui.promptchoice(
215 if repo.ui.promptchoice(
217 _(' local changed subrepository %s which remote removed\n'
216 _(' local changed subrepository %s which remote removed\n'
218 'use (c)hanged version or (d)elete?') % s,
217 'use (c)hanged version or (d)elete?'
219 (_('&Changed'), _('&Delete')), 0):
218 '$$ &Changed $$ &Delete') % s, 0):
220 debug(s, "prompt remove")
219 debug(s, "prompt remove")
221 wctx.sub(s).remove()
220 wctx.sub(s).remove()
222
221
223 for s, r in sorted(s2.items()):
222 for s, r in sorted(s2.items()):
224 if s in s1:
223 if s in s1:
225 continue
224 continue
226 elif s not in sa:
225 elif s not in sa:
227 debug(s, "remote added, get", r)
226 debug(s, "remote added, get", r)
228 mctx.sub(s).get(r)
227 mctx.sub(s).get(r)
229 sm[s] = r
228 sm[s] = r
230 elif r != sa[s]:
229 elif r != sa[s]:
231 if repo.ui.promptchoice(
230 if repo.ui.promptchoice(
232 _(' remote changed subrepository %s which local removed\n'
231 _(' remote changed subrepository %s which local removed\n'
233 'use (c)hanged version or (d)elete?') % s,
232 'use (c)hanged version or (d)elete?'
234 (_('&Changed'), _('&Delete')), 0) == 0:
233 '$$ &Changed $$ &Delete') % s, 0) == 0:
235 debug(s, "prompt recreate", r)
234 debug(s, "prompt recreate", r)
236 wctx.sub(s).get(r)
235 wctx.sub(s).get(r)
237 sm[s] = r
236 sm[s] = r
238
237
239 # record merged .hgsubstate
238 # record merged .hgsubstate
240 writestate(repo, sm)
239 writestate(repo, sm)
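Reviewer note (illustrative, not part of the changeset): stripped of prompts and I/O, the per-subrepo decision in submerge() for a path present on the local side reduces to comparing (source, revision) pairs from the working copy, the merge target and the ancestor. A hypothetical, simplified restatement (the real code also carries a '+' dirty marker and a kind field):

def decide(local, remote, ancestor):
    # local/remote/ancestor are (source, revision) pairs;
    # remote is None when the other side removed the subrepo
    if remote is not None:
        if local == remote or remote == ancestor:
            return 'keep local'             # no change, or local is newer
        if local == ancestor:
            return 'take remote'            # only the other side changed
        if local[0] != remote[0]:
            return 'prompt: sources differ'
        if local[1] == ancestor[1]:
            return 'take remote'            # local revision unchanged
        return 'merge both sides'
    if local == ancestor:
        return 'remove'                     # remote removed, local unchanged
    if ancestor == ('', ''):
        return 'keep local'                 # locally added subrepo
    return 'prompt: keep changed version or delete'

print(decide(('src', 'aaa'), ('src', 'bbb'), ('src', 'aaa')))  # take remote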
241
240
242 def _updateprompt(ui, sub, dirty, local, remote):
241 def _updateprompt(ui, sub, dirty, local, remote):
243 if dirty:
242 if dirty:
244 msg = (_(' subrepository sources for %s differ\n'
243 msg = (_(' subrepository sources for %s differ\n'
245 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
244 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
245 '$$ &Local $$ &Remote')
246 % (subrelpath(sub), local, remote))
246 % (subrelpath(sub), local, remote))
247 else:
247 else:
248 msg = (_(' subrepository sources for %s differ (in checked out '
248 msg = (_(' subrepository sources for %s differ (in checked out '
249 'version)\n'
249 'version)\n'
250 'use (l)ocal source (%s) or (r)emote source (%s)?\n')
250 'use (l)ocal source (%s) or (r)emote source (%s)?\n'
251 '$$ &Local $$ &Remote')
251 % (subrelpath(sub), local, remote))
252 % (subrelpath(sub), local, remote))
252 return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
253 return ui.promptchoice(msg, 0)
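Reviewer note (illustrative, not part of the changeset): the hunks in this file replace the old (prompt, choices-tuple, default) calling convention with a single string in which the choices follow the message, separated by '$$', and '&' marks each choice's response key. A rough standalone approximation of how such a string splits back into its parts (the real parsing lives inside ui.promptchoice and its helpers, not here):

def split_prompt(prompt):
    parts = prompt.split('$$')
    msg = parts[0].rstrip()
    choices = [p.strip() for p in parts[1:]]
    keys = [c[c.index('&') + 1].lower() for c in choices]
    return msg, choices, keys

msg, choices, keys = split_prompt(
    ' subrepository sources for sub differ\n'
    'use (l)ocal source (A) or (r)emote source (B)?'
    '$$ &Local $$ &Remote')
print(choices)  # ['&Local', '&Remote']
print(keys)     # ['l', 'r']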
253
254
254 def reporelpath(repo):
255 def reporelpath(repo):
255 """return path to this (sub)repo as seen from outermost repo"""
256 """return path to this (sub)repo as seen from outermost repo"""
256 parent = repo
257 parent = repo
257 while util.safehasattr(parent, '_subparent'):
258 while util.safehasattr(parent, '_subparent'):
258 parent = parent._subparent
259 parent = parent._subparent
259 p = parent.root.rstrip(os.sep)
260 p = parent.root.rstrip(os.sep)
260 return repo.root[len(p) + 1:]
261 return repo.root[len(p) + 1:]
261
262
262 def subrelpath(sub):
263 def subrelpath(sub):
263 """return path to this subrepo as seen from outermost repo"""
264 """return path to this subrepo as seen from outermost repo"""
264 if util.safehasattr(sub, '_relpath'):
265 if util.safehasattr(sub, '_relpath'):
265 return sub._relpath
266 return sub._relpath
266 if not util.safehasattr(sub, '_repo'):
267 if not util.safehasattr(sub, '_repo'):
267 return sub._path
268 return sub._path
268 return reporelpath(sub._repo)
269 return reporelpath(sub._repo)
269
270
270 def _abssource(repo, push=False, abort=True):
271 def _abssource(repo, push=False, abort=True):
271 """return pull/push path of repo - either based on parent repo .hgsub info
272 """return pull/push path of repo - either based on parent repo .hgsub info
272 or on the top repo config. Abort or return None if no source found."""
273 or on the top repo config. Abort or return None if no source found."""
273 if util.safehasattr(repo, '_subparent'):
274 if util.safehasattr(repo, '_subparent'):
274 source = util.url(repo._subsource)
275 source = util.url(repo._subsource)
275 if source.isabs():
276 if source.isabs():
276 return str(source)
277 return str(source)
277 source.path = posixpath.normpath(source.path)
278 source.path = posixpath.normpath(source.path)
278 parent = _abssource(repo._subparent, push, abort=False)
279 parent = _abssource(repo._subparent, push, abort=False)
279 if parent:
280 if parent:
280 parent = util.url(util.pconvert(parent))
281 parent = util.url(util.pconvert(parent))
281 parent.path = posixpath.join(parent.path or '', source.path)
282 parent.path = posixpath.join(parent.path or '', source.path)
282 parent.path = posixpath.normpath(parent.path)
283 parent.path = posixpath.normpath(parent.path)
283 return str(parent)
284 return str(parent)
284 else: # recursion reached top repo
285 else: # recursion reached top repo
285 if util.safehasattr(repo, '_subtoppath'):
286 if util.safehasattr(repo, '_subtoppath'):
286 return repo._subtoppath
287 return repo._subtoppath
287 if push and repo.ui.config('paths', 'default-push'):
288 if push and repo.ui.config('paths', 'default-push'):
288 return repo.ui.config('paths', 'default-push')
289 return repo.ui.config('paths', 'default-push')
289 if repo.ui.config('paths', 'default'):
290 if repo.ui.config('paths', 'default'):
290 return repo.ui.config('paths', 'default')
291 return repo.ui.config('paths', 'default')
291 if repo.sharedpath != repo.path:
292 if repo.sharedpath != repo.path:
292 # chop off the .hg component to get the default path form
293 # chop off the .hg component to get the default path form
293 return os.path.dirname(repo.sharedpath)
294 return os.path.dirname(repo.sharedpath)
294 if abort:
295 if abort:
295 raise util.Abort(_("default path for subrepository not found"))
296 raise util.Abort(_("default path for subrepository not found"))
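Reviewer note (illustrative, not part of the changeset): for a nested subrepo with a relative source, _abssource() effectively walks up to the parent's own source and joins the pieces with posixpath. A hypothetical flattened version for plain filesystem paths (the real code goes through util.url so that only the path component of a URL is normalized):

import posixpath

def joined_source(parent_source, sub_source):
    # an absolute subrepo source wins outright; a relative one is joined
    # onto the parent's source and normalized
    if sub_source.startswith('/') or '://' in sub_source:
        return sub_source
    return posixpath.normpath(posixpath.join(parent_source, sub_source))

print(joined_source('/srv/repos/outer', '../shared/inner'))         # /srv/repos/shared/inner
print(joined_source('/srv/repos/outer', 'ssh://hg@example.com/x'))  # returned unchanged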
296
297
297 def itersubrepos(ctx1, ctx2):
298 def itersubrepos(ctx1, ctx2):
298 """find subrepos in ctx1 or ctx2"""
299 """find subrepos in ctx1 or ctx2"""
299 # Create a (subpath, ctx) mapping where we prefer subpaths from
300 # Create a (subpath, ctx) mapping where we prefer subpaths from
300 # ctx1. The subpaths from ctx2 are important when the .hgsub file
301 # ctx1. The subpaths from ctx2 are important when the .hgsub file
301 # has been modified (in ctx2) but not yet committed (in ctx1).
302 # has been modified (in ctx2) but not yet committed (in ctx1).
302 subpaths = dict.fromkeys(ctx2.substate, ctx2)
303 subpaths = dict.fromkeys(ctx2.substate, ctx2)
303 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
304 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
304 for subpath, ctx in sorted(subpaths.iteritems()):
305 for subpath, ctx in sorted(subpaths.iteritems()):
305 yield subpath, ctx.sub(subpath)
306 yield subpath, ctx.sub(subpath)
306
307
307 def subrepo(ctx, path):
308 def subrepo(ctx, path):
308 """return instance of the right subrepo class for subrepo in path"""
309 """return instance of the right subrepo class for subrepo in path"""
309 # subrepo inherently violates our import layering rules
310 # subrepo inherently violates our import layering rules
310 # because it wants to make repo objects from deep inside the stack
311 # because it wants to make repo objects from deep inside the stack
311 # so we manually delay the circular imports to not break
312 # so we manually delay the circular imports to not break
312 # scripts that don't use our demand-loading
313 # scripts that don't use our demand-loading
313 global hg
314 global hg
314 import hg as h
315 import hg as h
315 hg = h
316 hg = h
316
317
317 scmutil.pathauditor(ctx._repo.root)(path)
318 scmutil.pathauditor(ctx._repo.root)(path)
318 state = ctx.substate[path]
319 state = ctx.substate[path]
319 if state[2] not in types:
320 if state[2] not in types:
320 raise util.Abort(_('unknown subrepo type %s') % state[2])
321 raise util.Abort(_('unknown subrepo type %s') % state[2])
321 return types[state[2]](ctx, path, state[:2])
322 return types[state[2]](ctx, path, state[:2])
322
323
323 # subrepo classes need to implement the following abstract class:
324 # subrepo classes need to implement the following abstract class:
324
325
325 class abstractsubrepo(object):
326 class abstractsubrepo(object):
326
327
327 def storeclean(self, path):
328 def storeclean(self, path):
328 """
329 """
329 returns true if the repository has not changed since it was last
330 returns true if the repository has not changed since it was last
330 cloned from or pushed to a given repository.
331 cloned from or pushed to a given repository.
331 """
332 """
332 return False
333 return False
333
334
334 def dirty(self, ignoreupdate=False):
335 def dirty(self, ignoreupdate=False):
335 """returns true if the dirstate of the subrepo is dirty or does not
336 """returns true if the dirstate of the subrepo is dirty or does not
336 match current stored state. If ignoreupdate is true, only check
337 match current stored state. If ignoreupdate is true, only check
337 whether the subrepo has uncommitted changes in its dirstate.
338 whether the subrepo has uncommitted changes in its dirstate.
338 """
339 """
339 raise NotImplementedError
340 raise NotImplementedError
340
341
341 def basestate(self):
342 def basestate(self):
342 """current working directory base state, disregarding .hgsubstate
343 """current working directory base state, disregarding .hgsubstate
343 state and working directory modifications"""
344 state and working directory modifications"""
344 raise NotImplementedError
345 raise NotImplementedError
345
346
346 def checknested(self, path):
347 def checknested(self, path):
347 """check if path is a subrepository within this repository"""
348 """check if path is a subrepository within this repository"""
348 return False
349 return False
349
350
350 def commit(self, text, user, date):
351 def commit(self, text, user, date):
351 """commit the current changes to the subrepo with the given
352 """commit the current changes to the subrepo with the given
352 log message. Use given user and date if possible. Return the
353 log message. Use given user and date if possible. Return the
353 new state of the subrepo.
354 new state of the subrepo.
354 """
355 """
355 raise NotImplementedError
356 raise NotImplementedError
356
357
357 def remove(self):
358 def remove(self):
358 """remove the subrepo
359 """remove the subrepo
359
360
360 (should verify the dirstate is not dirty first)
361 (should verify the dirstate is not dirty first)
361 """
362 """
362 raise NotImplementedError
363 raise NotImplementedError
363
364
364 def get(self, state, overwrite=False):
365 def get(self, state, overwrite=False):
365 """run whatever commands are needed to put the subrepo into
366 """run whatever commands are needed to put the subrepo into
366 this state
367 this state
367 """
368 """
368 raise NotImplementedError
369 raise NotImplementedError
369
370
370 def merge(self, state):
371 def merge(self, state):
371 """merge currently-saved state with the new state."""
372 """merge currently-saved state with the new state."""
372 raise NotImplementedError
373 raise NotImplementedError
373
374
374 def push(self, opts):
375 def push(self, opts):
375 """perform whatever action is analogous to 'hg push'
376 """perform whatever action is analogous to 'hg push'
376
377
377 This may be a no-op on some systems.
378 This may be a no-op on some systems.
378 """
379 """
379 raise NotImplementedError
380 raise NotImplementedError
380
381
381 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
382 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
382 return []
383 return []
383
384
384 def status(self, rev2, **opts):
385 def status(self, rev2, **opts):
385 return [], [], [], [], [], [], []
386 return [], [], [], [], [], [], []
386
387
387 def diff(self, ui, diffopts, node2, match, prefix, **opts):
388 def diff(self, ui, diffopts, node2, match, prefix, **opts):
388 pass
389 pass
389
390
390 def outgoing(self, ui, dest, opts):
391 def outgoing(self, ui, dest, opts):
391 return 1
392 return 1
392
393
393 def incoming(self, ui, source, opts):
394 def incoming(self, ui, source, opts):
394 return 1
395 return 1
395
396
396 def files(self):
397 def files(self):
397 """return filename iterator"""
398 """return filename iterator"""
398 raise NotImplementedError
399 raise NotImplementedError
399
400
400 def filedata(self, name):
401 def filedata(self, name):
401 """return file data"""
402 """return file data"""
402 raise NotImplementedError
403 raise NotImplementedError
403
404
404 def fileflags(self, name):
405 def fileflags(self, name):
405 """return file flags"""
406 """return file flags"""
406 return ''
407 return ''
407
408
408 def archive(self, ui, archiver, prefix, match=None):
409 def archive(self, ui, archiver, prefix, match=None):
409 if match is not None:
410 if match is not None:
410 files = [f for f in self.files() if match(f)]
411 files = [f for f in self.files() if match(f)]
411 else:
412 else:
412 files = self.files()
413 files = self.files()
413 total = len(files)
414 total = len(files)
414 relpath = subrelpath(self)
415 relpath = subrelpath(self)
415 ui.progress(_('archiving (%s)') % relpath, 0,
416 ui.progress(_('archiving (%s)') % relpath, 0,
416 unit=_('files'), total=total)
417 unit=_('files'), total=total)
417 for i, name in enumerate(files):
418 for i, name in enumerate(files):
418 flags = self.fileflags(name)
419 flags = self.fileflags(name)
419 mode = 'x' in flags and 0755 or 0644
420 mode = 'x' in flags and 0755 or 0644
420 symlink = 'l' in flags
421 symlink = 'l' in flags
421 archiver.addfile(os.path.join(prefix, self._path, name),
422 archiver.addfile(os.path.join(prefix, self._path, name),
422 mode, symlink, self.filedata(name))
423 mode, symlink, self.filedata(name))
423 ui.progress(_('archiving (%s)') % relpath, i + 1,
424 ui.progress(_('archiving (%s)') % relpath, i + 1,
424 unit=_('files'), total=total)
425 unit=_('files'), total=total)
425 ui.progress(_('archiving (%s)') % relpath, None)
426 ui.progress(_('archiving (%s)') % relpath, None)
426 return total
427 return total
427
428
428 def walk(self, match):
429 def walk(self, match):
429 '''
430 '''
430 walk recursively through the directory tree, finding all files
431 walk recursively through the directory tree, finding all files
431 matched by the match function
432 matched by the match function
432 '''
433 '''
433 pass
434 pass
434
435
435 def forget(self, ui, match, prefix):
436 def forget(self, ui, match, prefix):
436 return ([], [])
437 return ([], [])
437
438
438 def revert(self, ui, substate, *pats, **opts):
439 def revert(self, ui, substate, *pats, **opts):
439 ui.warn('%s: reverting %s subrepos is unsupported\n' \
440 ui.warn('%s: reverting %s subrepos is unsupported\n' \
440 % (substate[0], substate[2]))
441 % (substate[0], substate[2]))
441 return []
442 return []
442
443
443 class hgsubrepo(abstractsubrepo):
444 class hgsubrepo(abstractsubrepo):
444 def __init__(self, ctx, path, state):
445 def __init__(self, ctx, path, state):
445 self._path = path
446 self._path = path
446 self._state = state
447 self._state = state
447 r = ctx._repo
448 r = ctx._repo
448 root = r.wjoin(path)
449 root = r.wjoin(path)
449 create = False
450 create = False
450 if not os.path.exists(os.path.join(root, '.hg')):
451 if not os.path.exists(os.path.join(root, '.hg')):
451 create = True
452 create = True
452 util.makedirs(root)
453 util.makedirs(root)
453 self._repo = hg.repository(r.baseui, root, create=create)
454 self._repo = hg.repository(r.baseui, root, create=create)
454 for s, k in [('ui', 'commitsubrepos')]:
455 for s, k in [('ui', 'commitsubrepos')]:
455 v = r.ui.config(s, k)
456 v = r.ui.config(s, k)
456 if v:
457 if v:
457 self._repo.ui.setconfig(s, k, v)
458 self._repo.ui.setconfig(s, k, v)
458 self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
459 self._repo.ui.setconfig('ui', '_usedassubrepo', 'True')
459 self._initrepo(r, state[0], create)
460 self._initrepo(r, state[0], create)
460
461
461 def storeclean(self, path):
462 def storeclean(self, path):
462 clean = True
463 clean = True
463 lock = self._repo.lock()
464 lock = self._repo.lock()
464 itercache = self._calcstorehash(path)
465 itercache = self._calcstorehash(path)
465 try:
466 try:
466 for filehash in self._readstorehashcache(path):
467 for filehash in self._readstorehashcache(path):
467 if filehash != itercache.next():
468 if filehash != itercache.next():
468 clean = False
469 clean = False
469 break
470 break
470 except StopIteration:
471 except StopIteration:
471 # the cached and current pull states have a different size
472 # the cached and current pull states have a different size
472 clean = False
473 clean = False
473 if clean:
474 if clean:
474 try:
475 try:
475 itercache.next()
476 itercache.next()
476 # the cached and current pull states have a different size
477 # the cached and current pull states have a different size
477 clean = False
478 clean = False
478 except StopIteration:
479 except StopIteration:
479 pass
480 pass
480 lock.release()
481 lock.release()
481 return clean
482 return clean
482
483
483 def _calcstorehash(self, remotepath):
484 def _calcstorehash(self, remotepath):
484 '''calculate a unique "store hash"
485 '''calculate a unique "store hash"
485
486
486 This method is used to detect when there are changes that may
487 This method is used to detect when there are changes that may
487 require a push to a given remote path.'''
488 require a push to a given remote path.'''
488 # the files below are listed in (likely) increasing order of file size
489 # the files below are listed in (likely) increasing order of file size
489 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
490 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
490 yield '# %s\n' % _expandedabspath(remotepath)
491 yield '# %s\n' % _expandedabspath(remotepath)
491 for relname in filelist:
492 for relname in filelist:
492 absname = os.path.normpath(self._repo.join(relname))
493 absname = os.path.normpath(self._repo.join(relname))
493 yield '%s = %s\n' % (relname, _calcfilehash(absname))
494 yield '%s = %s\n' % (relname, _calcfilehash(absname))
494
495
495 def _getstorehashcachepath(self, remotepath):
496 def _getstorehashcachepath(self, remotepath):
496 '''get a unique path for the store hash cache'''
497 '''get a unique path for the store hash cache'''
497 return self._repo.join(os.path.join(
498 return self._repo.join(os.path.join(
498 'cache', 'storehash', _getstorehashcachename(remotepath)))
499 'cache', 'storehash', _getstorehashcachename(remotepath)))
499
500
500 def _readstorehashcache(self, remotepath):
501 def _readstorehashcache(self, remotepath):
501 '''read the store hash cache for a given remote repository'''
502 '''read the store hash cache for a given remote repository'''
502 cachefile = self._getstorehashcachepath(remotepath)
503 cachefile = self._getstorehashcachepath(remotepath)
503 if not os.path.exists(cachefile):
504 if not os.path.exists(cachefile):
504 return ''
505 return ''
505 fd = open(cachefile, 'r')
506 fd = open(cachefile, 'r')
506 pullstate = fd.readlines()
507 pullstate = fd.readlines()
507 fd.close()
508 fd.close()
508 return pullstate
509 return pullstate
509
510
510 def _cachestorehash(self, remotepath):
511 def _cachestorehash(self, remotepath):
511 '''cache the current store hash
512 '''cache the current store hash
512
513
513 Each remote repo requires its own store hash cache, because a subrepo
514 Each remote repo requires its own store hash cache, because a subrepo
514 store may be "clean" versus a given remote repo, but not versus another
515 store may be "clean" versus a given remote repo, but not versus another
515 '''
516 '''
516 cachefile = self._getstorehashcachepath(remotepath)
517 cachefile = self._getstorehashcachepath(remotepath)
517 lock = self._repo.lock()
518 lock = self._repo.lock()
518 storehash = list(self._calcstorehash(remotepath))
519 storehash = list(self._calcstorehash(remotepath))
519 cachedir = os.path.dirname(cachefile)
520 cachedir = os.path.dirname(cachefile)
520 if not os.path.exists(cachedir):
521 if not os.path.exists(cachedir):
521 util.makedirs(cachedir, notindexed=True)
522 util.makedirs(cachedir, notindexed=True)
522 fd = open(cachefile, 'w')
523 fd = open(cachefile, 'w')
523 fd.writelines(storehash)
524 fd.writelines(storehash)
524 fd.close()
525 fd.close()
525 lock.release()
526 lock.release()
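Reviewer note (illustrative, not part of the changeset): storeclean() above walks the cached store hash and a freshly computed one in lockstep and treats any mismatch, including a length difference in either direction, as "not clean"; the two StopIteration branches exist only to detect those length differences. Materializing both sides makes the equivalence easier to see. This is a stand-in, not a drop-in replacement:

def lines_match(cached_lines, current_iter):
    # equal content and equal length, in one comparison
    return list(cached_lines) == list(current_iter)

print(lines_match(['# /srv/remote\n', 'bookmarks = ab12\n'],
                  iter(['# /srv/remote\n', 'bookmarks = ab12\n'])))  # True
print(lines_match(['# /srv/remote\n'], iter([])))                    # False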
526
527
527 @annotatesubrepoerror
528 @annotatesubrepoerror
528 def _initrepo(self, parentrepo, source, create):
529 def _initrepo(self, parentrepo, source, create):
529 self._repo._subparent = parentrepo
530 self._repo._subparent = parentrepo
530 self._repo._subsource = source
531 self._repo._subsource = source
531
532
532 if create:
533 if create:
533 fp = self._repo.opener("hgrc", "w", text=True)
534 fp = self._repo.opener("hgrc", "w", text=True)
534 fp.write('[paths]\n')
535 fp.write('[paths]\n')
535
536
536 def addpathconfig(key, value):
537 def addpathconfig(key, value):
537 if value:
538 if value:
538 fp.write('%s = %s\n' % (key, value))
539 fp.write('%s = %s\n' % (key, value))
539 self._repo.ui.setconfig('paths', key, value)
540 self._repo.ui.setconfig('paths', key, value)
540
541
541 defpath = _abssource(self._repo, abort=False)
542 defpath = _abssource(self._repo, abort=False)
542 defpushpath = _abssource(self._repo, True, abort=False)
543 defpushpath = _abssource(self._repo, True, abort=False)
543 addpathconfig('default', defpath)
544 addpathconfig('default', defpath)
544 if defpath != defpushpath:
545 if defpath != defpushpath:
545 addpathconfig('default-push', defpushpath)
546 addpathconfig('default-push', defpushpath)
546 fp.close()
547 fp.close()
547
548
548 @annotatesubrepoerror
549 @annotatesubrepoerror
549 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
550 def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
550 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
551 return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
551 os.path.join(prefix, self._path), explicitonly)
552 os.path.join(prefix, self._path), explicitonly)
552
553
553 @annotatesubrepoerror
554 @annotatesubrepoerror
554 def status(self, rev2, **opts):
555 def status(self, rev2, **opts):
555 try:
556 try:
556 rev1 = self._state[1]
557 rev1 = self._state[1]
557 ctx1 = self._repo[rev1]
558 ctx1 = self._repo[rev1]
558 ctx2 = self._repo[rev2]
559 ctx2 = self._repo[rev2]
559 return self._repo.status(ctx1, ctx2, **opts)
560 return self._repo.status(ctx1, ctx2, **opts)
560 except error.RepoLookupError, inst:
561 except error.RepoLookupError, inst:
561 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
562 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
562 % (inst, subrelpath(self)))
563 % (inst, subrelpath(self)))
563 return [], [], [], [], [], [], []
564 return [], [], [], [], [], [], []
564
565
565 @annotatesubrepoerror
566 @annotatesubrepoerror
566 def diff(self, ui, diffopts, node2, match, prefix, **opts):
567 def diff(self, ui, diffopts, node2, match, prefix, **opts):
567 try:
568 try:
568 node1 = node.bin(self._state[1])
569 node1 = node.bin(self._state[1])
569 # We currently expect node2 to come from substate and be
570 # We currently expect node2 to come from substate and be
570 # in hex format
571 # in hex format
571 if node2 is not None:
572 if node2 is not None:
572 node2 = node.bin(node2)
573 node2 = node.bin(node2)
573 cmdutil.diffordiffstat(ui, self._repo, diffopts,
574 cmdutil.diffordiffstat(ui, self._repo, diffopts,
574 node1, node2, match,
575 node1, node2, match,
575 prefix=posixpath.join(prefix, self._path),
576 prefix=posixpath.join(prefix, self._path),
576 listsubrepos=True, **opts)
577 listsubrepos=True, **opts)
577 except error.RepoLookupError, inst:
578 except error.RepoLookupError, inst:
578 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
579 self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
579 % (inst, subrelpath(self)))
580 % (inst, subrelpath(self)))
580
581
581 @annotatesubrepoerror
582 @annotatesubrepoerror
582 def archive(self, ui, archiver, prefix, match=None):
583 def archive(self, ui, archiver, prefix, match=None):
583 self._get(self._state + ('hg',))
584 self._get(self._state + ('hg',))
584 total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
585 total = abstractsubrepo.archive(self, ui, archiver, prefix, match)
585 rev = self._state[1]
586 rev = self._state[1]
586 ctx = self._repo[rev]
587 ctx = self._repo[rev]
587 for subpath in ctx.substate:
588 for subpath in ctx.substate:
588 s = subrepo(ctx, subpath)
589 s = subrepo(ctx, subpath)
589 submatch = matchmod.narrowmatcher(subpath, match)
590 submatch = matchmod.narrowmatcher(subpath, match)
590 total += s.archive(
591 total += s.archive(
591 ui, archiver, os.path.join(prefix, self._path), submatch)
592 ui, archiver, os.path.join(prefix, self._path), submatch)
592 return total
593 return total
593
594
594 @annotatesubrepoerror
595 @annotatesubrepoerror
595 def dirty(self, ignoreupdate=False):
596 def dirty(self, ignoreupdate=False):
596 r = self._state[1]
597 r = self._state[1]
597 if r == '' and not ignoreupdate: # no state recorded
598 if r == '' and not ignoreupdate: # no state recorded
598 return True
599 return True
599 w = self._repo[None]
600 w = self._repo[None]
600 if r != w.p1().hex() and not ignoreupdate:
601 if r != w.p1().hex() and not ignoreupdate:
601 # different version checked out
602 # different version checked out
602 return True
603 return True
603 return w.dirty() # working directory changed
604 return w.dirty() # working directory changed
604
605
605 def basestate(self):
606 def basestate(self):
606 return self._repo['.'].hex()
607 return self._repo['.'].hex()
607
608
608 def checknested(self, path):
609 def checknested(self, path):
609 return self._repo._checknested(self._repo.wjoin(path))
610 return self._repo._checknested(self._repo.wjoin(path))
610
611
611 @annotatesubrepoerror
612 @annotatesubrepoerror
612 def commit(self, text, user, date):
613 def commit(self, text, user, date):
613 # don't bother committing in the subrepo if it's only been
614 # don't bother committing in the subrepo if it's only been
614 # updated
615 # updated
615 if not self.dirty(True):
616 if not self.dirty(True):
616 return self._repo['.'].hex()
617 return self._repo['.'].hex()
617 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
618 self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
618 n = self._repo.commit(text, user, date)
619 n = self._repo.commit(text, user, date)
619 if not n:
620 if not n:
620 return self._repo['.'].hex() # different version checked out
621 return self._repo['.'].hex() # different version checked out
621 return node.hex(n)
622 return node.hex(n)
622
623
623 @annotatesubrepoerror
624 @annotatesubrepoerror
624 def remove(self):
625 def remove(self):
625 # we can't fully delete the repository as it may contain
626 # we can't fully delete the repository as it may contain
626 # local-only history
627 # local-only history
627 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
628 self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
628 hg.clean(self._repo, node.nullid, False)
629 hg.clean(self._repo, node.nullid, False)
629
630
630 def _get(self, state):
631 def _get(self, state):
631 source, revision, kind = state
632 source, revision, kind = state
632 if revision not in self._repo:
633 if revision not in self._repo:
633 self._repo._subsource = source
634 self._repo._subsource = source
634 srcurl = _abssource(self._repo)
635 srcurl = _abssource(self._repo)
635 other = hg.peer(self._repo, {}, srcurl)
636 other = hg.peer(self._repo, {}, srcurl)
636 if len(self._repo) == 0:
637 if len(self._repo) == 0:
637 self._repo.ui.status(_('cloning subrepo %s from %s\n')
638 self._repo.ui.status(_('cloning subrepo %s from %s\n')
638 % (subrelpath(self), srcurl))
639 % (subrelpath(self), srcurl))
639 parentrepo = self._repo._subparent
640 parentrepo = self._repo._subparent
640 shutil.rmtree(self._repo.path)
641 shutil.rmtree(self._repo.path)
641 other, cloned = hg.clone(self._repo._subparent.baseui, {},
642 other, cloned = hg.clone(self._repo._subparent.baseui, {},
642 other, self._repo.root,
643 other, self._repo.root,
643 update=False)
644 update=False)
644 self._repo = cloned.local()
645 self._repo = cloned.local()
645 self._initrepo(parentrepo, source, create=True)
646 self._initrepo(parentrepo, source, create=True)
646 self._cachestorehash(srcurl)
647 self._cachestorehash(srcurl)
647 else:
648 else:
648 self._repo.ui.status(_('pulling subrepo %s from %s\n')
649 self._repo.ui.status(_('pulling subrepo %s from %s\n')
649 % (subrelpath(self), srcurl))
650 % (subrelpath(self), srcurl))
650 cleansub = self.storeclean(srcurl)
651 cleansub = self.storeclean(srcurl)
651 remotebookmarks = other.listkeys('bookmarks')
652 remotebookmarks = other.listkeys('bookmarks')
652 self._repo.pull(other)
653 self._repo.pull(other)
653 bookmarks.updatefromremote(self._repo.ui, self._repo,
654 bookmarks.updatefromremote(self._repo.ui, self._repo,
654 remotebookmarks, srcurl)
655 remotebookmarks, srcurl)
655 if cleansub:
656 if cleansub:
656 # keep the repo clean after pull
657 # keep the repo clean after pull
657 self._cachestorehash(srcurl)
658 self._cachestorehash(srcurl)
658
659
659 @annotatesubrepoerror
660 @annotatesubrepoerror
660 def get(self, state, overwrite=False):
661 def get(self, state, overwrite=False):
661 self._get(state)
662 self._get(state)
662 source, revision, kind = state
663 source, revision, kind = state
663 self._repo.ui.debug("getting subrepo %s\n" % self._path)
664 self._repo.ui.debug("getting subrepo %s\n" % self._path)
664 hg.updaterepo(self._repo, revision, overwrite)
665 hg.updaterepo(self._repo, revision, overwrite)
665
666
666 @annotatesubrepoerror
667 @annotatesubrepoerror
667 def merge(self, state):
668 def merge(self, state):
668 self._get(state)
669 self._get(state)
669 cur = self._repo['.']
670 cur = self._repo['.']
670 dst = self._repo[state[1]]
671 dst = self._repo[state[1]]
671 anc = dst.ancestor(cur)
672 anc = dst.ancestor(cur)
672
673
673 def mergefunc():
674 def mergefunc():
674 if anc == cur and dst.branch() == cur.branch():
675 if anc == cur and dst.branch() == cur.branch():
675 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
676 self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
676 hg.update(self._repo, state[1])
677 hg.update(self._repo, state[1])
677 elif anc == dst:
678 elif anc == dst:
678 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
679 self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
679 else:
680 else:
680 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
681 self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
681 hg.merge(self._repo, state[1], remind=False)
682 hg.merge(self._repo, state[1], remind=False)
682
683
683 wctx = self._repo[None]
684 wctx = self._repo[None]
684 if self.dirty():
685 if self.dirty():
685 if anc != dst:
686 if anc != dst:
686 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
687 if _updateprompt(self._repo.ui, self, wctx.dirty(), cur, dst):
687 mergefunc()
688 mergefunc()
688 else:
689 else:
689 mergefunc()
690 mergefunc()
690 else:
691 else:
691 mergefunc()
692 mergefunc()
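Reviewer note (illustrative, not part of the changeset): mergefunc() above picks one of three actions from the ancestor relationship between the currently checked-out subrepo revision and the target revision. A hypothetical restatement with plain values instead of changectx objects:

def subrepo_action(current, target, ancestor, same_branch):
    if ancestor == current and same_branch:
        return 'update'   # fast-forward: just check out the target
    if ancestor == target:
        return 'skip'     # target is already an ancestor of what we have
    return 'merge'        # divergent: a real merge is needed

print(subrepo_action('a1', 'b2', 'a1', True))  # update
print(subrepo_action('a1', 'b2', 'b2', True))  # skip
print(subrepo_action('a1', 'b2', 'c0', True))  # merge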
692
693
693 @annotatesubrepoerror
694 @annotatesubrepoerror
694 def push(self, opts):
695 def push(self, opts):
695 force = opts.get('force')
696 force = opts.get('force')
696 newbranch = opts.get('new_branch')
697 newbranch = opts.get('new_branch')
697 ssh = opts.get('ssh')
698 ssh = opts.get('ssh')
698
699
699 # push subrepos depth-first for coherent ordering
700 # push subrepos depth-first for coherent ordering
700 c = self._repo['']
701 c = self._repo['']
701 subs = c.substate # only repos that are committed
702 subs = c.substate # only repos that are committed
702 for s in sorted(subs):
703 for s in sorted(subs):
703 if c.sub(s).push(opts) == 0:
704 if c.sub(s).push(opts) == 0:
704 return False
705 return False
705
706
706 dsturl = _abssource(self._repo, True)
707 dsturl = _abssource(self._repo, True)
707 if not force:
708 if not force:
708 if self.storeclean(dsturl):
709 if self.storeclean(dsturl):
709 self._repo.ui.status(
710 self._repo.ui.status(
710 _('no changes made to subrepo %s since last push to %s\n')
711 _('no changes made to subrepo %s since last push to %s\n')
711 % (subrelpath(self), dsturl))
712 % (subrelpath(self), dsturl))
712 return None
713 return None
713 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
714 self._repo.ui.status(_('pushing subrepo %s to %s\n') %
714 (subrelpath(self), dsturl))
715 (subrelpath(self), dsturl))
715 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
716 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
716 res = self._repo.push(other, force, newbranch=newbranch)
717 res = self._repo.push(other, force, newbranch=newbranch)
717
718
718 # the repo is now clean
719 # the repo is now clean
719 self._cachestorehash(dsturl)
720 self._cachestorehash(dsturl)
720 return res
721 return res
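Reviewer note (illustrative, not part of the changeset): push() above recurses into committed subrepos first and then skips the network push entirely when the store hash says nothing changed since the last push to this destination. A self-contained skeleton of that control flow, using made-up stand-in objects:

class FakeSub(object):
    def __init__(self, name, clean=False, children=None):
        self.name, self.clean, self.children = name, clean, children or {}
    def store_clean(self, dest):
        return self.clean
    def do_push(self, dest):
        print('pushing %s to %s' % (self.name, dest))
        return 1                               # non-zero means success here

def push_tree(node, dest, force=False):
    for name in sorted(node.children):         # committed children first
        if push_tree(node.children[name], dest, force) == 0:
            return False                       # a child push failed outright
    if not force and node.store_clean(dest):
        return None                            # store unchanged: skip the push
    return node.do_push(dest)

outer = FakeSub('outer', children={'libs/inner': FakeSub('libs/inner', clean=True)})
push_tree(outer, 'https://hg.example.com/outer')  # only 'outer' is pushed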
721
722
722 @annotatesubrepoerror
723 @annotatesubrepoerror
723 def outgoing(self, ui, dest, opts):
724 def outgoing(self, ui, dest, opts):
724 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
725 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
725
726
726 @annotatesubrepoerror
727 @annotatesubrepoerror
727 def incoming(self, ui, source, opts):
728 def incoming(self, ui, source, opts):
728 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
729 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
729
730
730 @annotatesubrepoerror
731 @annotatesubrepoerror
731 def files(self):
732 def files(self):
732 rev = self._state[1]
733 rev = self._state[1]
733 ctx = self._repo[rev]
734 ctx = self._repo[rev]
734 return ctx.manifest()
735 return ctx.manifest()
735
736
736 def filedata(self, name):
737 def filedata(self, name):
737 rev = self._state[1]
738 rev = self._state[1]
738 return self._repo[rev][name].data()
739 return self._repo[rev][name].data()
739
740
740 def fileflags(self, name):
741 def fileflags(self, name):
741 rev = self._state[1]
742 rev = self._state[1]
742 ctx = self._repo[rev]
743 ctx = self._repo[rev]
743 return ctx.flags(name)
744 return ctx.flags(name)
744
745
745 def walk(self, match):
746 def walk(self, match):
746 ctx = self._repo[None]
747 ctx = self._repo[None]
747 return ctx.walk(match)
748 return ctx.walk(match)
748
749
749 @annotatesubrepoerror
750 @annotatesubrepoerror
750 def forget(self, ui, match, prefix):
751 def forget(self, ui, match, prefix):
751 return cmdutil.forget(ui, self._repo, match,
752 return cmdutil.forget(ui, self._repo, match,
752 os.path.join(prefix, self._path), True)
753 os.path.join(prefix, self._path), True)
753
754
754 @annotatesubrepoerror
755 @annotatesubrepoerror
755 def revert(self, ui, substate, *pats, **opts):
756 def revert(self, ui, substate, *pats, **opts):
756 # reverting a subrepo is a two-step process:
757 # reverting a subrepo is a two-step process:
757 # 1. if no_backup is not set, revert all modified
758 # 1. if no_backup is not set, revert all modified
758 # files inside the subrepo
759 # files inside the subrepo
759 # 2. update the subrepo to the revision specified in
760 # 2. update the subrepo to the revision specified in
760 # the corresponding substate dictionary
761 # the corresponding substate dictionary
761 ui.status(_('reverting subrepo %s\n') % substate[0])
762 ui.status(_('reverting subrepo %s\n') % substate[0])
762 if not opts.get('no_backup'):
763 if not opts.get('no_backup'):
763 # Revert all files on the subrepo, creating backups
764 # Revert all files on the subrepo, creating backups
764 # Note that this will not recursively revert subrepos
765 # Note that this will not recursively revert subrepos
765 # We could do it if there was a set:subrepos() predicate
766 # We could do it if there was a set:subrepos() predicate
766 opts = opts.copy()
767 opts = opts.copy()
767 opts['date'] = None
768 opts['date'] = None
768 opts['rev'] = substate[1]
769 opts['rev'] = substate[1]
769
770
770 pats = []
771 pats = []
771 if not opts.get('all'):
772 if not opts.get('all'):
772 pats = ['set:modified()']
773 pats = ['set:modified()']
773 self.filerevert(ui, *pats, **opts)
774 self.filerevert(ui, *pats, **opts)
774
775
775 # Update the repo to the revision specified in the given substate
776 # Update the repo to the revision specified in the given substate
776 self.get(substate, overwrite=True)
777 self.get(substate, overwrite=True)
777
778
778 def filerevert(self, ui, *pats, **opts):
779 def filerevert(self, ui, *pats, **opts):
779 ctx = self._repo[opts['rev']]
780 ctx = self._repo[opts['rev']]
780 parents = self._repo.dirstate.parents()
781 parents = self._repo.dirstate.parents()
781 if opts.get('all'):
782 if opts.get('all'):
782 pats = ['set:modified()']
783 pats = ['set:modified()']
783 else:
784 else:
784 pats = []
785 pats = []
785 cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
786 cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
786
787
787 class svnsubrepo(abstractsubrepo):
788 class svnsubrepo(abstractsubrepo):
788 def __init__(self, ctx, path, state):
789 def __init__(self, ctx, path, state):
789 self._path = path
790 self._path = path
790 self._state = state
791 self._state = state
791 self._ctx = ctx
792 self._ctx = ctx
792 self._ui = ctx._repo.ui
793 self._ui = ctx._repo.ui
793 self._exe = util.findexe('svn')
794 self._exe = util.findexe('svn')
794 if not self._exe:
795 if not self._exe:
795 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
796 raise util.Abort(_("'svn' executable not found for subrepo '%s'")
796 % self._path)
797 % self._path)
797
798
798 def _svncommand(self, commands, filename='', failok=False):
799 def _svncommand(self, commands, filename='', failok=False):
799 cmd = [self._exe]
800 cmd = [self._exe]
800 extrakw = {}
801 extrakw = {}
801 if not self._ui.interactive():
802 if not self._ui.interactive():
802 # Making stdin be a pipe should prevent svn from behaving
803 # Making stdin be a pipe should prevent svn from behaving
803 # interactively even if we can't pass --non-interactive.
804 # interactively even if we can't pass --non-interactive.
804 extrakw['stdin'] = subprocess.PIPE
805 extrakw['stdin'] = subprocess.PIPE
805 # Starting in svn 1.5 --non-interactive is a global flag
806 # Starting in svn 1.5 --non-interactive is a global flag
806 # instead of being per-command, but we need to support 1.4 so
807 # instead of being per-command, but we need to support 1.4 so
807 # we have to be intelligent about what commands take
808 # we have to be intelligent about what commands take
808 # --non-interactive.
809 # --non-interactive.
809 if commands[0] in ('update', 'checkout', 'commit'):
810 if commands[0] in ('update', 'checkout', 'commit'):
810 cmd.append('--non-interactive')
811 cmd.append('--non-interactive')
811 cmd.extend(commands)
812 cmd.extend(commands)
812 if filename is not None:
813 if filename is not None:
813 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
814 path = os.path.join(self._ctx._repo.origroot, self._path, filename)
814 cmd.append(path)
815 cmd.append(path)
815 env = dict(os.environ)
816 env = dict(os.environ)
816 # Avoid localized output, preserve current locale for everything else.
817 # Avoid localized output, preserve current locale for everything else.
817 lc_all = env.get('LC_ALL')
818 lc_all = env.get('LC_ALL')
818 if lc_all:
819 if lc_all:
819 env['LANG'] = lc_all
820 env['LANG'] = lc_all
820 del env['LC_ALL']
821 del env['LC_ALL']
821 env['LC_MESSAGES'] = 'C'
822 env['LC_MESSAGES'] = 'C'
822 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
823 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
823 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
824 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
824 universal_newlines=True, env=env, **extrakw)
825 universal_newlines=True, env=env, **extrakw)
825 stdout, stderr = p.communicate()
826 stdout, stderr = p.communicate()
826 stderr = stderr.strip()
827 stderr = stderr.strip()
827 if not failok:
828 if not failok:
828 if p.returncode:
829 if p.returncode:
829 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
830 raise util.Abort(stderr or 'exited with code %d' % p.returncode)
830 if stderr:
831 if stderr:
831 self._ui.warn(stderr + '\n')
832 self._ui.warn(stderr + '\n')
832 return stdout, stderr
833 return stdout, stderr
833
834
834 @propertycache
835 @propertycache
835 def _svnversion(self):
836 def _svnversion(self):
836 output, err = self._svncommand(['--version', '--quiet'], filename=None)
837 output, err = self._svncommand(['--version', '--quiet'], filename=None)
837 m = re.search(r'^(\d+)\.(\d+)', output)
838 m = re.search(r'^(\d+)\.(\d+)', output)
838 if not m:
839 if not m:
839 raise util.Abort(_('cannot retrieve svn tool version'))
840 raise util.Abort(_('cannot retrieve svn tool version'))
840 return (int(m.group(1)), int(m.group(2)))
841 return (int(m.group(1)), int(m.group(2)))
841
842
842 def _wcrevs(self):
843 def _wcrevs(self):
843 # Get the working directory revision as well as the last
844 # Get the working directory revision as well as the last
844 # commit revision so we can compare the subrepo state with
845 # commit revision so we can compare the subrepo state with
845 # both. We used to store the working directory one.
846 # both. We used to store the working directory one.
846 output, err = self._svncommand(['info', '--xml'])
847 output, err = self._svncommand(['info', '--xml'])
847 doc = xml.dom.minidom.parseString(output)
848 doc = xml.dom.minidom.parseString(output)
848 entries = doc.getElementsByTagName('entry')
849 entries = doc.getElementsByTagName('entry')
849 lastrev, rev = '0', '0'
850 lastrev, rev = '0', '0'
850 if entries:
851 if entries:
851 rev = str(entries[0].getAttribute('revision')) or '0'
852 rev = str(entries[0].getAttribute('revision')) or '0'
852 commits = entries[0].getElementsByTagName('commit')
853 commits = entries[0].getElementsByTagName('commit')
853 if commits:
854 if commits:
854 lastrev = str(commits[0].getAttribute('revision')) or '0'
855 lastrev = str(commits[0].getAttribute('revision')) or '0'
855 return (lastrev, rev)
856 return (lastrev, rev)
856
857
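# Illustrative sketch (shape of the 'svn info --xml' fragment assumed from
# the getAttribute() calls above, values invented):
#
#   <entry kind="dir" path="." revision="42">
#     <commit revision="40">...</commit>
#   </entry>
#
# would make _wcrevs() return ('40', '42'): the last committed revision
# and the working-copy (checked out) revision, in that order.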
857 def _wcrev(self):
858 def _wcrev(self):
858 return self._wcrevs()[0]
859 return self._wcrevs()[0]
859
860
860 def _wcchanged(self):
861 def _wcchanged(self):
861 """Return (changes, extchanges, missing) where changes is True
862 """Return (changes, extchanges, missing) where changes is True
862 if the working directory was changed, extchanges is
863 if the working directory was changed, extchanges is
863 True if any of these changes concern an external entry and missing
864 True if any of these changes concern an external entry and missing
864 is True if any change is a missing entry.
865 is True if any change is a missing entry.
865 """
866 """
866 output, err = self._svncommand(['status', '--xml'])
867 output, err = self._svncommand(['status', '--xml'])
867 externals, changes, missing = [], [], []
868 externals, changes, missing = [], [], []
868 doc = xml.dom.minidom.parseString(output)
869 doc = xml.dom.minidom.parseString(output)
869 for e in doc.getElementsByTagName('entry'):
870 for e in doc.getElementsByTagName('entry'):
870 s = e.getElementsByTagName('wc-status')
871 s = e.getElementsByTagName('wc-status')
871 if not s:
872 if not s:
872 continue
873 continue
873 item = s[0].getAttribute('item')
874 item = s[0].getAttribute('item')
874 props = s[0].getAttribute('props')
875 props = s[0].getAttribute('props')
875 path = e.getAttribute('path')
876 path = e.getAttribute('path')
876 if item == 'external':
877 if item == 'external':
877 externals.append(path)
878 externals.append(path)
878 elif item == 'missing':
879 elif item == 'missing':
879 missing.append(path)
880 missing.append(path)
880 if (item not in ('', 'normal', 'unversioned', 'external')
881 if (item not in ('', 'normal', 'unversioned', 'external')
881 or props not in ('', 'none', 'normal')):
882 or props not in ('', 'none', 'normal')):
882 changes.append(path)
883 changes.append(path)
883 for path in changes:
884 for path in changes:
884 for ext in externals:
885 for ext in externals:
885 if path == ext or path.startswith(ext + os.sep):
886 if path == ext or path.startswith(ext + os.sep):
886 return True, True, bool(missing)
887 return True, True, bool(missing)
887 return bool(changes), False, bool(missing)
888 return bool(changes), False, bool(missing)
888
889
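# Illustrative sketch (entry shape assumed from the attribute lookups in
# _wcchanged, values invented):
#
#   <entry path="src/foo.c">
#     <wc-status item="modified" props="none"/>
#   </entry>
#
# 'external' items are collected in externals, 'missing' items in missing,
# and any item/props value outside the "clean" sets counts as a change; a
# change inside an external makes the second element of the result True.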
889 def dirty(self, ignoreupdate=False):
890 def dirty(self, ignoreupdate=False):
890 if not self._wcchanged()[0]:
891 if not self._wcchanged()[0]:
891 if self._state[1] in self._wcrevs() or ignoreupdate:
892 if self._state[1] in self._wcrevs() or ignoreupdate:
892 return False
893 return False
893 return True
894 return True
894
895
895 def basestate(self):
896 def basestate(self):
896 lastrev, rev = self._wcrevs()
897 lastrev, rev = self._wcrevs()
897 if lastrev != rev:
898 if lastrev != rev:
898 # The last committed rev is not the same as rev. We would
899 # The last committed rev is not the same as rev. We would
899 # like to take lastrev but we do not know if the subrepo
900 # like to take lastrev but we do not know if the subrepo
900 # URL exists at lastrev. Test it and fall back to rev if it
901 # URL exists at lastrev. Test it and fall back to rev if it
901 # is not there.
902 # is not there.
902 try:
903 try:
903 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
904 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
904 return lastrev
905 return lastrev
905 except error.Abort:
906 except error.Abort:
906 pass
907 pass
907 return rev
908 return rev
908
909
909 @annotatesubrepoerror
910 @annotatesubrepoerror
910 def commit(self, text, user, date):
911 def commit(self, text, user, date):
911 # user and date are out of our hands since svn is centralized
912 # user and date are out of our hands since svn is centralized
912 changed, extchanged, missing = self._wcchanged()
913 changed, extchanged, missing = self._wcchanged()
913 if not changed:
914 if not changed:
914 return self.basestate()
915 return self.basestate()
915 if extchanged:
916 if extchanged:
916 # Do not try to commit externals
917 # Do not try to commit externals
917 raise util.Abort(_('cannot commit svn externals'))
918 raise util.Abort(_('cannot commit svn externals'))
918 if missing:
919 if missing:
919 # svn can commit with missing entries but aborting like hg
920 # svn can commit with missing entries but aborting like hg
920 # seems a better approach.
921 # seems a better approach.
921 raise util.Abort(_('cannot commit missing svn entries'))
922 raise util.Abort(_('cannot commit missing svn entries'))
922 commitinfo, err = self._svncommand(['commit', '-m', text])
923 commitinfo, err = self._svncommand(['commit', '-m', text])
923 self._ui.status(commitinfo)
924 self._ui.status(commitinfo)
924 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
925 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
925 if not newrev:
926 if not newrev:
926 if not commitinfo.strip():
927 if not commitinfo.strip():
927 # Sometimes, our definition of "changed" differs from
928 # Sometimes, our definition of "changed" differs from
928 # svn's. For instance, svn ignores missing files
929 # svn's. For instance, svn ignores missing files
929 # when committing. If there are only missing files, no
930 # when committing. If there are only missing files, no
930 # commit is made, there is no output and no error code.
931 # commit is made, there is no output and no error code.
931 raise util.Abort(_('failed to commit svn changes'))
932 raise util.Abort(_('failed to commit svn changes'))
932 raise util.Abort(commitinfo.splitlines()[-1])
933 raise util.Abort(commitinfo.splitlines()[-1])
933 newrev = newrev.groups()[0]
934 newrev = newrev.groups()[0]
934 self._ui.status(self._svncommand(['update', '-r', newrev])[0])
935 self._ui.status(self._svncommand(['update', '-r', newrev])[0])
935 return newrev
936 return newrev
936
937
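# Illustrative sketch (svn wording assumed, not verified for every svn
# release): a successful commit normally ends with a line such as
#
#   Committed revision 1234.
#
# which the regex above captures as newrev = '1234' before the working
# copy is updated to that revision.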
937 @annotatesubrepoerror
938 @annotatesubrepoerror
938 def remove(self):
939 def remove(self):
939 if self.dirty():
940 if self.dirty():
940 self._ui.warn(_('not removing repo %s because '
941 self._ui.warn(_('not removing repo %s because '
941 'it has changes.\n') % self._path)
942 'it has changes.\n') % self._path)
942 return
943 return
943 self._ui.note(_('removing subrepo %s\n') % self._path)
944 self._ui.note(_('removing subrepo %s\n') % self._path)
944
945
945 def onerror(function, path, excinfo):
946 def onerror(function, path, excinfo):
946 if function is not os.remove:
947 if function is not os.remove:
947 raise
948 raise
948 # read-only files cannot be unlinked under Windows
949 # read-only files cannot be unlinked under Windows
949 s = os.stat(path)
950 s = os.stat(path)
950 if (s.st_mode & stat.S_IWRITE) != 0:
951 if (s.st_mode & stat.S_IWRITE) != 0:
951 raise
952 raise
952 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
953 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
953 os.remove(path)
954 os.remove(path)
954
955
955 path = self._ctx._repo.wjoin(self._path)
956 path = self._ctx._repo.wjoin(self._path)
956 shutil.rmtree(path, onerror=onerror)
957 shutil.rmtree(path, onerror=onerror)
957 try:
958 try:
958 os.removedirs(os.path.dirname(path))
959 os.removedirs(os.path.dirname(path))
959 except OSError:
960 except OSError:
960 pass
961 pass
961
962
962 @annotatesubrepoerror
963 @annotatesubrepoerror
963 def get(self, state, overwrite=False):
964 def get(self, state, overwrite=False):
964 if overwrite:
965 if overwrite:
965 self._svncommand(['revert', '--recursive'])
966 self._svncommand(['revert', '--recursive'])
966 args = ['checkout']
967 args = ['checkout']
967 if self._svnversion >= (1, 5):
968 if self._svnversion >= (1, 5):
968 args.append('--force')
969 args.append('--force')
969 # The revision must be specified at the end of the URL to properly
970 # The revision must be specified at the end of the URL to properly
970 # update to a directory which has since been deleted and recreated.
971 # update to a directory which has since been deleted and recreated.
971 args.append('%s@%s' % (state[0], state[1]))
972 args.append('%s@%s' % (state[0], state[1]))
972 status, err = self._svncommand(args, failok=True)
973 status, err = self._svncommand(args, failok=True)
973 if not re.search('Checked out revision [0-9]+.', status):
974 if not re.search('Checked out revision [0-9]+.', status):
974 if ('is already a working copy for a different URL' in err
975 if ('is already a working copy for a different URL' in err
975 and (self._wcchanged()[:2] == (False, False))):
976 and (self._wcchanged()[:2] == (False, False))):
976 # obstructed but clean working copy, so just blow it away.
977 # obstructed but clean working copy, so just blow it away.
977 self.remove()
978 self.remove()
978 self.get(state, overwrite=False)
979 self.get(state, overwrite=False)
979 return
980 return
980 raise util.Abort((status or err).splitlines()[-1])
981 raise util.Abort((status or err).splitlines()[-1])
981 self._ui.status(status)
982 self._ui.status(status)
982
983
983 @annotatesubrepoerror
984 @annotatesubrepoerror
984 def merge(self, state):
985 def merge(self, state):
985 old = self._state[1]
986 old = self._state[1]
986 new = state[1]
987 new = state[1]
987 wcrev = self._wcrev()
988 wcrev = self._wcrev()
988 if new != wcrev:
989 if new != wcrev:
989 dirty = old == wcrev or self._wcchanged()[0]
990 dirty = old == wcrev or self._wcchanged()[0]
990 if _updateprompt(self._ui, self, dirty, wcrev, new):
991 if _updateprompt(self._ui, self, dirty, wcrev, new):
991 self.get(state, False)
992 self.get(state, False)
992
993
993 def push(self, opts):
994 def push(self, opts):
994 # push is a no-op for SVN
995 # push is a no-op for SVN
995 return True
996 return True
996
997
997 @annotatesubrepoerror
998 @annotatesubrepoerror
998 def files(self):
999 def files(self):
999 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1000 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1000 doc = xml.dom.minidom.parseString(output)
1001 doc = xml.dom.minidom.parseString(output)
1001 paths = []
1002 paths = []
1002 for e in doc.getElementsByTagName('entry'):
1003 for e in doc.getElementsByTagName('entry'):
1003 kind = str(e.getAttribute('kind'))
1004 kind = str(e.getAttribute('kind'))
1004 if kind != 'file':
1005 if kind != 'file':
1005 continue
1006 continue
1006 name = ''.join(c.data for c
1007 name = ''.join(c.data for c
1007 in e.getElementsByTagName('name')[0].childNodes
1008 in e.getElementsByTagName('name')[0].childNodes
1008 if c.nodeType == c.TEXT_NODE)
1009 if c.nodeType == c.TEXT_NODE)
1009 paths.append(name.encode('utf-8'))
1010 paths.append(name.encode('utf-8'))
1010 return paths
1011 return paths
1011
1012
1012 def filedata(self, name):
1013 def filedata(self, name):
1013 return self._svncommand(['cat'], name)[0]
1014 return self._svncommand(['cat'], name)[0]
1014
1015
1015
1016
1016 class gitsubrepo(abstractsubrepo):
1017 class gitsubrepo(abstractsubrepo):
1017 def __init__(self, ctx, path, state):
1018 def __init__(self, ctx, path, state):
1018 self._state = state
1019 self._state = state
1019 self._ctx = ctx
1020 self._ctx = ctx
1020 self._path = path
1021 self._path = path
1021 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1022 self._relpath = os.path.join(reporelpath(ctx._repo), path)
1022 self._abspath = ctx._repo.wjoin(path)
1023 self._abspath = ctx._repo.wjoin(path)
1023 self._subparent = ctx._repo
1024 self._subparent = ctx._repo
1024 self._ui = ctx._repo.ui
1025 self._ui = ctx._repo.ui
1025 self._ensuregit()
1026 self._ensuregit()
1026
1027
1027 def _ensuregit(self):
1028 def _ensuregit(self):
1028 try:
1029 try:
1029 self._gitexecutable = 'git'
1030 self._gitexecutable = 'git'
1030 out, err = self._gitnodir(['--version'])
1031 out, err = self._gitnodir(['--version'])
1031 except OSError, e:
1032 except OSError, e:
1032 if e.errno != 2 or os.name != 'nt':
1033 if e.errno != 2 or os.name != 'nt':
1033 raise
1034 raise
1034 self._gitexecutable = 'git.cmd'
1035 self._gitexecutable = 'git.cmd'
1035 out, err = self._gitnodir(['--version'])
1036 out, err = self._gitnodir(['--version'])
1036 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1037 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1037 if not m:
1038 if not m:
1038 self._ui.warn(_('cannot retrieve git version'))
1039 self._ui.warn(_('cannot retrieve git version'))
1039 return
1040 return
1040 version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1041 version = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1041 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1042 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1042 # despite the docstring comment. For now, error on 1.4.0, warn on
1043 # despite the docstring comment. For now, error on 1.4.0, warn on
1043 # 1.5.0 but attempt to continue.
1044 # 1.5.0 but attempt to continue.
1044 if version < (1, 5, 0):
1045 if version < (1, 5, 0):
1045 raise util.Abort(_('git subrepo requires git 1.6.0 or later'))
1046 raise util.Abort(_('git subrepo requires git 1.6.0 or later'))
1046 elif version < (1, 6, 0):
1047 elif version < (1, 6, 0):
1047 self._ui.warn(_('git subrepo requires git 1.6.0 or later'))
1048 self._ui.warn(_('git subrepo requires git 1.6.0 or later'))
1048
1049
1049 def _gitcommand(self, commands, env=None, stream=False):
1050 def _gitcommand(self, commands, env=None, stream=False):
1050 return self._gitdir(commands, env=env, stream=stream)[0]
1051 return self._gitdir(commands, env=env, stream=stream)[0]
1051
1052
1052 def _gitdir(self, commands, env=None, stream=False):
1053 def _gitdir(self, commands, env=None, stream=False):
1053 return self._gitnodir(commands, env=env, stream=stream,
1054 return self._gitnodir(commands, env=env, stream=stream,
1054 cwd=self._abspath)
1055 cwd=self._abspath)
1055
1056
1056 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1057 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1057 """Calls the git command
1058 """Calls the git command
1058
1059
1059 The method tries to call the git command. Versions prior to 1.6.0
1060 The method tries to call the git command. Versions prior to 1.6.0
1060 are not supported and will very probably fail.
1061 are not supported and will very probably fail.
1061 """
1062 """
1062 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1063 self._ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1063 # unless ui.quiet is set, print git's stderr,
1064 # unless ui.quiet is set, print git's stderr,
1064 # which is mostly progress and useful info
1065 # which is mostly progress and useful info
1065 errpipe = None
1066 errpipe = None
1066 if self._ui.quiet:
1067 if self._ui.quiet:
1067 errpipe = open(os.devnull, 'w')
1068 errpipe = open(os.devnull, 'w')
1068 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1069 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1069 cwd=cwd, env=env, close_fds=util.closefds,
1070 cwd=cwd, env=env, close_fds=util.closefds,
1070 stdout=subprocess.PIPE, stderr=errpipe)
1071 stdout=subprocess.PIPE, stderr=errpipe)
1071 if stream:
1072 if stream:
1072 return p.stdout, None
1073 return p.stdout, None
1073
1074
1074 retdata = p.stdout.read().strip()
1075 retdata = p.stdout.read().strip()
1075 # wait for the child to exit to avoid a race condition.
1076 # wait for the child to exit to avoid a race condition.
1076 p.wait()
1077 p.wait()
1077
1078
1078 if p.returncode != 0 and p.returncode != 1:
1079 if p.returncode != 0 and p.returncode != 1:
1079 # there are certain error codes that are ok
1080 # there are certain error codes that are ok
1080 command = commands[0]
1081 command = commands[0]
1081 if command in ('cat-file', 'symbolic-ref'):
1082 if command in ('cat-file', 'symbolic-ref'):
1082 return retdata, p.returncode
1083 return retdata, p.returncode
1083 # for all others, abort
1084 # for all others, abort
1084 raise util.Abort('git %s error %d in %s' %
1085 raise util.Abort('git %s error %d in %s' %
1085 (command, p.returncode, self._relpath))
1086 (command, p.returncode, self._relpath))
1086
1087
1087 return retdata, p.returncode
1088 return retdata, p.returncode
1088
1089
1089 def _gitmissing(self):
1090 def _gitmissing(self):
1090 return not os.path.exists(os.path.join(self._abspath, '.git'))
1091 return not os.path.exists(os.path.join(self._abspath, '.git'))
1091
1092
1092 def _gitstate(self):
1093 def _gitstate(self):
1093 return self._gitcommand(['rev-parse', 'HEAD'])
1094 return self._gitcommand(['rev-parse', 'HEAD'])
1094
1095
1095 def _gitcurrentbranch(self):
1096 def _gitcurrentbranch(self):
1096 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1097 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1097 if err:
1098 if err:
1098 current = None
1099 current = None
1099 return current
1100 return current
1100
1101
1101 def _gitremote(self, remote):
1102 def _gitremote(self, remote):
1102 out = self._gitcommand(['remote', 'show', '-n', remote])
1103 out = self._gitcommand(['remote', 'show', '-n', remote])
1103 line = out.split('\n')[1]
1104 line = out.split('\n')[1]
1104 i = line.index('URL: ') + len('URL: ')
1105 i = line.index('URL: ') + len('URL: ')
1105 return line[i:]
1106 return line[i:]
1106
1107
1107 def _githavelocally(self, revision):
1108 def _githavelocally(self, revision):
1108 out, code = self._gitdir(['cat-file', '-e', revision])
1109 out, code = self._gitdir(['cat-file', '-e', revision])
1109 return code == 0
1110 return code == 0
1110
1111
1111 def _gitisancestor(self, r1, r2):
1112 def _gitisancestor(self, r1, r2):
1112 base = self._gitcommand(['merge-base', r1, r2])
1113 base = self._gitcommand(['merge-base', r1, r2])
1113 return base == r1
1114 return base == r1
1114
1115
1115 def _gitisbare(self):
1116 def _gitisbare(self):
1116 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1117 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1117
1118
1118 def _gitupdatestat(self):
1119 def _gitupdatestat(self):
1119 """This must be run before git diff-index.
1120 """This must be run before git diff-index.
1120 diff-index only looks at changes to file stat;
1121 diff-index only looks at changes to file stat;
1121 this command looks at file contents and updates the stat."""
1122 this command looks at file contents and updates the stat."""
1122 self._gitcommand(['update-index', '-q', '--refresh'])
1123 self._gitcommand(['update-index', '-q', '--refresh'])
1123
1124
1124 def _gitbranchmap(self):
1125 def _gitbranchmap(self):
1125 '''returns 2 things:
1126 '''returns 2 things:
1126 a map from git branch to revision
1127 a map from git branch to revision
1127 a map from revision to branches'''
1128 a map from revision to branches'''
1128 branch2rev = {}
1129 branch2rev = {}
1129 rev2branch = {}
1130 rev2branch = {}
1130
1131
1131 out = self._gitcommand(['for-each-ref', '--format',
1132 out = self._gitcommand(['for-each-ref', '--format',
1132 '%(objectname) %(refname)'])
1133 '%(objectname) %(refname)'])
1133 for line in out.split('\n'):
1134 for line in out.split('\n'):
1134 revision, ref = line.split(' ')
1135 revision, ref = line.split(' ')
1135 if (not ref.startswith('refs/heads/') and
1136 if (not ref.startswith('refs/heads/') and
1136 not ref.startswith('refs/remotes/')):
1137 not ref.startswith('refs/remotes/')):
1137 continue
1138 continue
1138 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1139 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1139 continue # ignore remote/HEAD redirects
1140 continue # ignore remote/HEAD redirects
1140 branch2rev[ref] = revision
1141 branch2rev[ref] = revision
1141 rev2branch.setdefault(revision, []).append(ref)
1142 rev2branch.setdefault(revision, []).append(ref)
1142 return branch2rev, rev2branch
1143 return branch2rev, rev2branch
1143
1144
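# Illustrative sketch of the 'git for-each-ref' output parsed above and the
# two maps built from it (hashes shortened, names invented):
#
#   1111111 refs/heads/master
#   1111111 refs/remotes/origin/master
#   2222222 refs/heads/topic
#
#   branch2rev = {'refs/heads/master': '1111111',
#                 'refs/remotes/origin/master': '1111111',
#                 'refs/heads/topic': '2222222'}
#   rev2branch = {'1111111': ['refs/heads/master',
#                             'refs/remotes/origin/master'],
#                 '2222222': ['refs/heads/topic']}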
1144 def _gittracking(self, branches):
1145 def _gittracking(self, branches):
1145 'return map of remote branch to local tracking branch'
1146 'return map of remote branch to local tracking branch'
1146 # assumes no more than one local tracking branch for each remote
1147 # assumes no more than one local tracking branch for each remote
1147 tracking = {}
1148 tracking = {}
1148 for b in branches:
1149 for b in branches:
1149 if b.startswith('refs/remotes/'):
1150 if b.startswith('refs/remotes/'):
1150 continue
1151 continue
1151 bname = b.split('/', 2)[2]
1152 bname = b.split('/', 2)[2]
1152 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1153 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1153 if remote:
1154 if remote:
1154 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1155 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1155 tracking['refs/remotes/%s/%s' %
1156 tracking['refs/remotes/%s/%s' %
1156 (remote, ref.split('/', 2)[2])] = b
1157 (remote, ref.split('/', 2)[2])] = b
1157 return tracking
1158 return tracking
1158
1159
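# Illustrative sketch (branch names invented): for a local branch
# refs/heads/master configured with
#
#   branch.master.remote = origin
#   branch.master.merge  = refs/heads/master
#
# the resulting map contains
#   {'refs/remotes/origin/master': 'refs/heads/master'}
# i.e. remote-tracking ref -> local branch that tracks it.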
1159 def _abssource(self, source):
1160 def _abssource(self, source):
1160 if '://' not in source:
1161 if '://' not in source:
1161 # recognize the scp syntax as an absolute source
1162 # recognize the scp syntax as an absolute source
1162 colon = source.find(':')
1163 colon = source.find(':')
1163 if colon != -1 and '/' not in source[:colon]:
1164 if colon != -1 and '/' not in source[:colon]:
1164 return source
1165 return source
1165 self._subsource = source
1166 self._subsource = source
1166 return _abssource(self)
1167 return _abssource(self)
1167
1168
1168 def _fetch(self, source, revision):
1169 def _fetch(self, source, revision):
1169 if self._gitmissing():
1170 if self._gitmissing():
1170 source = self._abssource(source)
1171 source = self._abssource(source)
1171 self._ui.status(_('cloning subrepo %s from %s\n') %
1172 self._ui.status(_('cloning subrepo %s from %s\n') %
1172 (self._relpath, source))
1173 (self._relpath, source))
1173 self._gitnodir(['clone', source, self._abspath])
1174 self._gitnodir(['clone', source, self._abspath])
1174 if self._githavelocally(revision):
1175 if self._githavelocally(revision):
1175 return
1176 return
1176 self._ui.status(_('pulling subrepo %s from %s\n') %
1177 self._ui.status(_('pulling subrepo %s from %s\n') %
1177 (self._relpath, self._gitremote('origin')))
1178 (self._relpath, self._gitremote('origin')))
1178 # try only origin: the originally cloned repo
1179 # try only origin: the originally cloned repo
1179 self._gitcommand(['fetch'])
1180 self._gitcommand(['fetch'])
1180 if not self._githavelocally(revision):
1181 if not self._githavelocally(revision):
1181 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1182 raise util.Abort(_("revision %s does not exist in subrepo %s\n") %
1182 (revision, self._relpath))
1183 (revision, self._relpath))
1183
1184
1184 @annotatesubrepoerror
1185 @annotatesubrepoerror
1185 def dirty(self, ignoreupdate=False):
1186 def dirty(self, ignoreupdate=False):
1186 if self._gitmissing():
1187 if self._gitmissing():
1187 return self._state[1] != ''
1188 return self._state[1] != ''
1188 if self._gitisbare():
1189 if self._gitisbare():
1189 return True
1190 return True
1190 if not ignoreupdate and self._state[1] != self._gitstate():
1191 if not ignoreupdate and self._state[1] != self._gitstate():
1191 # different version checked out
1192 # different version checked out
1192 return True
1193 return True
1193 # check for staged changes or modified files; ignore untracked files
1194 # check for staged changes or modified files; ignore untracked files
1194 self._gitupdatestat()
1195 self._gitupdatestat()
1195 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1196 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1196 return code == 1
1197 return code == 1
1197
1198
1198 def basestate(self):
1199 def basestate(self):
1199 return self._gitstate()
1200 return self._gitstate()
1200
1201
1201 @annotatesubrepoerror
1202 @annotatesubrepoerror
1202 def get(self, state, overwrite=False):
1203 def get(self, state, overwrite=False):
1203 source, revision, kind = state
1204 source, revision, kind = state
1204 if not revision:
1205 if not revision:
1205 self.remove()
1206 self.remove()
1206 return
1207 return
1207 self._fetch(source, revision)
1208 self._fetch(source, revision)
1208 # if the repo was set to be bare, unbare it
1209 # if the repo was set to be bare, unbare it
1209 if self._gitisbare():
1210 if self._gitisbare():
1210 self._gitcommand(['config', 'core.bare', 'false'])
1211 self._gitcommand(['config', 'core.bare', 'false'])
1211 if self._gitstate() == revision:
1212 if self._gitstate() == revision:
1212 self._gitcommand(['reset', '--hard', 'HEAD'])
1213 self._gitcommand(['reset', '--hard', 'HEAD'])
1213 return
1214 return
1214 elif self._gitstate() == revision:
1215 elif self._gitstate() == revision:
1215 if overwrite:
1216 if overwrite:
1216 # first reset the index to unmark new files for commit, because
1217 # first reset the index to unmark new files for commit, because
1217 # reset --hard will otherwise throw away files added for commit,
1218 # reset --hard will otherwise throw away files added for commit,
1218 # not just unmark them.
1219 # not just unmark them.
1219 self._gitcommand(['reset', 'HEAD'])
1220 self._gitcommand(['reset', 'HEAD'])
1220 self._gitcommand(['reset', '--hard', 'HEAD'])
1221 self._gitcommand(['reset', '--hard', 'HEAD'])
1221 return
1222 return
1222 branch2rev, rev2branch = self._gitbranchmap()
1223 branch2rev, rev2branch = self._gitbranchmap()
1223
1224
1224 def checkout(args):
1225 def checkout(args):
1225 cmd = ['checkout']
1226 cmd = ['checkout']
1226 if overwrite:
1227 if overwrite:
1227 # first reset the index to unmark new files for commit, because
1228 # first reset the index to unmark new files for commit, because
1228 # the -f option will otherwise throw away files added for
1229 # the -f option will otherwise throw away files added for
1229 # commit, not just unmark them.
1230 # commit, not just unmark them.
1230 self._gitcommand(['reset', 'HEAD'])
1231 self._gitcommand(['reset', 'HEAD'])
1231 cmd.append('-f')
1232 cmd.append('-f')
1232 self._gitcommand(cmd + args)
1233 self._gitcommand(cmd + args)
1233
1234
1234 def rawcheckout():
1235 def rawcheckout():
1235 # no branch to checkout, check it out with no branch
1236 # no branch to checkout, check it out with no branch
1236 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1237 self._ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1237 self._relpath)
1238 self._relpath)
1238 self._ui.warn(_('check out a git branch if you intend '
1239 self._ui.warn(_('check out a git branch if you intend '
1239 'to make changes\n'))
1240 'to make changes\n'))
1240 checkout(['-q', revision])
1241 checkout(['-q', revision])
1241
1242
1242 if revision not in rev2branch:
1243 if revision not in rev2branch:
1243 rawcheckout()
1244 rawcheckout()
1244 return
1245 return
1245 branches = rev2branch[revision]
1246 branches = rev2branch[revision]
1246 firstlocalbranch = None
1247 firstlocalbranch = None
1247 for b in branches:
1248 for b in branches:
1248 if b == 'refs/heads/master':
1249 if b == 'refs/heads/master':
1249 # master trumps all other branches
1250 # master trumps all other branches
1250 checkout(['refs/heads/master'])
1251 checkout(['refs/heads/master'])
1251 return
1252 return
1252 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1253 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1253 firstlocalbranch = b
1254 firstlocalbranch = b
1254 if firstlocalbranch:
1255 if firstlocalbranch:
1255 checkout([firstlocalbranch])
1256 checkout([firstlocalbranch])
1256 return
1257 return
1257
1258
1258 tracking = self._gittracking(branch2rev.keys())
1259 tracking = self._gittracking(branch2rev.keys())
1259 # choose a remote branch already tracked if possible
1260 # choose a remote branch already tracked if possible
1260 remote = branches[0]
1261 remote = branches[0]
1261 if remote not in tracking:
1262 if remote not in tracking:
1262 for b in branches:
1263 for b in branches:
1263 if b in tracking:
1264 if b in tracking:
1264 remote = b
1265 remote = b
1265 break
1266 break
1266
1267
1267 if remote not in tracking:
1268 if remote not in tracking:
1268 # create a new local tracking branch
1269 # create a new local tracking branch
1269 local = remote.split('/', 3)[3]
1270 local = remote.split('/', 3)[3]
1270 checkout(['-b', local, remote])
1271 checkout(['-b', local, remote])
1271 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1272 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1272 # When updating to a tracked remote branch,
1273 # When updating to a tracked remote branch,
1273 # if the local tracking branch is downstream of it,
1274 # if the local tracking branch is downstream of it,
1274 # a normal `git pull` would have performed a "fast-forward merge"
1275 # a normal `git pull` would have performed a "fast-forward merge"
1275 # which is equivalent to updating the local branch to the remote.
1276 # which is equivalent to updating the local branch to the remote.
1276 # Since we are only looking at branching at update, we need to
1277 # Since we are only looking at branching at update, we need to
1277 # detect this situation and perform this action lazily.
1278 # detect this situation and perform this action lazily.
1278 if tracking[remote] != self._gitcurrentbranch():
1279 if tracking[remote] != self._gitcurrentbranch():
1279 checkout([tracking[remote]])
1280 checkout([tracking[remote]])
1280 self._gitcommand(['merge', '--ff', remote])
1281 self._gitcommand(['merge', '--ff', remote])
1281 else:
1282 else:
1282 # a real merge would be required, just checkout the revision
1283 # a real merge would be required, just checkout the revision
1283 rawcheckout()
1284 rawcheckout()
1284
1285
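# Descriptive summary of the checkout strategy implemented in get() above:
#   1. with no branch pointing at the revision, do a detached checkout
#   2. otherwise prefer refs/heads/master, then any other local branch
#   3. otherwise pick a remote branch, preferring one already tracked
#      locally, and create a tracking branch if none exists
#   4. fast-forward the tracking branch when it is an ancestor of the
#      remote branch, else fall back to a detached checkout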
1285 @annotatesubrepoerror
1286 @annotatesubrepoerror
1286 def commit(self, text, user, date):
1287 def commit(self, text, user, date):
1287 if self._gitmissing():
1288 if self._gitmissing():
1288 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1289 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1289 cmd = ['commit', '-a', '-m', text]
1290 cmd = ['commit', '-a', '-m', text]
1290 env = os.environ.copy()
1291 env = os.environ.copy()
1291 if user:
1292 if user:
1292 cmd += ['--author', user]
1293 cmd += ['--author', user]
1293 if date:
1294 if date:
1294 # git's date parser silently ignores when seconds < 1e9
1295 # git's date parser silently ignores when seconds < 1e9
1295 # convert to ISO8601
1296 # convert to ISO8601
1296 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1297 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1297 '%Y-%m-%dT%H:%M:%S %1%2')
1298 '%Y-%m-%dT%H:%M:%S %1%2')
1298 self._gitcommand(cmd, env=env)
1299 self._gitcommand(cmd, env=env)
1299 # make sure the commit worked; otherwise HEAD might not exist under certain
1300 # make sure the commit worked; otherwise HEAD might not exist under certain
1300 # circumstances
1301 # circumstances
1301 return self._gitstate()
1302 return self._gitstate()
1302
1303
1303 @annotatesubrepoerror
1304 @annotatesubrepoerror
1304 def merge(self, state):
1305 def merge(self, state):
1305 source, revision, kind = state
1306 source, revision, kind = state
1306 self._fetch(source, revision)
1307 self._fetch(source, revision)
1307 base = self._gitcommand(['merge-base', revision, self._state[1]])
1308 base = self._gitcommand(['merge-base', revision, self._state[1]])
1308 self._gitupdatestat()
1309 self._gitupdatestat()
1309 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1310 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1310
1311
1311 def mergefunc():
1312 def mergefunc():
1312 if base == revision:
1313 if base == revision:
1313 self.get(state) # fast forward merge
1314 self.get(state) # fast forward merge
1314 elif base != self._state[1]:
1315 elif base != self._state[1]:
1315 self._gitcommand(['merge', '--no-commit', revision])
1316 self._gitcommand(['merge', '--no-commit', revision])
1316
1317
1317 if self.dirty():
1318 if self.dirty():
1318 if self._gitstate() != revision:
1319 if self._gitstate() != revision:
1319 dirty = self._gitstate() == self._state[1] or code != 0
1320 dirty = self._gitstate() == self._state[1] or code != 0
1320 if _updateprompt(self._ui, self, dirty,
1321 if _updateprompt(self._ui, self, dirty,
1321 self._state[1][:7], revision[:7]):
1322 self._state[1][:7], revision[:7]):
1322 mergefunc()
1323 mergefunc()
1323 else:
1324 else:
1324 mergefunc()
1325 mergefunc()
1325
1326
1326 @annotatesubrepoerror
1327 @annotatesubrepoerror
1327 def push(self, opts):
1328 def push(self, opts):
1328 force = opts.get('force')
1329 force = opts.get('force')
1329
1330
1330 if not self._state[1]:
1331 if not self._state[1]:
1331 return True
1332 return True
1332 if self._gitmissing():
1333 if self._gitmissing():
1333 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1334 raise util.Abort(_("subrepo %s is missing") % self._relpath)
1334 # if a branch in origin contains the revision, nothing to do
1335 # if a branch in origin contains the revision, nothing to do
1335 branch2rev, rev2branch = self._gitbranchmap()
1336 branch2rev, rev2branch = self._gitbranchmap()
1336 if self._state[1] in rev2branch:
1337 if self._state[1] in rev2branch:
1337 for b in rev2branch[self._state[1]]:
1338 for b in rev2branch[self._state[1]]:
1338 if b.startswith('refs/remotes/origin/'):
1339 if b.startswith('refs/remotes/origin/'):
1339 return True
1340 return True
1340 for b, revision in branch2rev.iteritems():
1341 for b, revision in branch2rev.iteritems():
1341 if b.startswith('refs/remotes/origin/'):
1342 if b.startswith('refs/remotes/origin/'):
1342 if self._gitisancestor(self._state[1], revision):
1343 if self._gitisancestor(self._state[1], revision):
1343 return True
1344 return True
1344 # otherwise, try to push the currently checked out branch
1345 # otherwise, try to push the currently checked out branch
1345 cmd = ['push']
1346 cmd = ['push']
1346 if force:
1347 if force:
1347 cmd.append('--force')
1348 cmd.append('--force')
1348
1349
1349 current = self._gitcurrentbranch()
1350 current = self._gitcurrentbranch()
1350 if current:
1351 if current:
1351 # determine if the current branch is even useful
1352 # determine if the current branch is even useful
1352 if not self._gitisancestor(self._state[1], current):
1353 if not self._gitisancestor(self._state[1], current):
1353 self._ui.warn(_('unrelated git branch checked out '
1354 self._ui.warn(_('unrelated git branch checked out '
1354 'in subrepo %s\n') % self._relpath)
1355 'in subrepo %s\n') % self._relpath)
1355 return False
1356 return False
1356 self._ui.status(_('pushing branch %s of subrepo %s\n') %
1357 self._ui.status(_('pushing branch %s of subrepo %s\n') %
1357 (current.split('/', 2)[2], self._relpath))
1358 (current.split('/', 2)[2], self._relpath))
1358 self._gitcommand(cmd + ['origin', current])
1359 self._gitcommand(cmd + ['origin', current])
1359 return True
1360 return True
1360 else:
1361 else:
1361 self._ui.warn(_('no branch checked out in subrepo %s\n'
1362 self._ui.warn(_('no branch checked out in subrepo %s\n'
1362 'cannot push revision %s\n') %
1363 'cannot push revision %s\n') %
1363 (self._relpath, self._state[1]))
1364 (self._relpath, self._state[1]))
1364 return False
1365 return False
1365
1366
1366 @annotatesubrepoerror
1367 @annotatesubrepoerror
1367 def remove(self):
1368 def remove(self):
1368 if self._gitmissing():
1369 if self._gitmissing():
1369 return
1370 return
1370 if self.dirty():
1371 if self.dirty():
1371 self._ui.warn(_('not removing repo %s because '
1372 self._ui.warn(_('not removing repo %s because '
1372 'it has changes.\n') % self._relpath)
1373 'it has changes.\n') % self._relpath)
1373 return
1374 return
1374 # we can't fully delete the repository as it may contain
1375 # we can't fully delete the repository as it may contain
1375 # local-only history
1376 # local-only history
1376 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1377 self._ui.note(_('removing subrepo %s\n') % self._relpath)
1377 self._gitcommand(['config', 'core.bare', 'true'])
1378 self._gitcommand(['config', 'core.bare', 'true'])
1378 for f in os.listdir(self._abspath):
1379 for f in os.listdir(self._abspath):
1379 if f == '.git':
1380 if f == '.git':
1380 continue
1381 continue
1381 path = os.path.join(self._abspath, f)
1382 path = os.path.join(self._abspath, f)
1382 if os.path.isdir(path) and not os.path.islink(path):
1383 if os.path.isdir(path) and not os.path.islink(path):
1383 shutil.rmtree(path)
1384 shutil.rmtree(path)
1384 else:
1385 else:
1385 os.remove(path)
1386 os.remove(path)
1386
1387
1387 def archive(self, ui, archiver, prefix, match=None):
1388 def archive(self, ui, archiver, prefix, match=None):
1388 total = 0
1389 total = 0
1389 source, revision = self._state
1390 source, revision = self._state
1390 if not revision:
1391 if not revision:
1391 return total
1392 return total
1392 self._fetch(source, revision)
1393 self._fetch(source, revision)
1393
1394
1394 # Run git's native archive command and parse its tar output.
1395 # Run git's native archive command and parse its tar output.
1395 # This should be much faster than manually traversing the trees
1396 # This should be much faster than manually traversing the trees
1396 # and objects with many subprocess calls.
1397 # and objects with many subprocess calls.
1397 tarstream = self._gitcommand(['archive', revision], stream=True)
1398 tarstream = self._gitcommand(['archive', revision], stream=True)
1398 tar = tarfile.open(fileobj=tarstream, mode='r|')
1399 tar = tarfile.open(fileobj=tarstream, mode='r|')
1399 relpath = subrelpath(self)
1400 relpath = subrelpath(self)
1400 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1401 ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1401 for i, info in enumerate(tar):
1402 for i, info in enumerate(tar):
1402 if info.isdir():
1403 if info.isdir():
1403 continue
1404 continue
1404 if match and not match(info.name):
1405 if match and not match(info.name):
1405 continue
1406 continue
1406 if info.issym():
1407 if info.issym():
1407 data = info.linkname
1408 data = info.linkname
1408 else:
1409 else:
1409 data = tar.extractfile(info).read()
1410 data = tar.extractfile(info).read()
1410 archiver.addfile(os.path.join(prefix, self._path, info.name),
1411 archiver.addfile(os.path.join(prefix, self._path, info.name),
1411 info.mode, info.issym(), data)
1412 info.mode, info.issym(), data)
1412 total += 1
1413 total += 1
1413 ui.progress(_('archiving (%s)') % relpath, i + 1,
1414 ui.progress(_('archiving (%s)') % relpath, i + 1,
1414 unit=_('files'))
1415 unit=_('files'))
1415 ui.progress(_('archiving (%s)') % relpath, None)
1416 ui.progress(_('archiving (%s)') % relpath, None)
1416 return total
1417 return total
1417
1418
1418
1419
1419 @annotatesubrepoerror
1420 @annotatesubrepoerror
1420 def status(self, rev2, **opts):
1421 def status(self, rev2, **opts):
1421 rev1 = self._state[1]
1422 rev1 = self._state[1]
1422 if self._gitmissing() or not rev1:
1423 if self._gitmissing() or not rev1:
1423 # if the repo is missing, return no results
1424 # if the repo is missing, return no results
1424 return [], [], [], [], [], [], []
1425 return [], [], [], [], [], [], []
1425 modified, added, removed = [], [], []
1426 modified, added, removed = [], [], []
1426 self._gitupdatestat()
1427 self._gitupdatestat()
1427 if rev2:
1428 if rev2:
1428 command = ['diff-tree', rev1, rev2]
1429 command = ['diff-tree', rev1, rev2]
1429 else:
1430 else:
1430 command = ['diff-index', rev1]
1431 command = ['diff-index', rev1]
1431 out = self._gitcommand(command)
1432 out = self._gitcommand(command)
1432 for line in out.split('\n'):
1433 for line in out.split('\n'):
1433 tab = line.find('\t')
1434 tab = line.find('\t')
1434 if tab == -1:
1435 if tab == -1:
1435 continue
1436 continue
1436 status, f = line[tab - 1], line[tab + 1:]
1437 status, f = line[tab - 1], line[tab + 1:]
1437 if status == 'M':
1438 if status == 'M':
1438 modified.append(f)
1439 modified.append(f)
1439 elif status == 'A':
1440 elif status == 'A':
1440 added.append(f)
1441 added.append(f)
1441 elif status == 'D':
1442 elif status == 'D':
1442 removed.append(f)
1443 removed.append(f)
1443
1444
1444 deleted = unknown = ignored = clean = []
1445 deleted = unknown = ignored = clean = []
1445 return modified, added, removed, deleted, unknown, ignored, clean
1446 return modified, added, removed, deleted, unknown, ignored, clean
1446
1447
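# Illustrative sketch of a raw diff-index/diff-tree line as consumed by
# status() above (exact field layout assumed from the tab-based slicing):
#
#   :100644 100644 <oldsha> <newsha> M<TAB>path/to/file
#
# the character just before the tab ('M', 'A' or 'D') selects the list the
# path is appended to; any other line is ignored.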
1447 types = {
1448 types = {
1448 'hg': hgsubrepo,
1449 'hg': hgsubrepo,
1449 'svn': svnsubrepo,
1450 'svn': svnsubrepo,
1450 'git': gitsubrepo,
1451 'git': gitsubrepo,
1451 }
1452 }
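# Dispatch sketch (caller shape assumed; the subrepo() factory defined
# earlier in this module is not part of this hunk):
#
#   source, revision, kind = ctx.substate[path]
#   sub = types[kind](ctx, path, (source, revision, kind))
#
# i.e. the kind recorded for the subrepo path selects which of the three
# subrepo classes above is instantiated.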
@@ -1,807 +1,814 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import errno, getpass, os, socket, sys, tempfile, traceback
9 import errno, getpass, os, socket, sys, tempfile, traceback
10 import config, scmutil, util, error, formatter
10 import config, scmutil, util, error, formatter
11
11
12 class ui(object):
12 class ui(object):
13 def __init__(self, src=None):
13 def __init__(self, src=None):
14 self._buffers = []
14 self._buffers = []
15 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
15 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
16 self._reportuntrusted = True
16 self._reportuntrusted = True
17 self._ocfg = config.config() # overlay
17 self._ocfg = config.config() # overlay
18 self._tcfg = config.config() # trusted
18 self._tcfg = config.config() # trusted
19 self._ucfg = config.config() # untrusted
19 self._ucfg = config.config() # untrusted
20 self._trustusers = set()
20 self._trustusers = set()
21 self._trustgroups = set()
21 self._trustgroups = set()
22 self.callhooks = True
22 self.callhooks = True
23
23
24 if src:
24 if src:
25 self.fout = src.fout
25 self.fout = src.fout
26 self.ferr = src.ferr
26 self.ferr = src.ferr
27 self.fin = src.fin
27 self.fin = src.fin
28
28
29 self._tcfg = src._tcfg.copy()
29 self._tcfg = src._tcfg.copy()
30 self._ucfg = src._ucfg.copy()
30 self._ucfg = src._ucfg.copy()
31 self._ocfg = src._ocfg.copy()
31 self._ocfg = src._ocfg.copy()
32 self._trustusers = src._trustusers.copy()
32 self._trustusers = src._trustusers.copy()
33 self._trustgroups = src._trustgroups.copy()
33 self._trustgroups = src._trustgroups.copy()
34 self.environ = src.environ
34 self.environ = src.environ
35 self.callhooks = src.callhooks
35 self.callhooks = src.callhooks
36 self.fixconfig()
36 self.fixconfig()
37 else:
37 else:
38 self.fout = sys.stdout
38 self.fout = sys.stdout
39 self.ferr = sys.stderr
39 self.ferr = sys.stderr
40 self.fin = sys.stdin
40 self.fin = sys.stdin
41
41
42 # shared read-only environment
42 # shared read-only environment
43 self.environ = os.environ
43 self.environ = os.environ
44 # we always trust global config files
44 # we always trust global config files
45 for f in scmutil.rcpath():
45 for f in scmutil.rcpath():
46 self.readconfig(f, trust=True)
46 self.readconfig(f, trust=True)
47
47
48 def copy(self):
48 def copy(self):
49 return self.__class__(self)
49 return self.__class__(self)
50
50
51 def formatter(self, topic, opts):
51 def formatter(self, topic, opts):
52 return formatter.formatter(self, topic, opts)
52 return formatter.formatter(self, topic, opts)
53
53
54 def _trusted(self, fp, f):
54 def _trusted(self, fp, f):
55 st = util.fstat(fp)
55 st = util.fstat(fp)
56 if util.isowner(st):
56 if util.isowner(st):
57 return True
57 return True
58
58
59 tusers, tgroups = self._trustusers, self._trustgroups
59 tusers, tgroups = self._trustusers, self._trustgroups
60 if '*' in tusers or '*' in tgroups:
60 if '*' in tusers or '*' in tgroups:
61 return True
61 return True
62
62
63 user = util.username(st.st_uid)
63 user = util.username(st.st_uid)
64 group = util.groupname(st.st_gid)
64 group = util.groupname(st.st_gid)
65 if user in tusers or group in tgroups or user == util.username():
65 if user in tusers or group in tgroups or user == util.username():
66 return True
66 return True
67
67
68 if self._reportuntrusted:
68 if self._reportuntrusted:
69 self.warn(_('not trusting file %s from untrusted '
69 self.warn(_('not trusting file %s from untrusted '
70 'user %s, group %s\n') % (f, user, group))
70 'user %s, group %s\n') % (f, user, group))
71 return False
71 return False
72
72
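# Illustrative sketch of the hgrc section consulted above (user and group
# names invented):
#
#   [trusted]
#   users = alice, bob
#   groups = wheel
#
# a '*' entry in either list trusts everybody; otherwise only files owned
# by the current user or by a listed user/group are read as trusted.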
73 def readconfig(self, filename, root=None, trust=False,
73 def readconfig(self, filename, root=None, trust=False,
74 sections=None, remap=None):
74 sections=None, remap=None):
75 try:
75 try:
76 fp = open(filename)
76 fp = open(filename)
77 except IOError:
77 except IOError:
78 if not sections: # ignore unless we were looking for something
78 if not sections: # ignore unless we were looking for something
79 return
79 return
80 raise
80 raise
81
81
82 cfg = config.config()
82 cfg = config.config()
83 trusted = sections or trust or self._trusted(fp, filename)
83 trusted = sections or trust or self._trusted(fp, filename)
84
84
85 try:
85 try:
86 cfg.read(filename, fp, sections=sections, remap=remap)
86 cfg.read(filename, fp, sections=sections, remap=remap)
87 fp.close()
87 fp.close()
88 except error.ConfigError, inst:
88 except error.ConfigError, inst:
89 if trusted:
89 if trusted:
90 raise
90 raise
91 self.warn(_("ignored: %s\n") % str(inst))
91 self.warn(_("ignored: %s\n") % str(inst))
92
92
93 if self.plain():
93 if self.plain():
94 for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
94 for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
95 'logtemplate', 'style',
95 'logtemplate', 'style',
96 'traceback', 'verbose'):
96 'traceback', 'verbose'):
97 if k in cfg['ui']:
97 if k in cfg['ui']:
98 del cfg['ui'][k]
98 del cfg['ui'][k]
99 for k, v in cfg.items('defaults'):
99 for k, v in cfg.items('defaults'):
100 del cfg['defaults'][k]
100 del cfg['defaults'][k]
101 # Don't remove aliases from the configuration if in the exceptionlist
101 # Don't remove aliases from the configuration if in the exceptionlist
102 if self.plain('alias'):
102 if self.plain('alias'):
103 for k, v in cfg.items('alias'):
103 for k, v in cfg.items('alias'):
104 del cfg['alias'][k]
104 del cfg['alias'][k]
105
105
106 if trusted:
106 if trusted:
107 self._tcfg.update(cfg)
107 self._tcfg.update(cfg)
108 self._tcfg.update(self._ocfg)
108 self._tcfg.update(self._ocfg)
109 self._ucfg.update(cfg)
109 self._ucfg.update(cfg)
110 self._ucfg.update(self._ocfg)
110 self._ucfg.update(self._ocfg)
111
111
112 if root is None:
112 if root is None:
113 root = os.path.expanduser('~')
113 root = os.path.expanduser('~')
114 self.fixconfig(root=root)
114 self.fixconfig(root=root)
115
115
116 def fixconfig(self, root=None, section=None):
116 def fixconfig(self, root=None, section=None):
117 if section in (None, 'paths'):
117 if section in (None, 'paths'):
118 # expand vars and ~
118 # expand vars and ~
119 # translate paths relative to root (or home) into absolute paths
119 # translate paths relative to root (or home) into absolute paths
120 root = root or os.getcwd()
120 root = root or os.getcwd()
121 for c in self._tcfg, self._ucfg, self._ocfg:
121 for c in self._tcfg, self._ucfg, self._ocfg:
122 for n, p in c.items('paths'):
122 for n, p in c.items('paths'):
123 if not p:
123 if not p:
124 continue
124 continue
125 if '%%' in p:
125 if '%%' in p:
126 self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
126 self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
127 % (n, p, self.configsource('paths', n)))
127 % (n, p, self.configsource('paths', n)))
128 p = p.replace('%%', '%')
128 p = p.replace('%%', '%')
129 p = util.expandpath(p)
129 p = util.expandpath(p)
130 if not util.hasscheme(p) and not os.path.isabs(p):
130 if not util.hasscheme(p) and not os.path.isabs(p):
131 p = os.path.normpath(os.path.join(root, p))
131 p = os.path.normpath(os.path.join(root, p))
132 c.set("paths", n, p)
132 c.set("paths", n, p)
133
133
134 if section in (None, 'ui'):
134 if section in (None, 'ui'):
135 # update ui options
135 # update ui options
136 self.debugflag = self.configbool('ui', 'debug')
136 self.debugflag = self.configbool('ui', 'debug')
137 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
137 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
138 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
138 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
139 if self.verbose and self.quiet:
139 if self.verbose and self.quiet:
140 self.quiet = self.verbose = False
140 self.quiet = self.verbose = False
141 self._reportuntrusted = self.debugflag or self.configbool("ui",
141 self._reportuntrusted = self.debugflag or self.configbool("ui",
142 "report_untrusted", True)
142 "report_untrusted", True)
143 self.tracebackflag = self.configbool('ui', 'traceback', False)
143 self.tracebackflag = self.configbool('ui', 'traceback', False)
144
144
145 if section in (None, 'trusted'):
145 if section in (None, 'trusted'):
146 # update trust information
146 # update trust information
147 self._trustusers.update(self.configlist('trusted', 'users'))
147 self._trustusers.update(self.configlist('trusted', 'users'))
148 self._trustgroups.update(self.configlist('trusted', 'groups'))
148 self._trustgroups.update(self.configlist('trusted', 'groups'))
149
149
150 def backupconfig(self, section, item):
150 def backupconfig(self, section, item):
151 return (self._ocfg.backup(section, item),
151 return (self._ocfg.backup(section, item),
152 self._tcfg.backup(section, item),
152 self._tcfg.backup(section, item),
153 self._ucfg.backup(section, item),)
153 self._ucfg.backup(section, item),)
154 def restoreconfig(self, data):
154 def restoreconfig(self, data):
155 self._ocfg.restore(data[0])
155 self._ocfg.restore(data[0])
156 self._tcfg.restore(data[1])
156 self._tcfg.restore(data[1])
157 self._ucfg.restore(data[2])
157 self._ucfg.restore(data[2])
158
158
159 def setconfig(self, section, name, value, overlay=True):
159 def setconfig(self, section, name, value, overlay=True):
160 if overlay:
160 if overlay:
161 self._ocfg.set(section, name, value)
161 self._ocfg.set(section, name, value)
162 self._tcfg.set(section, name, value)
162 self._tcfg.set(section, name, value)
163 self._ucfg.set(section, name, value)
163 self._ucfg.set(section, name, value)
164 self.fixconfig(section=section)
164 self.fixconfig(section=section)
165
165
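# --- Editor's sketch (not part of ui.py): temporarily overriding a value
# with backupconfig()/restoreconfig(). Purely illustrative.
from mercurial import ui as uimod

u = uimod.ui()
u.setconfig('ui', 'quiet', 'yes')
saved = u.backupconfig('ui', 'quiet')   # snapshot of all three config layers
u.setconfig('ui', 'quiet', 'no')        # override...
u.restoreconfig(saved)                  # ...and put the old value back
assert u.configbool('ui', 'quiet')
# --- end of sketch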
166 def _data(self, untrusted):
166 def _data(self, untrusted):
167 return untrusted and self._ucfg or self._tcfg
167 return untrusted and self._ucfg or self._tcfg
168
168
169 def configsource(self, section, name, untrusted=False):
169 def configsource(self, section, name, untrusted=False):
170 return self._data(untrusted).source(section, name) or 'none'
170 return self._data(untrusted).source(section, name) or 'none'
171
171
172 def config(self, section, name, default=None, untrusted=False):
172 def config(self, section, name, default=None, untrusted=False):
173 if isinstance(name, list):
173 if isinstance(name, list):
174 alternates = name
174 alternates = name
175 else:
175 else:
176 alternates = [name]
176 alternates = [name]
177
177
178 for n in alternates:
178 for n in alternates:
179 value = self._data(untrusted).get(section, n, None)
179 value = self._data(untrusted).get(section, n, None)
180 if value is not None:
180 if value is not None:
181 name = n
181 name = n
182 break
182 break
183 else:
183 else:
184 value = default
184 value = default
185
185
186 if self.debugflag and not untrusted and self._reportuntrusted:
186 if self.debugflag and not untrusted and self._reportuntrusted:
187 uvalue = self._ucfg.get(section, name)
187 uvalue = self._ucfg.get(section, name)
188 if uvalue is not None and uvalue != value:
188 if uvalue is not None and uvalue != value:
189 self.debug("ignoring untrusted configuration option "
189 self.debug("ignoring untrusted configuration option "
190 "%s.%s = %s\n" % (section, name, uvalue))
190 "%s.%s = %s\n" % (section, name, uvalue))
191 return value
191 return value
192
192
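# --- Editor's sketch (not part of ui.py): config() accepts either a single
# name or a list of alternate names and returns the first one that is set.
# The config names below are made up for illustration.
from mercurial import ui as uimod

u = uimod.ui()
u.setconfig('ui', 'editor.new', 'vim')
assert u.config('ui', ['editor.new', 'editor'], 'vi') == 'vim'
assert u.config('ui', ['nosuch1', 'nosuch2'], 'vi') == 'vi'   # falls back to default
# --- end of sketch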
193 def configpath(self, section, name, default=None, untrusted=False):
193 def configpath(self, section, name, default=None, untrusted=False):
194 'get a path config item, expanded relative to repo root or config file'
194 'get a path config item, expanded relative to repo root or config file'
195 v = self.config(section, name, default, untrusted)
195 v = self.config(section, name, default, untrusted)
196 if v is None:
196 if v is None:
197 return None
197 return None
198 if not os.path.isabs(v) or "://" not in v:
198 if not os.path.isabs(v) or "://" not in v:
199 src = self.configsource(section, name, untrusted)
199 src = self.configsource(section, name, untrusted)
200 if ':' in src:
200 if ':' in src:
201 base = os.path.dirname(src.rsplit(':')[0])
201 base = os.path.dirname(src.rsplit(':')[0])
202 v = os.path.join(base, os.path.expanduser(v))
202 v = os.path.join(base, os.path.expanduser(v))
203 return v
203 return v
204
204
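# --- Editor's sketch (not part of ui.py): configpath() resolves a relative
# value against the directory of the config file it came from. The file
# path and its contents below are hypothetical.
from mercurial import ui as uimod

u = uimod.ui()
u.readconfig('/etc/mercurial/hgrc')   # hypothetical; readconfig() ignores a missing file
# If that file contained
#   [web]
#   certificate = certs/hg.pem
# the value would come back as '/etc/mercurial/certs/hg.pem':
cert = u.configpath('web', 'certificate')
# --- end of sketch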
205 def configbool(self, section, name, default=False, untrusted=False):
205 def configbool(self, section, name, default=False, untrusted=False):
206 """parse a configuration element as a boolean
206 """parse a configuration element as a boolean
207
207
208 >>> u = ui(); s = 'foo'
208 >>> u = ui(); s = 'foo'
209 >>> u.setconfig(s, 'true', 'yes')
209 >>> u.setconfig(s, 'true', 'yes')
210 >>> u.configbool(s, 'true')
210 >>> u.configbool(s, 'true')
211 True
211 True
212 >>> u.setconfig(s, 'false', 'no')
212 >>> u.setconfig(s, 'false', 'no')
213 >>> u.configbool(s, 'false')
213 >>> u.configbool(s, 'false')
214 False
214 False
215 >>> u.configbool(s, 'unknown')
215 >>> u.configbool(s, 'unknown')
216 False
216 False
217 >>> u.configbool(s, 'unknown', True)
217 >>> u.configbool(s, 'unknown', True)
218 True
218 True
219 >>> u.setconfig(s, 'invalid', 'somevalue')
219 >>> u.setconfig(s, 'invalid', 'somevalue')
220 >>> u.configbool(s, 'invalid')
220 >>> u.configbool(s, 'invalid')
221 Traceback (most recent call last):
221 Traceback (most recent call last):
222 ...
222 ...
223 ConfigError: foo.invalid is not a boolean ('somevalue')
223 ConfigError: foo.invalid is not a boolean ('somevalue')
224 """
224 """
225
225
226 v = self.config(section, name, None, untrusted)
226 v = self.config(section, name, None, untrusted)
227 if v is None:
227 if v is None:
228 return default
228 return default
229 if isinstance(v, bool):
229 if isinstance(v, bool):
230 return v
230 return v
231 b = util.parsebool(v)
231 b = util.parsebool(v)
232 if b is None:
232 if b is None:
233 raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
233 raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
234 % (section, name, v))
234 % (section, name, v))
235 return b
235 return b
236
236
237 def configint(self, section, name, default=None, untrusted=False):
237 def configint(self, section, name, default=None, untrusted=False):
238 """parse a configuration element as an integer
238 """parse a configuration element as an integer
239
239
240 >>> u = ui(); s = 'foo'
240 >>> u = ui(); s = 'foo'
241 >>> u.setconfig(s, 'int1', '42')
241 >>> u.setconfig(s, 'int1', '42')
242 >>> u.configint(s, 'int1')
242 >>> u.configint(s, 'int1')
243 42
243 42
244 >>> u.setconfig(s, 'int2', '-42')
244 >>> u.setconfig(s, 'int2', '-42')
245 >>> u.configint(s, 'int2')
245 >>> u.configint(s, 'int2')
246 -42
246 -42
247 >>> u.configint(s, 'unknown', 7)
247 >>> u.configint(s, 'unknown', 7)
248 7
248 7
249 >>> u.setconfig(s, 'invalid', 'somevalue')
249 >>> u.setconfig(s, 'invalid', 'somevalue')
250 >>> u.configint(s, 'invalid')
250 >>> u.configint(s, 'invalid')
251 Traceback (most recent call last):
251 Traceback (most recent call last):
252 ...
252 ...
253 ConfigError: foo.invalid is not an integer ('somevalue')
253 ConfigError: foo.invalid is not an integer ('somevalue')
254 """
254 """
255
255
256 v = self.config(section, name, None, untrusted)
256 v = self.config(section, name, None, untrusted)
257 if v is None:
257 if v is None:
258 return default
258 return default
259 try:
259 try:
260 return int(v)
260 return int(v)
261 except ValueError:
261 except ValueError:
262 raise error.ConfigError(_("%s.%s is not an integer ('%s')")
262 raise error.ConfigError(_("%s.%s is not an integer ('%s')")
263 % (section, name, v))
263 % (section, name, v))
264
264
265 def configbytes(self, section, name, default=0, untrusted=False):
265 def configbytes(self, section, name, default=0, untrusted=False):
266 """parse a configuration element as a quantity in bytes
266 """parse a configuration element as a quantity in bytes
267
267
268 Units can be specified as b (bytes), k or kb (kilobytes), m or
268 Units can be specified as b (bytes), k or kb (kilobytes), m or
269 mb (megabytes), g or gb (gigabytes).
269 mb (megabytes), g or gb (gigabytes).
270
270
271 >>> u = ui(); s = 'foo'
271 >>> u = ui(); s = 'foo'
272 >>> u.setconfig(s, 'val1', '42')
272 >>> u.setconfig(s, 'val1', '42')
273 >>> u.configbytes(s, 'val1')
273 >>> u.configbytes(s, 'val1')
274 42
274 42
275 >>> u.setconfig(s, 'val2', '42.5 kb')
275 >>> u.setconfig(s, 'val2', '42.5 kb')
276 >>> u.configbytes(s, 'val2')
276 >>> u.configbytes(s, 'val2')
277 43520
277 43520
278 >>> u.configbytes(s, 'unknown', '7 MB')
278 >>> u.configbytes(s, 'unknown', '7 MB')
279 7340032
279 7340032
280 >>> u.setconfig(s, 'invalid', 'somevalue')
280 >>> u.setconfig(s, 'invalid', 'somevalue')
281 >>> u.configbytes(s, 'invalid')
281 >>> u.configbytes(s, 'invalid')
282 Traceback (most recent call last):
282 Traceback (most recent call last):
283 ...
283 ...
284 ConfigError: foo.invalid is not a byte quantity ('somevalue')
284 ConfigError: foo.invalid is not a byte quantity ('somevalue')
285 """
285 """
286
286
287 value = self.config(section, name)
287 value = self.config(section, name)
288 if value is None:
288 if value is None:
289 if not isinstance(default, str):
289 if not isinstance(default, str):
290 return default
290 return default
291 value = default
291 value = default
292 try:
292 try:
293 return util.sizetoint(value)
293 return util.sizetoint(value)
294 except error.ParseError:
294 except error.ParseError:
295 raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
295 raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
296 % (section, name, value))
296 % (section, name, value))
297
297
298 def configlist(self, section, name, default=None, untrusted=False):
298 def configlist(self, section, name, default=None, untrusted=False):
299 """parse a configuration element as a list of comma/space separated
299 """parse a configuration element as a list of comma/space separated
300 strings
300 strings
301
301
302 >>> u = ui(); s = 'foo'
302 >>> u = ui(); s = 'foo'
303 >>> u.setconfig(s, 'list1', 'this,is "a small" ,test')
303 >>> u.setconfig(s, 'list1', 'this,is "a small" ,test')
304 >>> u.configlist(s, 'list1')
304 >>> u.configlist(s, 'list1')
305 ['this', 'is', 'a small', 'test']
305 ['this', 'is', 'a small', 'test']
306 """
306 """
307
307
308 def _parse_plain(parts, s, offset):
308 def _parse_plain(parts, s, offset):
309 whitespace = False
309 whitespace = False
310 while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
310 while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
311 whitespace = True
311 whitespace = True
312 offset += 1
312 offset += 1
313 if offset >= len(s):
313 if offset >= len(s):
314 return None, parts, offset
314 return None, parts, offset
315 if whitespace:
315 if whitespace:
316 parts.append('')
316 parts.append('')
317 if s[offset] == '"' and not parts[-1]:
317 if s[offset] == '"' and not parts[-1]:
318 return _parse_quote, parts, offset + 1
318 return _parse_quote, parts, offset + 1
319 elif s[offset] == '"' and parts[-1][-1] == '\\':
319 elif s[offset] == '"' and parts[-1][-1] == '\\':
320 parts[-1] = parts[-1][:-1] + s[offset]
320 parts[-1] = parts[-1][:-1] + s[offset]
321 return _parse_plain, parts, offset + 1
321 return _parse_plain, parts, offset + 1
322 parts[-1] += s[offset]
322 parts[-1] += s[offset]
323 return _parse_plain, parts, offset + 1
323 return _parse_plain, parts, offset + 1
324
324
325 def _parse_quote(parts, s, offset):
325 def _parse_quote(parts, s, offset):
326 if offset < len(s) and s[offset] == '"': # ""
326 if offset < len(s) and s[offset] == '"': # ""
327 parts.append('')
327 parts.append('')
328 offset += 1
328 offset += 1
329 while offset < len(s) and (s[offset].isspace() or
329 while offset < len(s) and (s[offset].isspace() or
330 s[offset] == ','):
330 s[offset] == ','):
331 offset += 1
331 offset += 1
332 return _parse_plain, parts, offset
332 return _parse_plain, parts, offset
333
333
334 while offset < len(s) and s[offset] != '"':
334 while offset < len(s) and s[offset] != '"':
335 if (s[offset] == '\\' and offset + 1 < len(s)
335 if (s[offset] == '\\' and offset + 1 < len(s)
336 and s[offset + 1] == '"'):
336 and s[offset + 1] == '"'):
337 offset += 1
337 offset += 1
338 parts[-1] += '"'
338 parts[-1] += '"'
339 else:
339 else:
340 parts[-1] += s[offset]
340 parts[-1] += s[offset]
341 offset += 1
341 offset += 1
342
342
343 if offset >= len(s):
343 if offset >= len(s):
344 real_parts = _configlist(parts[-1])
344 real_parts = _configlist(parts[-1])
345 if not real_parts:
345 if not real_parts:
346 parts[-1] = '"'
346 parts[-1] = '"'
347 else:
347 else:
348 real_parts[0] = '"' + real_parts[0]
348 real_parts[0] = '"' + real_parts[0]
349 parts = parts[:-1]
349 parts = parts[:-1]
350 parts.extend(real_parts)
350 parts.extend(real_parts)
351 return None, parts, offset
351 return None, parts, offset
352
352
353 offset += 1
353 offset += 1
354 while offset < len(s) and s[offset] in [' ', ',']:
354 while offset < len(s) and s[offset] in [' ', ',']:
355 offset += 1
355 offset += 1
356
356
357 if offset < len(s):
357 if offset < len(s):
358 if offset + 1 == len(s) and s[offset] == '"':
358 if offset + 1 == len(s) and s[offset] == '"':
359 parts[-1] += '"'
359 parts[-1] += '"'
360 offset += 1
360 offset += 1
361 else:
361 else:
362 parts.append('')
362 parts.append('')
363 else:
363 else:
364 return None, parts, offset
364 return None, parts, offset
365
365
366 return _parse_plain, parts, offset
366 return _parse_plain, parts, offset
367
367
368 def _configlist(s):
368 def _configlist(s):
369 s = s.rstrip(' ,')
369 s = s.rstrip(' ,')
370 if not s:
370 if not s:
371 return []
371 return []
372 parser, parts, offset = _parse_plain, [''], 0
372 parser, parts, offset = _parse_plain, [''], 0
373 while parser:
373 while parser:
374 parser, parts, offset = parser(parts, s, offset)
374 parser, parts, offset = parser(parts, s, offset)
375 return parts
375 return parts
376
376
377 result = self.config(section, name, untrusted=untrusted)
377 result = self.config(section, name, untrusted=untrusted)
378 if result is None:
378 if result is None:
379 result = default or []
379 result = default or []
380 if isinstance(result, basestring):
380 if isinstance(result, basestring):
381 result = _configlist(result.lstrip(' ,\n'))
381 result = _configlist(result.lstrip(' ,\n'))
382 if result is None:
382 if result is None:
383 result = default or []
383 result = default or []
384 return result
384 return result
385
385
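# --- Editor's sketch (not part of ui.py): quoted items in a list value may
# contain spaces and commas. Purely illustrative.
from mercurial import ui as uimod

u = uimod.ui()
u.setconfig('foo', 'list2', 'this, "is a, quoted" item')
assert u.configlist('foo', 'list2') == ['this', 'is a, quoted', 'item']
# --- end of sketch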
386 def has_section(self, section, untrusted=False):
386 def has_section(self, section, untrusted=False):
387 '''tell whether section exists in config.'''
387 '''tell whether section exists in config.'''
388 return section in self._data(untrusted)
388 return section in self._data(untrusted)
389
389
390 def configitems(self, section, untrusted=False):
390 def configitems(self, section, untrusted=False):
391 items = self._data(untrusted).items(section)
391 items = self._data(untrusted).items(section)
392 if self.debugflag and not untrusted and self._reportuntrusted:
392 if self.debugflag and not untrusted and self._reportuntrusted:
393 for k, v in self._ucfg.items(section):
393 for k, v in self._ucfg.items(section):
394 if self._tcfg.get(section, k) != v:
394 if self._tcfg.get(section, k) != v:
395 self.debug("ignoring untrusted configuration option "
395 self.debug("ignoring untrusted configuration option "
396 "%s.%s = %s\n" % (section, k, v))
396 "%s.%s = %s\n" % (section, k, v))
397 return items
397 return items
398
398
399 def walkconfig(self, untrusted=False):
399 def walkconfig(self, untrusted=False):
400 cfg = self._data(untrusted)
400 cfg = self._data(untrusted)
401 for section in cfg.sections():
401 for section in cfg.sections():
402 for name, value in self.configitems(section, untrusted):
402 for name, value in self.configitems(section, untrusted):
403 yield section, name, value
403 yield section, name, value
404
404
405 def plain(self, feature=None):
405 def plain(self, feature=None):
406 '''is plain mode active?
406 '''is plain mode active?
407
407
408 Plain mode means that all configuration variables which affect
408 Plain mode means that all configuration variables which affect
409 the behavior and output of Mercurial should be
409 the behavior and output of Mercurial should be
410 ignored. Additionally, the output should be stable,
410 ignored. Additionally, the output should be stable,
411 reproducible and suitable for use in scripts or applications.
411 reproducible and suitable for use in scripts or applications.
412
412
413 The only way to trigger plain mode is by setting either the
413 The only way to trigger plain mode is by setting either the
414 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
414 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
415
415
416 The return value can either be
416 The return value can either be
417 - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
417 - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
418 - True otherwise
418 - True otherwise
419 '''
419 '''
420 if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
420 if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
421 return False
421 return False
422 exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
422 exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
423 if feature and exceptions:
423 if feature and exceptions:
424 return feature not in exceptions
424 return feature not in exceptions
425 return True
425 return True
426
426
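# --- Editor's sketch (not part of ui.py): HGPLAIN switches plain mode on,
# and HGPLAINEXCEPT lists features that keep their normal behavior.
# Illustrative only; it mutates os.environ.
import os
from mercurial import ui as uimod

os.environ['HGPLAIN'] = '1'
os.environ['HGPLAINEXCEPT'] = 'i18n,alias'
u = uimod.ui()
assert u.plain()              # plain mode is active...
assert not u.plain('i18n')    # ...except for the listed features
# --- end of sketch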
427 def username(self):
427 def username(self):
428 """Return default username to be used in commits.
428 """Return default username to be used in commits.
429
429
430 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
430 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
431 searching stops at the first of these that is set.
431 searching stops at the first of these that is set.
432 If not found and ui.askusername is True, ask the user, else use
432 If not found and ui.askusername is True, ask the user, else use
433 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
433 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
434 """
434 """
435 user = os.environ.get("HGUSER")
435 user = os.environ.get("HGUSER")
436 if user is None:
436 if user is None:
437 user = self.config("ui", "username")
437 user = self.config("ui", "username")
438 if user is not None:
438 if user is not None:
439 user = os.path.expandvars(user)
439 user = os.path.expandvars(user)
440 if user is None:
440 if user is None:
441 user = os.environ.get("EMAIL")
441 user = os.environ.get("EMAIL")
442 if user is None and self.configbool("ui", "askusername"):
442 if user is None and self.configbool("ui", "askusername"):
443 user = self.prompt(_("enter a commit username:"), default=None)
443 user = self.prompt(_("enter a commit username:"), default=None)
444 if user is None and not self.interactive():
444 if user is None and not self.interactive():
445 try:
445 try:
446 user = '%s@%s' % (util.getuser(), socket.getfqdn())
446 user = '%s@%s' % (util.getuser(), socket.getfqdn())
447 self.warn(_("no username found, using '%s' instead\n") % user)
447 self.warn(_("no username found, using '%s' instead\n") % user)
448 except KeyError:
448 except KeyError:
449 pass
449 pass
450 if not user:
450 if not user:
451 raise util.Abort(_('no username supplied (see "hg help config")'))
451 raise util.Abort(_('no username supplied (see "hg help config")'))
452 if "\n" in user:
452 if "\n" in user:
453 raise util.Abort(_("username %s contains a newline\n") % repr(user))
453 raise util.Abort(_("username %s contains a newline\n") % repr(user))
454 return user
454 return user
455
455
456 def shortuser(self, user):
456 def shortuser(self, user):
457 """Return a short representation of a user name or email address."""
457 """Return a short representation of a user name or email address."""
458 if not self.verbose:
458 if not self.verbose:
459 user = util.shortuser(user)
459 user = util.shortuser(user)
460 return user
460 return user
461
461
462 def expandpath(self, loc, default=None):
462 def expandpath(self, loc, default=None):
463 """Return repository location relative to cwd or from [paths]"""
463 """Return repository location relative to cwd or from [paths]"""
464 if util.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg')):
464 if util.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg')):
465 return loc
465 return loc
466
466
467 path = self.config('paths', loc)
467 path = self.config('paths', loc)
468 if not path and default is not None:
468 if not path and default is not None:
469 path = self.config('paths', default)
469 path = self.config('paths', default)
470 return path or loc
470 return path or loc
471
471
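# --- Editor's sketch (not part of ui.py): expandpath() maps a symbolic
# [paths] name to its location and otherwise returns the argument unchanged.
# The URL is made up; it also assumes no './default/.hg' directory exists.
from mercurial import ui as uimod

u = uimod.ui()
u.setconfig('paths', 'default', 'https://example.org/repo')
assert u.expandpath('default') == 'https://example.org/repo'
assert u.expandpath('not-a-configured-name') == 'not-a-configured-name'
# --- end of sketch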
472 def pushbuffer(self):
472 def pushbuffer(self):
473 self._buffers.append([])
473 self._buffers.append([])
474
474
475 def popbuffer(self, labeled=False):
475 def popbuffer(self, labeled=False):
476 '''pop the last buffer and return the buffered output
476 '''pop the last buffer and return the buffered output
477
477
478 If labeled is True, any labels associated with buffered
478 If labeled is True, any labels associated with buffered
479 output will be handled. By default, this has no effect
479 output will be handled. By default, this has no effect
480 on the output returned, but extensions and GUI tools may
480 on the output returned, but extensions and GUI tools may
481 handle this argument and return styled output. If output
481 handle this argument and return styled output. If output
482 is being buffered so it can be captured and parsed or
482 is being buffered so it can be captured and parsed or
483 processed, labeled should not be set to True.
483 processed, labeled should not be set to True.
484 '''
484 '''
485 return "".join(self._buffers.pop())
485 return "".join(self._buffers.pop())
486
486
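# --- Editor's sketch (not part of ui.py): capturing output with
# pushbuffer()/popbuffer() instead of letting it reach stdout.
from mercurial import ui as uimod

u = uimod.ui()
u.pushbuffer()
u.write('hello ', 'world\n')      # goes into the buffer, not to fout
assert u.popbuffer() == 'hello world\n'
# --- end of sketch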
487 def write(self, *args, **opts):
487 def write(self, *args, **opts):
488 '''write args to output
488 '''write args to output
489
489
490 By default, this method simply writes to the buffer or stdout,
490 By default, this method simply writes to the buffer or stdout,
491 but extensions or GUI tools may override this method,
491 but extensions or GUI tools may override this method,
492 write_err(), popbuffer(), and label() to style output from
492 write_err(), popbuffer(), and label() to style output from
493 various parts of hg.
493 various parts of hg.
494
494
495 An optional keyword argument, "label", can be passed in.
495 An optional keyword argument, "label", can be passed in.
496 This should be a string containing label names separated by
496 This should be a string containing label names separated by
497 space. Label names take the form of "topic.type". For example,
497 space. Label names take the form of "topic.type". For example,
498 ui.debug() issues a label of "ui.debug".
498 ui.debug() issues a label of "ui.debug".
499
499
500 When labeling output for a specific command, a label of
500 When labeling output for a specific command, a label of
501 "cmdname.type" is recommended. For example, status issues
501 "cmdname.type" is recommended. For example, status issues
502 a label of "status.modified" for modified files.
502 a label of "status.modified" for modified files.
503 '''
503 '''
504 if self._buffers:
504 if self._buffers:
505 self._buffers[-1].extend([str(a) for a in args])
505 self._buffers[-1].extend([str(a) for a in args])
506 else:
506 else:
507 for a in args:
507 for a in args:
508 self.fout.write(str(a))
508 self.fout.write(str(a))
509
509
510 def write_err(self, *args, **opts):
510 def write_err(self, *args, **opts):
511 try:
511 try:
512 if not getattr(self.fout, 'closed', False):
512 if not getattr(self.fout, 'closed', False):
513 self.fout.flush()
513 self.fout.flush()
514 for a in args:
514 for a in args:
515 self.ferr.write(str(a))
515 self.ferr.write(str(a))
516 # stderr may be buffered under win32 when redirected to files,
516 # stderr may be buffered under win32 when redirected to files,
517 # including stdout.
517 # including stdout.
518 if not getattr(self.ferr, 'closed', False):
518 if not getattr(self.ferr, 'closed', False):
519 self.ferr.flush()
519 self.ferr.flush()
520 except IOError, inst:
520 except IOError, inst:
521 if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
521 if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
522 raise
522 raise
523
523
524 def flush(self):
524 def flush(self):
525 try: self.fout.flush()
525 try: self.fout.flush()
526 except (IOError, ValueError): pass
526 except (IOError, ValueError): pass
527 try: self.ferr.flush()
527 try: self.ferr.flush()
528 except (IOError, ValueError): pass
528 except (IOError, ValueError): pass
529
529
530 def _isatty(self, fh):
530 def _isatty(self, fh):
531 if self.configbool('ui', 'nontty', False):
531 if self.configbool('ui', 'nontty', False):
532 return False
532 return False
533 return util.isatty(fh)
533 return util.isatty(fh)
534
534
535 def interactive(self):
535 def interactive(self):
536 '''is interactive input allowed?
536 '''is interactive input allowed?
537
537
538 An interactive session is a session where input can be reasonably read
538 An interactive session is a session where input can be reasonably read
539 from `sys.stdin'. If this function returns false, any attempt to read
539 from `sys.stdin'. If this function returns false, any attempt to read
540 from stdin should fail with an error, unless a sensible default has been
540 from stdin should fail with an error, unless a sensible default has been
541 specified.
541 specified.
542
542
543 Interactiveness is triggered by the value of the `ui.interactive'
543 Interactiveness is triggered by the value of the `ui.interactive'
544 configuration variable or - if it is unset - when `sys.stdin' points
544 configuration variable or - if it is unset - when `sys.stdin' points
545 to a terminal device.
545 to a terminal device.
546
546
547 This function refers to input only; for output, see `ui.formatted()'.
547 This function refers to input only; for output, see `ui.formatted()'.
548 '''
548 '''
549 i = self.configbool("ui", "interactive", None)
549 i = self.configbool("ui", "interactive", None)
550 if i is None:
550 if i is None:
551 # some environments replace stdin without implementing isatty
551 # some environments replace stdin without implementing isatty
552 # usually those are non-interactive
552 # usually those are non-interactive
553 return self._isatty(self.fin)
553 return self._isatty(self.fin)
554
554
555 return i
555 return i
556
556
557 def termwidth(self):
557 def termwidth(self):
558 '''how wide is the terminal in columns?
558 '''how wide is the terminal in columns?
559 '''
559 '''
560 if 'COLUMNS' in os.environ:
560 if 'COLUMNS' in os.environ:
561 try:
561 try:
562 return int(os.environ['COLUMNS'])
562 return int(os.environ['COLUMNS'])
563 except ValueError:
563 except ValueError:
564 pass
564 pass
565 return util.termwidth()
565 return util.termwidth()
566
566
567 def formatted(self):
567 def formatted(self):
568 '''should formatted output be used?
568 '''should formatted output be used?
569
569
570 It is often desirable to format the output to suit the output medium.
570 It is often desirable to format the output to suit the output medium.
571 Examples of this are truncating long lines or colorizing messages.
571 Examples of this are truncating long lines or colorizing messages.
572 However, this is often not desirable when piping output into other
572 However, this is often not desirable when piping output into other
573 utilities, e.g. `grep'.
573 utilities, e.g. `grep'.
574
574
575 Formatted output is triggered by the value of the `ui.formatted'
575 Formatted output is triggered by the value of the `ui.formatted'
576 configuration variable or - if it is unset - when `sys.stdout' points
576 configuration variable or - if it is unset - when `sys.stdout' points
577 to a terminal device. Please note that `ui.formatted' should be
577 to a terminal device. Please note that `ui.formatted' should be
578 considered an implementation detail; it is not intended for use outside
578 considered an implementation detail; it is not intended for use outside
579 Mercurial or its extensions.
579 Mercurial or its extensions.
580
580
581 This function refers to output only; for input, see `ui.interactive()'.
581 This function refers to output only; for input, see `ui.interactive()'.
582 This function always returns false when in plain mode, see `ui.plain()'.
582 This function always returns false when in plain mode, see `ui.plain()'.
583 '''
583 '''
584 if self.plain():
584 if self.plain():
585 return False
585 return False
586
586
587 i = self.configbool("ui", "formatted", None)
587 i = self.configbool("ui", "formatted", None)
588 if i is None:
588 if i is None:
589 # some environments replace stdout without implementing isatty
589 # some environments replace stdout without implementing isatty
590 # usually those are non-interactive
590 # usually those are non-interactive
591 return self._isatty(self.fout)
591 return self._isatty(self.fout)
592
592
593 return i
593 return i
594
594
595 def _readline(self, prompt=''):
595 def _readline(self, prompt=''):
596 if self._isatty(self.fin):
596 if self._isatty(self.fin):
597 try:
597 try:
598 # magically add command line editing support, where
598 # magically add command line editing support, where
599 # available
599 # available
600 import readline
600 import readline
601 # force demandimport to really load the module
601 # force demandimport to really load the module
602 readline.read_history_file
602 readline.read_history_file
603 # windows sometimes raises something other than ImportError
603 # windows sometimes raises something other than ImportError
604 except Exception:
604 except Exception:
605 pass
605 pass
606
606
607 # call write() so output goes through subclassed implementation
607 # call write() so output goes through subclassed implementation
608 # e.g. color extension on Windows
608 # e.g. color extension on Windows
609 self.write(prompt)
609 self.write(prompt)
610
610
611 # instead of trying to emulate raw_input, swap (self.fin,
611 # instead of trying to emulate raw_input, swap (self.fin,
612 # self.fout) with (sys.stdin, sys.stdout)
612 # self.fout) with (sys.stdin, sys.stdout)
613 oldin = sys.stdin
613 oldin = sys.stdin
614 oldout = sys.stdout
614 oldout = sys.stdout
615 sys.stdin = self.fin
615 sys.stdin = self.fin
616 sys.stdout = self.fout
616 sys.stdout = self.fout
617 line = raw_input(' ')
617 line = raw_input(' ')
618 sys.stdin = oldin
618 sys.stdin = oldin
619 sys.stdout = oldout
619 sys.stdout = oldout
620
620
621 # When stdin is in binary mode on Windows, it can cause
621 # When stdin is in binary mode on Windows, it can cause
622 # raw_input() to emit an extra trailing carriage return
622 # raw_input() to emit an extra trailing carriage return
623 if os.linesep == '\r\n' and line and line[-1] == '\r':
623 if os.linesep == '\r\n' and line and line[-1] == '\r':
624 line = line[:-1]
624 line = line[:-1]
625 return line
625 return line
626
626
627 def prompt(self, msg, default="y"):
627 def prompt(self, msg, default="y"):
628 """Prompt user with msg, read response.
628 """Prompt user with msg, read response.
629 If ui is not interactive, the default is returned.
629 If ui is not interactive, the default is returned.
630 """
630 """
631 if not self.interactive():
631 if not self.interactive():
632 self.write(msg, ' ', default, "\n")
632 self.write(msg, ' ', default, "\n")
633 return default
633 return default
634 try:
634 try:
635 r = self._readline(self.label(msg, 'ui.prompt'))
635 r = self._readline(self.label(msg, 'ui.prompt'))
636 if not r:
636 if not r:
637 return default
637 return default
638 return r
638 return r
639 except EOFError:
639 except EOFError:
640 raise util.Abort(_('response expected'))
640 raise util.Abort(_('response expected'))
641
641
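# --- Editor's sketch (not part of ui.py): with interactivity switched off,
# prompt() echoes the question and simply returns the default.
from mercurial import ui as uimod

u = uimod.ui()
u.setconfig('ui', 'interactive', 'no')
u.pushbuffer()                              # swallow the echoed prompt
answer = u.prompt('continue?', default='y')
u.popbuffer()
assert answer == 'y'
# --- end of sketch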
642 def promptchoice(self, msg, choices, default=0):
642 def promptchoice(self, prompt, default=0):
643 """Prompt user with msg, read response, and ensure it matches
643 """Prompt user with a message, read response, and ensure it matches
644 one of the provided choices. The index of the choice is returned.
644 one of the provided choices. The prompt is formatted as follows:
645 choices is a sequence of acceptable responses with the format:
645
646 ('&None', 'E&xec', 'Sym&link') Responses are case insensitive.
646 "would you like fries with that (Yn)? $$ &Yes $$ &No"
647 If ui is not interactive, the default is returned.
647
648 The index of the choice is returned. Responses are case
649 insensitive. If ui is not interactive, the default is
650 returned.
648 """
651 """
652
653 parts = prompt.split('$$')
654 msg = parts[0].rstrip(' ')
655 choices = [p.strip(' ') for p in parts[1:]]
649 resps = [s[s.index('&') + 1].lower() for s in choices]
656 resps = [s[s.index('&') + 1].lower() for s in choices]
650 while True:
657 while True:
651 r = self.prompt(msg, resps[default])
658 r = self.prompt(msg, resps[default])
652 if r.lower() in resps:
659 if r.lower() in resps:
653 return resps.index(r.lower())
660 return resps.index(r.lower())
654 self.write(_("unrecognized response\n"))
661 self.write(_("unrecognized response\n"))
655
662
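# --- Editor's sketch (not part of ui.py): the single-string prompt format
# introduced by this change -- message and choices separated by '$$', with
# '&' marking each response character. Illustrative only.
from mercurial import ui as uimod

u = uimod.ui()
u.setconfig('ui', 'interactive', 'no')
u.pushbuffer()
idx = u.promptchoice('apply change (yn)?$$ &Yes $$ &No', default=1)
u.popbuffer()
assert idx == 1    # non-interactive: the default index comes back
# --- end of sketch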
656 def getpass(self, prompt=None, default=None):
663 def getpass(self, prompt=None, default=None):
657 if not self.interactive():
664 if not self.interactive():
658 return default
665 return default
659 try:
666 try:
660 self.write(self.label(prompt or _('password: '), 'ui.prompt'))
667 self.write(self.label(prompt or _('password: '), 'ui.prompt'))
661 return getpass.getpass('')
668 return getpass.getpass('')
662 except EOFError:
669 except EOFError:
663 raise util.Abort(_('response expected'))
670 raise util.Abort(_('response expected'))
664 def status(self, *msg, **opts):
671 def status(self, *msg, **opts):
665 '''write status message to output (if ui.quiet is False)
672 '''write status message to output (if ui.quiet is False)
666
673
667 This adds an output label of "ui.status".
674 This adds an output label of "ui.status".
668 '''
675 '''
669 if not self.quiet:
676 if not self.quiet:
670 opts['label'] = opts.get('label', '') + ' ui.status'
677 opts['label'] = opts.get('label', '') + ' ui.status'
671 self.write(*msg, **opts)
678 self.write(*msg, **opts)
672 def warn(self, *msg, **opts):
679 def warn(self, *msg, **opts):
673 '''write warning message to output (stderr)
680 '''write warning message to output (stderr)
674
681
675 This adds an output label of "ui.warning".
682 This adds an output label of "ui.warning".
676 '''
683 '''
677 opts['label'] = opts.get('label', '') + ' ui.warning'
684 opts['label'] = opts.get('label', '') + ' ui.warning'
678 self.write_err(*msg, **opts)
685 self.write_err(*msg, **opts)
679 def note(self, *msg, **opts):
686 def note(self, *msg, **opts):
680 '''write note to output (if ui.verbose is True)
687 '''write note to output (if ui.verbose is True)
681
688
682 This adds an output label of "ui.note".
689 This adds an output label of "ui.note".
683 '''
690 '''
684 if self.verbose:
691 if self.verbose:
685 opts['label'] = opts.get('label', '') + ' ui.note'
692 opts['label'] = opts.get('label', '') + ' ui.note'
686 self.write(*msg, **opts)
693 self.write(*msg, **opts)
687 def debug(self, *msg, **opts):
694 def debug(self, *msg, **opts):
688 '''write debug message to output (if ui.debugflag is True)
695 '''write debug message to output (if ui.debugflag is True)
689
696
690 This adds an output label of "ui.debug".
697 This adds an output label of "ui.debug".
691 '''
698 '''
692 if self.debugflag:
699 if self.debugflag:
693 opts['label'] = opts.get('label', '') + ' ui.debug'
700 opts['label'] = opts.get('label', '') + ' ui.debug'
694 self.write(*msg, **opts)
701 self.write(*msg, **opts)
695 def edit(self, text, user):
702 def edit(self, text, user):
696 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
703 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
697 text=True)
704 text=True)
698 try:
705 try:
699 f = os.fdopen(fd, "w")
706 f = os.fdopen(fd, "w")
700 f.write(text)
707 f.write(text)
701 f.close()
708 f.close()
702
709
703 editor = self.geteditor()
710 editor = self.geteditor()
704
711
705 util.system("%s \"%s\"" % (editor, name),
712 util.system("%s \"%s\"" % (editor, name),
706 environ={'HGUSER': user},
713 environ={'HGUSER': user},
707 onerr=util.Abort, errprefix=_("edit failed"),
714 onerr=util.Abort, errprefix=_("edit failed"),
708 out=self.fout)
715 out=self.fout)
709
716
710 f = open(name)
717 f = open(name)
711 t = f.read()
718 t = f.read()
712 f.close()
719 f.close()
713 finally:
720 finally:
714 os.unlink(name)
721 os.unlink(name)
715
722
716 return t
723 return t
717
724
718 def traceback(self, exc=None, force=False):
725 def traceback(self, exc=None, force=False):
719 '''print exception traceback if traceback printing enabled or forced.
726 '''print exception traceback if traceback printing enabled or forced.
720 only call this from an exception handler. returns True if a traceback
727 only call this from an exception handler. returns True if a traceback
721 was printed.'''
728 was printed.'''
722 if self.tracebackflag or force:
729 if self.tracebackflag or force:
723 if exc is None:
730 if exc is None:
724 exc = sys.exc_info()
731 exc = sys.exc_info()
725 cause = getattr(exc[1], 'cause', None)
732 cause = getattr(exc[1], 'cause', None)
726
733
727 if cause is not None:
734 if cause is not None:
728 causetb = traceback.format_tb(cause[2])
735 causetb = traceback.format_tb(cause[2])
729 exctb = traceback.format_tb(exc[2])
736 exctb = traceback.format_tb(exc[2])
730 exconly = traceback.format_exception_only(cause[0], cause[1])
737 exconly = traceback.format_exception_only(cause[0], cause[1])
731
738
732 # exclude frame where 'exc' was chained and rethrown from exctb
739 # exclude frame where 'exc' was chained and rethrown from exctb
733 self.write_err('Traceback (most recent call last):\n',
740 self.write_err('Traceback (most recent call last):\n',
734 ''.join(exctb[:-1]),
741 ''.join(exctb[:-1]),
735 ''.join(causetb),
742 ''.join(causetb),
736 ''.join(exconly))
743 ''.join(exconly))
737 else:
744 else:
738 traceback.print_exception(exc[0], exc[1], exc[2],
745 traceback.print_exception(exc[0], exc[1], exc[2],
739 file=self.ferr)
746 file=self.ferr)
740 return self.tracebackflag or force
747 return self.tracebackflag or force
741
748
742 def geteditor(self):
749 def geteditor(self):
743 '''return editor to use'''
750 '''return editor to use'''
744 if sys.platform == 'plan9':
751 if sys.platform == 'plan9':
745 # vi is the MIPS instruction simulator on Plan 9. We
752 # vi is the MIPS instruction simulator on Plan 9. We
746 # instead default to E to plumb commit messages to
753 # instead default to E to plumb commit messages to
747 # avoid confusion.
754 # avoid confusion.
748 editor = 'E'
755 editor = 'E'
749 else:
756 else:
750 editor = 'vi'
757 editor = 'vi'
751 return (os.environ.get("HGEDITOR") or
758 return (os.environ.get("HGEDITOR") or
752 self.config("ui", "editor") or
759 self.config("ui", "editor") or
753 os.environ.get("VISUAL") or
760 os.environ.get("VISUAL") or
754 os.environ.get("EDITOR", editor))
761 os.environ.get("EDITOR", editor))
755
762
756 def progress(self, topic, pos, item="", unit="", total=None):
763 def progress(self, topic, pos, item="", unit="", total=None):
757 '''show a progress message
764 '''show a progress message
758
765
759 With stock hg, this is simply a debug message that is hidden
766 With stock hg, this is simply a debug message that is hidden
760 by default, but with extensions or GUI tools it may be
767 by default, but with extensions or GUI tools it may be
761 visible. 'topic' is the current operation, 'item' is a
768 visible. 'topic' is the current operation, 'item' is a
762 non-numeric marker of the current position (i.e. the currently
769 non-numeric marker of the current position (i.e. the currently
763 in-process file), 'pos' is the current numeric position (i.e.
770 in-process file), 'pos' is the current numeric position (i.e.
764 revision, bytes, etc.), unit is a corresponding unit label,
771 revision, bytes, etc.), unit is a corresponding unit label,
765 and total is the highest expected pos.
772 and total is the highest expected pos.
766
773
767 Multiple nested topics may be active at a time.
774 Multiple nested topics may be active at a time.
768
775
769 All topics should be marked closed by setting pos to None at
776 All topics should be marked closed by setting pos to None at
770 termination.
777 termination.
771 '''
778 '''
772
779
773 if pos is None or not self.debugflag:
780 if pos is None or not self.debugflag:
774 return
781 return
775
782
776 if unit:
783 if unit:
777 unit = ' ' + unit
784 unit = ' ' + unit
778 if item:
785 if item:
779 item = ' ' + item
786 item = ' ' + item
780
787
781 if total:
788 if total:
782 pct = 100.0 * pos / total
789 pct = 100.0 * pos / total
783 self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
790 self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
784 % (topic, item, pos, total, unit, pct))
791 % (topic, item, pos, total, unit, pct))
785 else:
792 else:
786 self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
793 self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
787
794
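# --- Editor's sketch (not part of ui.py): reporting progress over a list of
# hypothetical work items; the topic is closed by passing pos=None. With
# stock hg this only surfaces as debug output.
from mercurial import ui as uimod

u = uimod.ui()
items = ['a.txt', 'b.txt', 'c.txt']
for i, name in enumerate(items):
    u.progress('importing', i, item=name, unit='files', total=len(items))
u.progress('importing', None)   # mark the topic as finished
# --- end of sketch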
788 def log(self, service, *msg, **opts):
795 def log(self, service, *msg, **opts):
789 '''hook for logging facility extensions
796 '''hook for logging facility extensions
790
797
791 service should be a readily-identifiable subsystem, which will
798 service should be a readily-identifiable subsystem, which will
792 allow filtering.
799 allow filtering.
793 message should be a newline-terminated string to log.
800 message should be a newline-terminated string to log.
794 '''
801 '''
795 pass
802 pass
796
803
797 def label(self, msg, label):
804 def label(self, msg, label):
798 '''style msg based on supplied label
805 '''style msg based on supplied label
799
806
800 Like ui.write(), this just returns msg unchanged, but extensions
807 Like ui.write(), this just returns msg unchanged, but extensions
801 and GUI tools can override it to allow styling output without
808 and GUI tools can override it to allow styling output without
802 writing it.
809 writing it.
803
810
804 ui.write(s, 'label') is equivalent to
811 ui.write(s, 'label') is equivalent to
805 ui.write(ui.label(s, 'label')).
812 ui.write(ui.label(s, 'label')).
806 '''
813 '''
807 return msg
814 return msg
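# --- Editor's sketch (not part of ui.py): label() is a pass-through here;
# extensions such as color override it to return styled text.
from mercurial import ui as uimod

u = uimod.ui()
u.pushbuffer()
u.write(u.label('grafting revision 12\n', 'ui.status'))
assert u.popbuffer() == 'grafting revision 12\n'
# --- end of sketch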